/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
52 #include "fold-const.h"
53 #include "stor-layout.h"
55 #include "tree-iterator.h"
57 #include "insn-config.h"
67 #include "diagnostic-core.h"
69 #include "langhooks.h"
71 #include "internal-fn.h"
77 #include "generic-match.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
106 static bool negate_mathfn_p (enum built_in_function
);
107 static bool negate_expr_p (tree
);
108 static tree
negate_expr (tree
);
109 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
110 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
111 static enum comparison_code
comparison_to_compcode (enum tree_code
);
112 static enum tree_code
compcode_to_comparison (enum comparison_code
);
113 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
114 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
115 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
116 static tree
distribute_bit_expr (location_t
, enum tree_code
, tree
, tree
, tree
);
117 static tree
make_bit_field_ref (location_t
, tree
, tree
,
118 HOST_WIDE_INT
, HOST_WIDE_INT
, int);
119 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
121 static tree
decode_field_reference (location_t
, tree
, HOST_WIDE_INT
*,
123 machine_mode
*, int *, int *,
125 static int simple_operand_p (const_tree
);
126 static bool simple_operand_p_2 (tree
);
127 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
128 static tree
range_predecessor (tree
);
129 static tree
range_successor (tree
);
130 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
131 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
132 static tree
unextend (tree
, int, int, tree
);
133 static tree
optimize_minmax_comparison (location_t
, enum tree_code
,
135 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
136 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
137 static tree
fold_binary_op_with_conditional_arg (location_t
,
138 enum tree_code
, tree
,
141 static tree
fold_div_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
142 static bool reorder_operands_p (const_tree
, const_tree
);
143 static tree
fold_negate_const (tree
, tree
);
144 static tree
fold_not_const (const_tree
, tree
);
145 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
146 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
147 static tree
fold_view_convert_expr (tree
, tree
);
148 static bool vec_cst_ctor_to_array (tree
, tree
*);
151 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
152 Otherwise, return LOC. */
155 expr_location_or (tree t
, location_t loc
)
157 location_t tloc
= EXPR_LOCATION (t
);
158 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
161 /* Similar to protected_set_expr_location, but never modify x in place,
162 if location can and needs to be set, unshare it. */
165 protected_set_expr_location_unshare (tree x
, location_t loc
)
167 if (CAN_HAVE_LOCATION_P (x
)
168 && EXPR_LOCATION (x
) != loc
169 && !(TREE_CODE (x
) == SAVE_EXPR
170 || TREE_CODE (x
) == TARGET_EXPR
171 || TREE_CODE (x
) == BIND_EXPR
))
174 SET_EXPR_LOCATION (x
, loc
);
179 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
180 division and returns the quotient. Otherwise returns
184 div_if_zero_remainder (const_tree arg1
, const_tree arg2
)
188 if (wi::multiple_of_p (wi::to_widest (arg1
), wi::to_widest (arg2
),
190 return wide_int_to_tree (TREE_TYPE (arg1
), quo
);
195 /* This is nonzero if we should defer warnings about undefined
196 overflow. This facility exists because these warnings are a
197 special case. The code to estimate loop iterations does not want
198 to issue any warnings, since it works with expressions which do not
199 occur in user code. Various bits of cleanup code call fold(), but
200 only use the result if it has certain characteristics (e.g., is a
201 constant); that code only wants to issue a warning if the result is
204 static int fold_deferring_overflow_warnings
;
206 /* If a warning about undefined overflow is deferred, this is the
207 warning. Note that this may cause us to turn two warnings into
208 one, but that is fine since it is sufficient to only give one
209 warning per expression. */
211 static const char* fold_deferred_overflow_warning
;
213 /* If a warning about undefined overflow is deferred, this is the
214 level at which the warning should be emitted. */
216 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
218 /* Start deferring overflow warnings. We could use a stack here to
219 permit nested calls, but at present it is not necessary. */
222 fold_defer_overflow_warnings (void)
224 ++fold_deferring_overflow_warnings
;
227 /* Stop deferring overflow warnings. If there is a pending warning,
228 and ISSUE is true, then issue the warning if appropriate. STMT is
229 the statement with which the warning should be associated (used for
230 location information); STMT may be NULL. CODE is the level of the
231 warning--a warn_strict_overflow_code value. This function will use
232 the smaller of CODE and the deferred code when deciding whether to
233 issue the warning. CODE may be zero to mean to always use the
237 fold_undefer_overflow_warnings (bool issue
, const_gimple stmt
, int code
)
242 gcc_assert (fold_deferring_overflow_warnings
> 0);
243 --fold_deferring_overflow_warnings
;
244 if (fold_deferring_overflow_warnings
> 0)
246 if (fold_deferred_overflow_warning
!= NULL
248 && code
< (int) fold_deferred_overflow_code
)
249 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
253 warnmsg
= fold_deferred_overflow_warning
;
254 fold_deferred_overflow_warning
= NULL
;
256 if (!issue
|| warnmsg
== NULL
)
259 if (gimple_no_warning_p (stmt
))
262 /* Use the smallest code level when deciding to issue the
264 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
265 code
= fold_deferred_overflow_code
;
267 if (!issue_strict_overflow_warning (code
))
271 locus
= input_location
;
273 locus
= gimple_location (stmt
);
274 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
277 /* Stop deferring overflow warnings, ignoring any deferred
281 fold_undefer_and_ignore_overflow_warnings (void)
283 fold_undefer_overflow_warnings (false, NULL
, 0);
286 /* Whether we are deferring overflow warnings. */
289 fold_deferring_overflow_warnings_p (void)
291 return fold_deferring_overflow_warnings
> 0;
294 /* This is called when we fold something based on the fact that signed
295 overflow is undefined. */
298 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
300 if (fold_deferring_overflow_warnings
> 0)
302 if (fold_deferred_overflow_warning
== NULL
303 || wc
< fold_deferred_overflow_code
)
305 fold_deferred_overflow_warning
= gmsgid
;
306 fold_deferred_overflow_code
= wc
;
309 else if (issue_strict_overflow_warning (wc
))
310 warning (OPT_Wstrict_overflow
, gmsgid
);
313 /* Return true if the built-in mathematical function specified by CODE
314 is odd, i.e. -f(x) == f(-x). */
317 negate_mathfn_p (enum built_in_function code
)
321 CASE_FLT_FN (BUILT_IN_ASIN
):
322 CASE_FLT_FN (BUILT_IN_ASINH
):
323 CASE_FLT_FN (BUILT_IN_ATAN
):
324 CASE_FLT_FN (BUILT_IN_ATANH
):
325 CASE_FLT_FN (BUILT_IN_CASIN
):
326 CASE_FLT_FN (BUILT_IN_CASINH
):
327 CASE_FLT_FN (BUILT_IN_CATAN
):
328 CASE_FLT_FN (BUILT_IN_CATANH
):
329 CASE_FLT_FN (BUILT_IN_CBRT
):
330 CASE_FLT_FN (BUILT_IN_CPROJ
):
331 CASE_FLT_FN (BUILT_IN_CSIN
):
332 CASE_FLT_FN (BUILT_IN_CSINH
):
333 CASE_FLT_FN (BUILT_IN_CTAN
):
334 CASE_FLT_FN (BUILT_IN_CTANH
):
335 CASE_FLT_FN (BUILT_IN_ERF
):
336 CASE_FLT_FN (BUILT_IN_LLROUND
):
337 CASE_FLT_FN (BUILT_IN_LROUND
):
338 CASE_FLT_FN (BUILT_IN_ROUND
):
339 CASE_FLT_FN (BUILT_IN_SIN
):
340 CASE_FLT_FN (BUILT_IN_SINH
):
341 CASE_FLT_FN (BUILT_IN_TAN
):
342 CASE_FLT_FN (BUILT_IN_TANH
):
343 CASE_FLT_FN (BUILT_IN_TRUNC
):
346 CASE_FLT_FN (BUILT_IN_LLRINT
):
347 CASE_FLT_FN (BUILT_IN_LRINT
):
348 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
349 CASE_FLT_FN (BUILT_IN_RINT
):
350 return !flag_rounding_math
;
358 /* Check whether we may negate an integer constant T without causing
362 may_negate_without_overflow_p (const_tree t
)
366 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
368 type
= TREE_TYPE (t
);
369 if (TYPE_UNSIGNED (type
))
372 return !wi::only_sign_bit_p (t
);
375 /* Determine whether an expression T can be cheaply negated using
376 the function negate_expr without introducing undefined overflow. */
379 negate_expr_p (tree t
)
386 type
= TREE_TYPE (t
);
389 switch (TREE_CODE (t
))
392 if (INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_WRAPS (type
))
395 /* Check that -CST will not overflow type. */
396 return may_negate_without_overflow_p (t
);
398 return (INTEGRAL_TYPE_P (type
)
399 && TYPE_OVERFLOW_WRAPS (type
));
405 return !TYPE_OVERFLOW_SANITIZED (type
);
408 /* We want to canonicalize to positive real constants. Pretend
409 that only negative ones can be easily negated. */
410 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
413 return negate_expr_p (TREE_REALPART (t
))
414 && negate_expr_p (TREE_IMAGPART (t
));
418 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
421 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
423 for (i
= 0; i
< count
; i
++)
424 if (!negate_expr_p (VECTOR_CST_ELT (t
, i
)))
431 return negate_expr_p (TREE_OPERAND (t
, 0))
432 && negate_expr_p (TREE_OPERAND (t
, 1));
435 return negate_expr_p (TREE_OPERAND (t
, 0));
438 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
439 || HONOR_SIGNED_ZEROS (element_mode (type
)))
441 /* -(A + B) -> (-B) - A. */
442 if (negate_expr_p (TREE_OPERAND (t
, 1))
443 && reorder_operands_p (TREE_OPERAND (t
, 0),
444 TREE_OPERAND (t
, 1)))
446 /* -(A + B) -> (-A) - B. */
447 return negate_expr_p (TREE_OPERAND (t
, 0));
450 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
451 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
452 && !HONOR_SIGNED_ZEROS (element_mode (type
))
453 && reorder_operands_p (TREE_OPERAND (t
, 0),
454 TREE_OPERAND (t
, 1));
457 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
463 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t
))))
464 return negate_expr_p (TREE_OPERAND (t
, 1))
465 || negate_expr_p (TREE_OPERAND (t
, 0));
471 /* In general we can't negate A / B, because if A is INT_MIN and
472 B is 1, we may turn this into INT_MIN / -1 which is undefined
473 and actually traps on some architectures. But if overflow is
474 undefined, we can negate, because - (INT_MIN / 1) is an
476 if (INTEGRAL_TYPE_P (TREE_TYPE (t
)))
478 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
480 /* If overflow is undefined then we have to be careful because
481 we ask whether it's ok to associate the negate with the
482 division which is not ok for example for
483 -((a - b) / c) where (-(a - b)) / c may invoke undefined
484 overflow because of negating INT_MIN. So do not use
485 negate_expr_p here but open-code the two important cases. */
486 if (TREE_CODE (TREE_OPERAND (t
, 0)) == NEGATE_EXPR
487 || (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
488 && may_negate_without_overflow_p (TREE_OPERAND (t
, 0))))
491 else if (negate_expr_p (TREE_OPERAND (t
, 0)))
493 return negate_expr_p (TREE_OPERAND (t
, 1));
496 /* Negate -((double)float) as (double)(-float). */
497 if (TREE_CODE (type
) == REAL_TYPE
)
499 tree tem
= strip_float_extensions (t
);
501 return negate_expr_p (tem
);
506 /* Negate -f(x) as f(-x). */
507 if (negate_mathfn_p (builtin_mathfn_code (t
)))
508 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
512 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
513 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
515 tree op1
= TREE_OPERAND (t
, 1);
516 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
527 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
528 simplification is possible.
529 If negate_expr_p would return true for T, NULL_TREE will never be
533 fold_negate_expr (location_t loc
, tree t
)
535 tree type
= TREE_TYPE (t
);
538 switch (TREE_CODE (t
))
540 /* Convert - (~A) to A + 1. */
542 if (INTEGRAL_TYPE_P (type
))
543 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
544 build_one_cst (type
));
548 tem
= fold_negate_const (t
, type
);
549 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
550 || (ANY_INTEGRAL_TYPE_P (type
)
551 && !TYPE_OVERFLOW_TRAPS (type
)
552 && TYPE_OVERFLOW_WRAPS (type
))
553 || (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
558 tem
= fold_negate_const (t
, type
);
562 tem
= fold_negate_const (t
, type
);
567 tree rpart
= fold_negate_expr (loc
, TREE_REALPART (t
));
568 tree ipart
= fold_negate_expr (loc
, TREE_IMAGPART (t
));
570 return build_complex (type
, rpart
, ipart
);
576 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
577 tree
*elts
= XALLOCAVEC (tree
, count
);
579 for (i
= 0; i
< count
; i
++)
581 elts
[i
] = fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
582 if (elts
[i
] == NULL_TREE
)
586 return build_vector (type
, elts
);
590 if (negate_expr_p (t
))
591 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
592 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
593 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
597 if (negate_expr_p (t
))
598 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
599 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
603 if (!TYPE_OVERFLOW_SANITIZED (type
))
604 return TREE_OPERAND (t
, 0);
608 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
609 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
611 /* -(A + B) -> (-B) - A. */
612 if (negate_expr_p (TREE_OPERAND (t
, 1))
613 && reorder_operands_p (TREE_OPERAND (t
, 0),
614 TREE_OPERAND (t
, 1)))
616 tem
= negate_expr (TREE_OPERAND (t
, 1));
617 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
618 tem
, TREE_OPERAND (t
, 0));
621 /* -(A + B) -> (-A) - B. */
622 if (negate_expr_p (TREE_OPERAND (t
, 0)))
624 tem
= negate_expr (TREE_OPERAND (t
, 0));
625 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
626 tem
, TREE_OPERAND (t
, 1));
632 /* - (A - B) -> B - A */
633 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
634 && !HONOR_SIGNED_ZEROS (element_mode (type
))
635 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
636 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
637 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
641 if (TYPE_UNSIGNED (type
))
647 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
)))
649 tem
= TREE_OPERAND (t
, 1);
650 if (negate_expr_p (tem
))
651 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
652 TREE_OPERAND (t
, 0), negate_expr (tem
));
653 tem
= TREE_OPERAND (t
, 0);
654 if (negate_expr_p (tem
))
655 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
656 negate_expr (tem
), TREE_OPERAND (t
, 1));
663 /* In general we can't negate A / B, because if A is INT_MIN and
664 B is 1, we may turn this into INT_MIN / -1 which is undefined
665 and actually traps on some architectures. But if overflow is
666 undefined, we can negate, because - (INT_MIN / 1) is an
668 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
670 const char * const warnmsg
= G_("assuming signed overflow does not "
671 "occur when negating a division");
672 tem
= TREE_OPERAND (t
, 1);
673 if (negate_expr_p (tem
))
675 if (INTEGRAL_TYPE_P (type
)
676 && (TREE_CODE (tem
) != INTEGER_CST
677 || integer_onep (tem
)))
678 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
679 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
680 TREE_OPERAND (t
, 0), negate_expr (tem
));
682 /* If overflow is undefined then we have to be careful because
683 we ask whether it's ok to associate the negate with the
684 division which is not ok for example for
685 -((a - b) / c) where (-(a - b)) / c may invoke undefined
686 overflow because of negating INT_MIN. So do not use
687 negate_expr_p here but open-code the two important cases. */
688 tem
= TREE_OPERAND (t
, 0);
689 if ((INTEGRAL_TYPE_P (type
)
690 && (TREE_CODE (tem
) == NEGATE_EXPR
691 || (TREE_CODE (tem
) == INTEGER_CST
692 && may_negate_without_overflow_p (tem
))))
693 || !INTEGRAL_TYPE_P (type
))
694 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
695 negate_expr (tem
), TREE_OPERAND (t
, 1));
700 /* Convert -((double)float) into (double)(-float). */
701 if (TREE_CODE (type
) == REAL_TYPE
)
703 tem
= strip_float_extensions (t
);
704 if (tem
!= t
&& negate_expr_p (tem
))
705 return fold_convert_loc (loc
, type
, negate_expr (tem
));
710 /* Negate -f(x) as f(-x). */
711 if (negate_mathfn_p (builtin_mathfn_code (t
))
712 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
716 fndecl
= get_callee_fndecl (t
);
717 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
718 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
723 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
724 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
726 tree op1
= TREE_OPERAND (t
, 1);
727 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
729 tree ntype
= TYPE_UNSIGNED (type
)
730 ? signed_type_for (type
)
731 : unsigned_type_for (type
);
732 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
733 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
734 return fold_convert_loc (loc
, type
, temp
);
746 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
747 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
759 loc
= EXPR_LOCATION (t
);
760 type
= TREE_TYPE (t
);
763 tem
= fold_negate_expr (loc
, t
);
765 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
766 return fold_convert_loc (loc
, type
, tem
);
769 /* Split a tree IN into a constant, literal and variable parts that could be
770 combined with CODE to make IN. "constant" means an expression with
771 TREE_CONSTANT but that isn't an actual constant. CODE must be a
772 commutative arithmetic operation. Store the constant part into *CONP,
773 the literal in *LITP and return the variable part. If a part isn't
774 present, set it to null. If the tree does not decompose in this way,
775 return the entire tree as the variable part and the other parts as null.
777 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
778 case, we negate an operand that was subtracted. Except if it is a
779 literal for which we use *MINUS_LITP instead.
781 If NEGATE_P is true, we are negating all of IN, again except a literal
782 for which we use *MINUS_LITP instead.
784 If IN is itself a literal or constant, return it as appropriate.
786 Note that we do not guarantee that any of the three values will be the
787 same type as IN, but they will have the same signedness and mode. */
790 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
791 tree
*minus_litp
, int negate_p
)
799 /* Strip any conversions that don't change the machine mode or signedness. */
800 STRIP_SIGN_NOPS (in
);
802 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
803 || TREE_CODE (in
) == FIXED_CST
)
805 else if (TREE_CODE (in
) == code
806 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
807 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
808 /* We can associate addition and subtraction together (even
809 though the C standard doesn't say so) for integers because
810 the value is not affected. For reals, the value might be
811 affected, so we can't. */
812 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
813 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
815 tree op0
= TREE_OPERAND (in
, 0);
816 tree op1
= TREE_OPERAND (in
, 1);
817 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
818 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
820 /* First see if either of the operands is a literal, then a constant. */
821 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
822 || TREE_CODE (op0
) == FIXED_CST
)
823 *litp
= op0
, op0
= 0;
824 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
825 || TREE_CODE (op1
) == FIXED_CST
)
826 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
828 if (op0
!= 0 && TREE_CONSTANT (op0
))
829 *conp
= op0
, op0
= 0;
830 else if (op1
!= 0 && TREE_CONSTANT (op1
))
831 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
833 /* If we haven't dealt with either operand, this is not a case we can
834 decompose. Otherwise, VAR is either of the ones remaining, if any. */
835 if (op0
!= 0 && op1
!= 0)
840 var
= op1
, neg_var_p
= neg1_p
;
842 /* Now do any needed negations. */
844 *minus_litp
= *litp
, *litp
= 0;
846 *conp
= negate_expr (*conp
);
848 var
= negate_expr (var
);
850 else if (TREE_CODE (in
) == BIT_NOT_EXPR
851 && code
== PLUS_EXPR
)
853 /* -X - 1 is folded to ~X, undo that here. */
854 *minus_litp
= build_one_cst (TREE_TYPE (in
));
855 var
= negate_expr (TREE_OPERAND (in
, 0));
857 else if (TREE_CONSTANT (in
))
865 *minus_litp
= *litp
, *litp
= 0;
866 else if (*minus_litp
)
867 *litp
= *minus_litp
, *minus_litp
= 0;
868 *conp
= negate_expr (*conp
);
869 var
= negate_expr (var
);
875 /* Re-associate trees split by the above function. T1 and T2 are
876 either expressions to associate or null. Return the new
877 expression, if any. LOC is the location of the new expression. If
878 we build an operation, do it in TYPE and with CODE. */
881 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
888 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
889 try to fold this since we will have infinite recursion. But do
890 deal with any NEGATE_EXPRs. */
891 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
892 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
894 if (code
== PLUS_EXPR
)
896 if (TREE_CODE (t1
) == NEGATE_EXPR
)
897 return build2_loc (loc
, MINUS_EXPR
, type
,
898 fold_convert_loc (loc
, type
, t2
),
899 fold_convert_loc (loc
, type
,
900 TREE_OPERAND (t1
, 0)));
901 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
902 return build2_loc (loc
, MINUS_EXPR
, type
,
903 fold_convert_loc (loc
, type
, t1
),
904 fold_convert_loc (loc
, type
,
905 TREE_OPERAND (t2
, 0)));
906 else if (integer_zerop (t2
))
907 return fold_convert_loc (loc
, type
, t1
);
909 else if (code
== MINUS_EXPR
)
911 if (integer_zerop (t2
))
912 return fold_convert_loc (loc
, type
, t1
);
915 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
916 fold_convert_loc (loc
, type
, t2
));
919 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
920 fold_convert_loc (loc
, type
, t2
));
923 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
924 for use in int_const_binop, size_binop and size_diffop. */
927 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
929 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
931 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
946 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
947 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
948 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
952 /* Combine two integer constants ARG1 and ARG2 under operation CODE
953 to produce a new constant. Return NULL_TREE if we don't know how
954 to evaluate CODE at compile-time. */
957 int_const_binop_1 (enum tree_code code
, const_tree arg1
, const_tree parg2
,
962 tree type
= TREE_TYPE (arg1
);
963 signop sign
= TYPE_SIGN (type
);
964 bool overflow
= false;
966 wide_int arg2
= wide_int::from (parg2
, TYPE_PRECISION (type
),
967 TYPE_SIGN (TREE_TYPE (parg2
)));
972 res
= wi::bit_or (arg1
, arg2
);
976 res
= wi::bit_xor (arg1
, arg2
);
980 res
= wi::bit_and (arg1
, arg2
);
985 if (wi::neg_p (arg2
))
988 if (code
== RSHIFT_EXPR
)
994 if (code
== RSHIFT_EXPR
)
995 /* It's unclear from the C standard whether shifts can overflow.
996 The following code ignores overflow; perhaps a C standard
997 interpretation ruling is needed. */
998 res
= wi::rshift (arg1
, arg2
, sign
);
1000 res
= wi::lshift (arg1
, arg2
);
1005 if (wi::neg_p (arg2
))
1008 if (code
== RROTATE_EXPR
)
1009 code
= LROTATE_EXPR
;
1011 code
= RROTATE_EXPR
;
1014 if (code
== RROTATE_EXPR
)
1015 res
= wi::rrotate (arg1
, arg2
);
1017 res
= wi::lrotate (arg1
, arg2
);
1021 res
= wi::add (arg1
, arg2
, sign
, &overflow
);
1025 res
= wi::sub (arg1
, arg2
, sign
, &overflow
);
1029 res
= wi::mul (arg1
, arg2
, sign
, &overflow
);
1032 case MULT_HIGHPART_EXPR
:
1033 res
= wi::mul_high (arg1
, arg2
, sign
);
1036 case TRUNC_DIV_EXPR
:
1037 case EXACT_DIV_EXPR
:
1040 res
= wi::div_trunc (arg1
, arg2
, sign
, &overflow
);
1043 case FLOOR_DIV_EXPR
:
1046 res
= wi::div_floor (arg1
, arg2
, sign
, &overflow
);
1052 res
= wi::div_ceil (arg1
, arg2
, sign
, &overflow
);
1055 case ROUND_DIV_EXPR
:
1058 res
= wi::div_round (arg1
, arg2
, sign
, &overflow
);
1061 case TRUNC_MOD_EXPR
:
1064 res
= wi::mod_trunc (arg1
, arg2
, sign
, &overflow
);
1067 case FLOOR_MOD_EXPR
:
1070 res
= wi::mod_floor (arg1
, arg2
, sign
, &overflow
);
1076 res
= wi::mod_ceil (arg1
, arg2
, sign
, &overflow
);
1079 case ROUND_MOD_EXPR
:
1082 res
= wi::mod_round (arg1
, arg2
, sign
, &overflow
);
1086 res
= wi::min (arg1
, arg2
, sign
);
1090 res
= wi::max (arg1
, arg2
, sign
);
1097 t
= force_fit_type (type
, res
, overflowable
,
1098 (((sign
== SIGNED
|| overflowable
== -1)
1100 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (parg2
)));
1106 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
1108 return int_const_binop_1 (code
, arg1
, arg2
, 1);
1111 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1112 constant. We assume ARG1 and ARG2 have the same data type, or at least
1113 are the same kind of constant and the same machine mode. Return zero if
1114 combining the constants is not allowed in the current operating mode. */
1117 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1119 /* Sanity check for the recursive cases. */
1126 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg2
) == INTEGER_CST
)
1128 if (code
== POINTER_PLUS_EXPR
)
1129 return int_const_binop (PLUS_EXPR
,
1130 arg1
, fold_convert (TREE_TYPE (arg1
), arg2
));
1132 return int_const_binop (code
, arg1
, arg2
);
1135 if (TREE_CODE (arg1
) == REAL_CST
&& TREE_CODE (arg2
) == REAL_CST
)
1140 REAL_VALUE_TYPE value
;
1141 REAL_VALUE_TYPE result
;
1145 /* The following codes are handled by real_arithmetic. */
1160 d1
= TREE_REAL_CST (arg1
);
1161 d2
= TREE_REAL_CST (arg2
);
1163 type
= TREE_TYPE (arg1
);
1164 mode
= TYPE_MODE (type
);
1166 /* Don't perform operation if we honor signaling NaNs and
1167 either operand is a NaN. */
1168 if (HONOR_SNANS (mode
)
1169 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1172 /* Don't perform operation if it would raise a division
1173 by zero exception. */
1174 if (code
== RDIV_EXPR
1175 && REAL_VALUES_EQUAL (d2
, dconst0
)
1176 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1179 /* If either operand is a NaN, just return it. Otherwise, set up
1180 for floating-point trap; we return an overflow. */
1181 if (REAL_VALUE_ISNAN (d1
))
1183 else if (REAL_VALUE_ISNAN (d2
))
1186 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1187 real_convert (&result
, mode
, &value
);
1189 /* Don't constant fold this floating point operation if
1190 the result has overflowed and flag_trapping_math. */
1191 if (flag_trapping_math
1192 && MODE_HAS_INFINITIES (mode
)
1193 && REAL_VALUE_ISINF (result
)
1194 && !REAL_VALUE_ISINF (d1
)
1195 && !REAL_VALUE_ISINF (d2
))
1198 /* Don't constant fold this floating point operation if the
1199 result may dependent upon the run-time rounding mode and
1200 flag_rounding_math is set, or if GCC's software emulation
1201 is unable to accurately represent the result. */
1202 if ((flag_rounding_math
1203 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1204 && (inexact
|| !real_identical (&result
, &value
)))
1207 t
= build_real (type
, result
);
1209 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1213 if (TREE_CODE (arg1
) == FIXED_CST
)
1215 FIXED_VALUE_TYPE f1
;
1216 FIXED_VALUE_TYPE f2
;
1217 FIXED_VALUE_TYPE result
;
1222 /* The following codes are handled by fixed_arithmetic. */
1228 case TRUNC_DIV_EXPR
:
1229 if (TREE_CODE (arg2
) != FIXED_CST
)
1231 f2
= TREE_FIXED_CST (arg2
);
1237 if (TREE_CODE (arg2
) != INTEGER_CST
)
1240 f2
.data
.high
= w2
.elt (1);
1241 f2
.data
.low
= w2
.elt (0);
1250 f1
= TREE_FIXED_CST (arg1
);
1251 type
= TREE_TYPE (arg1
);
1252 sat_p
= TYPE_SATURATING (type
);
1253 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1254 t
= build_fixed (type
, result
);
1255 /* Propagate overflow flags. */
1256 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1257 TREE_OVERFLOW (t
) = 1;
1261 if (TREE_CODE (arg1
) == COMPLEX_CST
&& TREE_CODE (arg2
) == COMPLEX_CST
)
1263 tree type
= TREE_TYPE (arg1
);
1264 tree r1
= TREE_REALPART (arg1
);
1265 tree i1
= TREE_IMAGPART (arg1
);
1266 tree r2
= TREE_REALPART (arg2
);
1267 tree i2
= TREE_IMAGPART (arg2
);
1274 real
= const_binop (code
, r1
, r2
);
1275 imag
= const_binop (code
, i1
, i2
);
1279 if (COMPLEX_FLOAT_TYPE_P (type
))
1280 return do_mpc_arg2 (arg1
, arg2
, type
,
1281 /* do_nonfinite= */ folding_initializer
,
1284 real
= const_binop (MINUS_EXPR
,
1285 const_binop (MULT_EXPR
, r1
, r2
),
1286 const_binop (MULT_EXPR
, i1
, i2
));
1287 imag
= const_binop (PLUS_EXPR
,
1288 const_binop (MULT_EXPR
, r1
, i2
),
1289 const_binop (MULT_EXPR
, i1
, r2
));
1293 if (COMPLEX_FLOAT_TYPE_P (type
))
1294 return do_mpc_arg2 (arg1
, arg2
, type
,
1295 /* do_nonfinite= */ folding_initializer
,
1298 case TRUNC_DIV_EXPR
:
1300 case FLOOR_DIV_EXPR
:
1301 case ROUND_DIV_EXPR
:
1302 if (flag_complex_method
== 0)
1304 /* Keep this algorithm in sync with
1305 tree-complex.c:expand_complex_div_straight().
1307 Expand complex division to scalars, straightforward algorithm.
1308 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1312 = const_binop (PLUS_EXPR
,
1313 const_binop (MULT_EXPR
, r2
, r2
),
1314 const_binop (MULT_EXPR
, i2
, i2
));
1316 = const_binop (PLUS_EXPR
,
1317 const_binop (MULT_EXPR
, r1
, r2
),
1318 const_binop (MULT_EXPR
, i1
, i2
));
1320 = const_binop (MINUS_EXPR
,
1321 const_binop (MULT_EXPR
, i1
, r2
),
1322 const_binop (MULT_EXPR
, r1
, i2
));
1324 real
= const_binop (code
, t1
, magsquared
);
1325 imag
= const_binop (code
, t2
, magsquared
);
1329 /* Keep this algorithm in sync with
1330 tree-complex.c:expand_complex_div_wide().
1332 Expand complex division to scalars, modified algorithm to minimize
1333 overflow with wide input ranges. */
1334 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1335 fold_abs_const (r2
, TREE_TYPE (type
)),
1336 fold_abs_const (i2
, TREE_TYPE (type
)));
1338 if (integer_nonzerop (compare
))
1340 /* In the TRUE branch, we compute
1342 div = (br * ratio) + bi;
1343 tr = (ar * ratio) + ai;
1344 ti = (ai * ratio) - ar;
1347 tree ratio
= const_binop (code
, r2
, i2
);
1348 tree div
= const_binop (PLUS_EXPR
, i2
,
1349 const_binop (MULT_EXPR
, r2
, ratio
));
1350 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1351 real
= const_binop (PLUS_EXPR
, real
, i1
);
1352 real
= const_binop (code
, real
, div
);
1354 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1355 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1356 imag
= const_binop (code
, imag
, div
);
1360 /* In the FALSE branch, we compute
1362 divisor = (d * ratio) + c;
1363 tr = (b * ratio) + a;
1364 ti = b - (a * ratio);
1367 tree ratio
= const_binop (code
, i2
, r2
);
1368 tree div
= const_binop (PLUS_EXPR
, r2
,
1369 const_binop (MULT_EXPR
, i2
, ratio
));
1371 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1372 real
= const_binop (PLUS_EXPR
, real
, r1
);
1373 real
= const_binop (code
, real
, div
);
1375 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1376 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1377 imag
= const_binop (code
, imag
, div
);
1387 return build_complex (type
, real
, imag
);
1390 if (TREE_CODE (arg1
) == VECTOR_CST
1391 && TREE_CODE (arg2
) == VECTOR_CST
)
1393 tree type
= TREE_TYPE (arg1
);
1394 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1395 tree
*elts
= XALLOCAVEC (tree
, count
);
1397 for (i
= 0; i
< count
; i
++)
1399 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1400 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1402 elts
[i
] = const_binop (code
, elem1
, elem2
);
1404 /* It is possible that const_binop cannot handle the given
1405 code and return NULL_TREE */
1406 if (elts
[i
] == NULL_TREE
)
1410 return build_vector (type
, elts
);
1413 /* Shifts allow a scalar offset for a vector. */
1414 if (TREE_CODE (arg1
) == VECTOR_CST
1415 && TREE_CODE (arg2
) == INTEGER_CST
)
1417 tree type
= TREE_TYPE (arg1
);
1418 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1419 tree
*elts
= XALLOCAVEC (tree
, count
);
1421 for (i
= 0; i
< count
; i
++)
1423 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1425 elts
[i
] = const_binop (code
, elem1
, arg2
);
1427 /* It is possible that const_binop cannot handle the given
1428 code and return NULL_TREE. */
1429 if (elts
[i
] == NULL_TREE
)
1433 return build_vector (type
, elts
);
1438 /* Overload that adds a TYPE parameter to be able to dispatch
1439 to fold_relational_const. */
1442 const_binop (enum tree_code code
, tree type
, tree arg1
, tree arg2
)
1444 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1445 return fold_relational_const (code
, type
, arg1
, arg2
);
1447 /* ??? Until we make the const_binop worker take the type of the
1448 result as argument put those cases that need it here. */
1452 if ((TREE_CODE (arg1
) == REAL_CST
1453 && TREE_CODE (arg2
) == REAL_CST
)
1454 || (TREE_CODE (arg1
) == INTEGER_CST
1455 && TREE_CODE (arg2
) == INTEGER_CST
))
1456 return build_complex (type
, arg1
, arg2
);
1459 case VEC_PACK_TRUNC_EXPR
:
1460 case VEC_PACK_FIX_TRUNC_EXPR
:
1462 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1465 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
/ 2
1466 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
/ 2);
1467 if (TREE_CODE (arg1
) != VECTOR_CST
1468 || TREE_CODE (arg2
) != VECTOR_CST
)
1471 elts
= XALLOCAVEC (tree
, nelts
);
1472 if (!vec_cst_ctor_to_array (arg1
, elts
)
1473 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
/ 2))
1476 for (i
= 0; i
< nelts
; i
++)
1478 elts
[i
] = fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
1479 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
1480 TREE_TYPE (type
), elts
[i
]);
1481 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1485 return build_vector (type
, elts
);
1488 case VEC_WIDEN_MULT_LO_EXPR
:
1489 case VEC_WIDEN_MULT_HI_EXPR
:
1490 case VEC_WIDEN_MULT_EVEN_EXPR
:
1491 case VEC_WIDEN_MULT_ODD_EXPR
:
1493 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
);
1494 unsigned int out
, ofs
, scale
;
1497 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
* 2
1498 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
* 2);
1499 if (TREE_CODE (arg1
) != VECTOR_CST
|| TREE_CODE (arg2
) != VECTOR_CST
)
1502 elts
= XALLOCAVEC (tree
, nelts
* 4);
1503 if (!vec_cst_ctor_to_array (arg1
, elts
)
1504 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
* 2))
1507 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
1508 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? nelts
: 0;
1509 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
1510 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : nelts
;
1511 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
1513 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1516 for (out
= 0; out
< nelts
; out
++)
1518 unsigned int in1
= (out
<< scale
) + ofs
;
1519 unsigned int in2
= in1
+ nelts
* 2;
1522 t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in1
]);
1523 t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in2
]);
1525 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
1527 elts
[out
] = const_binop (MULT_EXPR
, t1
, t2
);
1528 if (elts
[out
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[out
]))
1532 return build_vector (type
, elts
);
1538 if (TREE_CODE_CLASS (code
) != tcc_binary
)
1541 /* Make sure type and arg0 have the same saturating flag. */
1542 gcc_checking_assert (TYPE_SATURATING (type
)
1543 == TYPE_SATURATING (TREE_TYPE (arg1
)));
1545 return const_binop (code
, arg1
, arg2
);
1548 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1549 Return zero if computing the constants is not possible. */
1552 const_unop (enum tree_code code
, tree type
, tree arg0
)
1558 case FIX_TRUNC_EXPR
:
1559 case FIXED_CONVERT_EXPR
:
1560 return fold_convert_const (code
, type
, arg0
);
1562 case ADDR_SPACE_CONVERT_EXPR
:
1563 if (integer_zerop (arg0
))
1564 return fold_convert_const (code
, type
, arg0
);
1567 case VIEW_CONVERT_EXPR
:
1568 return fold_view_convert_expr (type
, arg0
);
1572 /* Can't call fold_negate_const directly here as that doesn't
1573 handle all cases and we might not be able to negate some
1575 tree tem
= fold_negate_expr (UNKNOWN_LOCATION
, arg0
);
1576 if (tem
&& CONSTANT_CLASS_P (tem
))
1582 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
1583 return fold_abs_const (arg0
, type
);
1587 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1589 tree ipart
= fold_negate_const (TREE_IMAGPART (arg0
),
1591 return build_complex (type
, TREE_REALPART (arg0
), ipart
);
1596 if (TREE_CODE (arg0
) == INTEGER_CST
)
1597 return fold_not_const (arg0
, type
);
1598 /* Perform BIT_NOT_EXPR on each element individually. */
1599 else if (TREE_CODE (arg0
) == VECTOR_CST
)
1603 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
1605 elements
= XALLOCAVEC (tree
, count
);
1606 for (i
= 0; i
< count
; i
++)
1608 elem
= VECTOR_CST_ELT (arg0
, i
);
1609 elem
= const_unop (BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
1610 if (elem
== NULL_TREE
)
1615 return build_vector (type
, elements
);
1619 case TRUTH_NOT_EXPR
:
1620 if (TREE_CODE (arg0
) == INTEGER_CST
)
1621 return constant_boolean_node (integer_zerop (arg0
), type
);
1625 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1626 return fold_convert (type
, TREE_REALPART (arg0
));
1630 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1631 return fold_convert (type
, TREE_IMAGPART (arg0
));
1634 case VEC_UNPACK_LO_EXPR
:
1635 case VEC_UNPACK_HI_EXPR
:
1636 case VEC_UNPACK_FLOAT_LO_EXPR
:
1637 case VEC_UNPACK_FLOAT_HI_EXPR
:
1639 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1641 enum tree_code subcode
;
1643 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2);
1644 if (TREE_CODE (arg0
) != VECTOR_CST
)
1647 elts
= XALLOCAVEC (tree
, nelts
* 2);
1648 if (!vec_cst_ctor_to_array (arg0
, elts
))
1651 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
1652 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
1655 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
1658 subcode
= FLOAT_EXPR
;
1660 for (i
= 0; i
< nelts
; i
++)
1662 elts
[i
] = fold_convert_const (subcode
, TREE_TYPE (type
), elts
[i
]);
1663 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1667 return build_vector (type
, elts
);
1670 case REDUC_MIN_EXPR
:
1671 case REDUC_MAX_EXPR
:
1672 case REDUC_PLUS_EXPR
:
1674 unsigned int nelts
, i
;
1676 enum tree_code subcode
;
1678 if (TREE_CODE (arg0
) != VECTOR_CST
)
1680 nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
));
1682 elts
= XALLOCAVEC (tree
, nelts
);
1683 if (!vec_cst_ctor_to_array (arg0
, elts
))
1688 case REDUC_MIN_EXPR
: subcode
= MIN_EXPR
; break;
1689 case REDUC_MAX_EXPR
: subcode
= MAX_EXPR
; break;
1690 case REDUC_PLUS_EXPR
: subcode
= PLUS_EXPR
; break;
1691 default: gcc_unreachable ();
1694 for (i
= 1; i
< nelts
; i
++)
1696 elts
[0] = const_binop (subcode
, elts
[0], elts
[i
]);
1697 if (elts
[0] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[0]))
1711 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1712 indicates which particular sizetype to create. */
1715 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1717 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1720 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1721 is a tree code. The type of the result is taken from the operands.
1722 Both must be equivalent integer types, ala int_binop_types_match_p.
1723 If the operands are constant, so is the result. */
1726 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1728 tree type
= TREE_TYPE (arg0
);
1730 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1731 return error_mark_node
;
1733 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1736 /* Handle the special case of two integer constants faster. */
1737 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1739 /* And some specific cases even faster than that. */
1740 if (code
== PLUS_EXPR
)
1742 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1744 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1747 else if (code
== MINUS_EXPR
)
1749 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1752 else if (code
== MULT_EXPR
)
1754 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1758 /* Handle general case of two integer constants. For sizetype
1759 constant calculations we always want to know about overflow,
1760 even in the unsigned case. */
1761 return int_const_binop_1 (code
, arg0
, arg1
, -1);
1764 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1767 /* Given two values, either both of sizetype or both of bitsizetype,
1768 compute the difference between the two values. Return the value
1769 in signed type corresponding to the type of the operands. */
1772 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1774 tree type
= TREE_TYPE (arg0
);
1777 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1780 /* If the type is already signed, just do the simple thing. */
1781 if (!TYPE_UNSIGNED (type
))
1782 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1784 if (type
== sizetype
)
1786 else if (type
== bitsizetype
)
1787 ctype
= sbitsizetype
;
1789 ctype
= signed_type_for (type
);
1791 /* If either operand is not a constant, do the conversions to the signed
1792 type and subtract. The hardware will do the right thing with any
1793 overflow in the subtraction. */
1794 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1795 return size_binop_loc (loc
, MINUS_EXPR
,
1796 fold_convert_loc (loc
, ctype
, arg0
),
1797 fold_convert_loc (loc
, ctype
, arg1
));
1799 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1800 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1801 overflow) and negate (which can't either). Special-case a result
1802 of zero while we're here. */
1803 if (tree_int_cst_equal (arg0
, arg1
))
1804 return build_int_cst (ctype
, 0);
1805 else if (tree_int_cst_lt (arg1
, arg0
))
1806 return fold_convert_loc (loc
, ctype
,
1807 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1809 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1810 fold_convert_loc (loc
, ctype
,
1811 size_binop_loc (loc
,
1816 /* A subroutine of fold_convert_const handling conversions of an
1817 INTEGER_CST to another integer type. */
1820 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1822 /* Given an integer constant, make new constant with new type,
1823 appropriately sign-extended or truncated. Use widest_int
1824 so that any extension is done according ARG1's type. */
1825 return force_fit_type (type
, wi::to_widest (arg1
),
1826 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1827 TREE_OVERFLOW (arg1
));
1830 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1831 to an integer type. */
1834 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
1836 bool overflow
= false;
1839 /* The following code implements the floating point to integer
1840 conversion rules required by the Java Language Specification,
1841 that IEEE NaNs are mapped to zero and values that overflow
1842 the target precision saturate, i.e. values greater than
1843 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1844 are mapped to INT_MIN. These semantics are allowed by the
1845 C and C++ standards that simply state that the behavior of
1846 FP-to-integer conversion is unspecified upon overflow. */
1850 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1854 case FIX_TRUNC_EXPR
:
1855 real_trunc (&r
, VOIDmode
, &x
);
1862 /* If R is NaN, return zero and show we have an overflow. */
1863 if (REAL_VALUE_ISNAN (r
))
1866 val
= wi::zero (TYPE_PRECISION (type
));
1869 /* See if R is less than the lower bound or greater than the
1874 tree lt
= TYPE_MIN_VALUE (type
);
1875 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1876 if (REAL_VALUES_LESS (r
, l
))
1885 tree ut
= TYPE_MAX_VALUE (type
);
1888 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1889 if (REAL_VALUES_LESS (u
, r
))
1898 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
1900 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1904 /* A subroutine of fold_convert_const handling conversions of a
1905 FIXED_CST to an integer type. */
1908 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1911 double_int temp
, temp_trunc
;
1914 /* Right shift FIXED_CST to temp by fbit. */
1915 temp
= TREE_FIXED_CST (arg1
).data
;
1916 mode
= TREE_FIXED_CST (arg1
).mode
;
1917 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
1919 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
1920 HOST_BITS_PER_DOUBLE_INT
,
1921 SIGNED_FIXED_POINT_MODE_P (mode
));
1923 /* Left shift temp to temp_trunc by fbit. */
1924 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
1925 HOST_BITS_PER_DOUBLE_INT
,
1926 SIGNED_FIXED_POINT_MODE_P (mode
));
1930 temp
= double_int_zero
;
1931 temp_trunc
= double_int_zero
;
1934 /* If FIXED_CST is negative, we need to round the value toward 0.
1935 By checking if the fractional bits are not zero to add 1 to temp. */
1936 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1937 && temp_trunc
.is_negative ()
1938 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
1939 temp
+= double_int_one
;
1941 /* Given a fixed-point constant, make new constant with new type,
1942 appropriately sign-extended or truncated. */
1943 t
= force_fit_type (type
, temp
, -1,
1944 (temp
.is_negative ()
1945 && (TYPE_UNSIGNED (type
)
1946 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1947 | TREE_OVERFLOW (arg1
));
1952 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1953 to another floating point type. */
1956 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1958 REAL_VALUE_TYPE value
;
1961 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1962 t
= build_real (type
, value
);
1964 /* If converting an infinity or NAN to a representation that doesn't
1965 have one, set the overflow bit so that we can produce some kind of
1966 error message at the appropriate point if necessary. It's not the
1967 most user-friendly message, but it's better than nothing. */
1968 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
1969 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
1970 TREE_OVERFLOW (t
) = 1;
1971 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
1972 && !MODE_HAS_NANS (TYPE_MODE (type
)))
1973 TREE_OVERFLOW (t
) = 1;
1974 /* Regular overflow, conversion produced an infinity in a mode that
1975 can't represent them. */
1976 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
1977 && REAL_VALUE_ISINF (value
)
1978 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
1979 TREE_OVERFLOW (t
) = 1;
1981 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1985 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1986 to a floating point type. */
1989 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
1991 REAL_VALUE_TYPE value
;
1994 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
1995 t
= build_real (type
, value
);
1997 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2001 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2002 to another fixed-point type. */
2005 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
2007 FIXED_VALUE_TYPE value
;
2011 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
2012 TYPE_SATURATING (type
));
2013 t
= build_fixed (type
, value
);
2015 /* Propagate overflow flags. */
2016 if (overflow_p
| TREE_OVERFLOW (arg1
))
2017 TREE_OVERFLOW (t
) = 1;
2021 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2022 to a fixed-point type. */
2025 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
2027 FIXED_VALUE_TYPE value
;
2032 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
2034 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
2035 if (TREE_INT_CST_NUNITS (arg1
) == 1)
2036 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? (HOST_WIDE_INT
) -1 : 0;
2038 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
2040 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
), di
,
2041 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
2042 TYPE_SATURATING (type
));
2043 t
= build_fixed (type
, value
);
2045 /* Propagate overflow flags. */
2046 if (overflow_p
| TREE_OVERFLOW (arg1
))
2047 TREE_OVERFLOW (t
) = 1;
2051 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2052 to a fixed-point type. */
2055 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
2057 FIXED_VALUE_TYPE value
;
2061 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
2062 &TREE_REAL_CST (arg1
),
2063 TYPE_SATURATING (type
));
2064 t
= build_fixed (type
, value
);
2066 /* Propagate overflow flags. */
2067 if (overflow_p
| TREE_OVERFLOW (arg1
))
2068 TREE_OVERFLOW (t
) = 1;
2072 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2073 type TYPE. If no simplification can be done return NULL_TREE. */
2076 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
2078 if (TREE_TYPE (arg1
) == type
)
2081 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
2082 || TREE_CODE (type
) == OFFSET_TYPE
)
2084 if (TREE_CODE (arg1
) == INTEGER_CST
)
2085 return fold_convert_const_int_from_int (type
, arg1
);
2086 else if (TREE_CODE (arg1
) == REAL_CST
)
2087 return fold_convert_const_int_from_real (code
, type
, arg1
);
2088 else if (TREE_CODE (arg1
) == FIXED_CST
)
2089 return fold_convert_const_int_from_fixed (type
, arg1
);
2091 else if (TREE_CODE (type
) == REAL_TYPE
)
2093 if (TREE_CODE (arg1
) == INTEGER_CST
)
2094 return build_real_from_int_cst (type
, arg1
);
2095 else if (TREE_CODE (arg1
) == REAL_CST
)
2096 return fold_convert_const_real_from_real (type
, arg1
);
2097 else if (TREE_CODE (arg1
) == FIXED_CST
)
2098 return fold_convert_const_real_from_fixed (type
, arg1
);
2100 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
2102 if (TREE_CODE (arg1
) == FIXED_CST
)
2103 return fold_convert_const_fixed_from_fixed (type
, arg1
);
2104 else if (TREE_CODE (arg1
) == INTEGER_CST
)
2105 return fold_convert_const_fixed_from_int (type
, arg1
);
2106 else if (TREE_CODE (arg1
) == REAL_CST
)
2107 return fold_convert_const_fixed_from_real (type
, arg1
);
2112 /* Construct a vector of zero elements of vector type TYPE. */
2115 build_zero_vector (tree type
)
2119 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2120 return build_vector_from_val (type
, t
);
2123 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2126 fold_convertible_p (const_tree type
, const_tree arg
)
2128 tree orig
= TREE_TYPE (arg
);
2133 if (TREE_CODE (arg
) == ERROR_MARK
2134 || TREE_CODE (type
) == ERROR_MARK
2135 || TREE_CODE (orig
) == ERROR_MARK
)
2138 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2141 switch (TREE_CODE (type
))
2143 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2144 case POINTER_TYPE
: case REFERENCE_TYPE
:
2146 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2147 || TREE_CODE (orig
) == OFFSET_TYPE
)
2149 return (TREE_CODE (orig
) == VECTOR_TYPE
2150 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2153 case FIXED_POINT_TYPE
:
2157 return TREE_CODE (type
) == TREE_CODE (orig
);
2164 /* Convert expression ARG to type TYPE. Used by the middle-end for
2165 simple conversions in preference to calling the front-end's convert. */
2168 fold_convert_loc (location_t loc
, tree type
, tree arg
)
2170 tree orig
= TREE_TYPE (arg
);
2176 if (TREE_CODE (arg
) == ERROR_MARK
2177 || TREE_CODE (type
) == ERROR_MARK
2178 || TREE_CODE (orig
) == ERROR_MARK
)
2179 return error_mark_node
;
2181 switch (TREE_CODE (type
))
2184 case REFERENCE_TYPE
:
2185 /* Handle conversions between pointers to different address spaces. */
2186 if (POINTER_TYPE_P (orig
)
2187 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
2188 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
2189 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
2192 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2194 if (TREE_CODE (arg
) == INTEGER_CST
)
2196 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2197 if (tem
!= NULL_TREE
)
2200 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2201 || TREE_CODE (orig
) == OFFSET_TYPE
)
2202 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2203 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2204 return fold_convert_loc (loc
, type
,
2205 fold_build1_loc (loc
, REALPART_EXPR
,
2206 TREE_TYPE (orig
), arg
));
2207 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2208 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2209 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2212 if (TREE_CODE (arg
) == INTEGER_CST
)
2214 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2215 if (tem
!= NULL_TREE
)
2218 else if (TREE_CODE (arg
) == REAL_CST
)
2220 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2221 if (tem
!= NULL_TREE
)
2224 else if (TREE_CODE (arg
) == FIXED_CST
)
2226 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2227 if (tem
!= NULL_TREE
)
2231 switch (TREE_CODE (orig
))
2234 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2235 case POINTER_TYPE
: case REFERENCE_TYPE
:
2236 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2239 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2241 case FIXED_POINT_TYPE
:
2242 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2245 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2246 return fold_convert_loc (loc
, type
, tem
);
2252 case FIXED_POINT_TYPE
:
2253 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2254 || TREE_CODE (arg
) == REAL_CST
)
2256 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2257 if (tem
!= NULL_TREE
)
2258 goto fold_convert_exit
;
2261 switch (TREE_CODE (orig
))
2263 case FIXED_POINT_TYPE
:
2268 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2271 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2272 return fold_convert_loc (loc
, type
, tem
);
2279 switch (TREE_CODE (orig
))
2282 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2283 case POINTER_TYPE
: case REFERENCE_TYPE
:
2285 case FIXED_POINT_TYPE
:
2286 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2287 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2288 fold_convert_loc (loc
, TREE_TYPE (type
),
2289 integer_zero_node
));
2294 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2296 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2297 TREE_OPERAND (arg
, 0));
2298 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2299 TREE_OPERAND (arg
, 1));
2300 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2303 arg
= save_expr (arg
);
2304 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2305 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2306 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2307 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2308 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2316 if (integer_zerop (arg
))
2317 return build_zero_vector (type
);
2318 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2319 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2320 || TREE_CODE (orig
) == VECTOR_TYPE
);
2321 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2324 tem
= fold_ignored_result (arg
);
2325 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2328 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2329 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2333 protected_set_expr_location_unshare (tem
, loc
);
2337 /* Return false if expr can be assumed not to be an lvalue, true
2341 maybe_lvalue_p (const_tree x
)
2343 /* We only need to wrap lvalue tree codes. */
2344 switch (TREE_CODE (x
))
2357 case ARRAY_RANGE_REF
:
2363 case PREINCREMENT_EXPR
:
2364 case PREDECREMENT_EXPR
:
2366 case TRY_CATCH_EXPR
:
2367 case WITH_CLEANUP_EXPR
:
2376 /* Assume the worst for front-end tree codes. */
2377 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2385 /* Return an expr equal to X but certainly not valid as an lvalue. */
2388 non_lvalue_loc (location_t loc
, tree x
)
2390 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2395 if (! maybe_lvalue_p (x
))
2397 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2400 /* When pedantic, return an expr equal to X but certainly not valid as a
2401 pedantic lvalue. Otherwise, return X. */
2404 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2406 return protected_set_expr_location_unshare (x
, loc
);
2409 /* Given a tree comparison code, return the code that is the logical inverse.
2410 It is generally not safe to do this for floating-point comparisons, except
2411 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2412 ERROR_MARK in this case. */
2415 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2417 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2418 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2428 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2430 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2432 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2434 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2448 return UNORDERED_EXPR
;
2449 case UNORDERED_EXPR
:
2450 return ORDERED_EXPR
;
2456 /* Similar, but return the comparison that results if the operands are
2457 swapped. This is safe for floating-point. */
2460 swap_tree_comparison (enum tree_code code
)
2467 case UNORDERED_EXPR
:
2493 /* Convert a comparison tree code from an enum tree_code representation
2494 into a compcode bit-based encoding. This function is the inverse of
2495 compcode_to_comparison. */
2497 static enum comparison_code
2498 comparison_to_compcode (enum tree_code code
)
2515 return COMPCODE_ORD
;
2516 case UNORDERED_EXPR
:
2517 return COMPCODE_UNORD
;
2519 return COMPCODE_UNLT
;
2521 return COMPCODE_UNEQ
;
2523 return COMPCODE_UNLE
;
2525 return COMPCODE_UNGT
;
2527 return COMPCODE_LTGT
;
2529 return COMPCODE_UNGE
;
2535 /* Convert a compcode bit-based encoding of a comparison operator back
2536 to GCC's enum tree_code representation. This function is the
2537 inverse of comparison_to_compcode. */
2539 static enum tree_code
2540 compcode_to_comparison (enum comparison_code code
)
2557 return ORDERED_EXPR
;
2558 case COMPCODE_UNORD
:
2559 return UNORDERED_EXPR
;
2577 /* Return a tree for the comparison which is the combination of
2578 doing the AND or OR (depending on CODE) of the two operations LCODE
2579 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2580 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2581 if this makes the transformation invalid. */
2584 combine_comparisons (location_t loc
,
2585 enum tree_code code
, enum tree_code lcode
,
2586 enum tree_code rcode
, tree truth_type
,
2587 tree ll_arg
, tree lr_arg
)
2589 bool honor_nans
= HONOR_NANS (ll_arg
);
2590 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2591 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2596 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2597 compcode
= lcompcode
& rcompcode
;
2600 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2601 compcode
= lcompcode
| rcompcode
;
2610 /* Eliminate unordered comparisons, as well as LTGT and ORD
2611 which are not used unless the mode has NaNs. */
2612 compcode
&= ~COMPCODE_UNORD
;
2613 if (compcode
== COMPCODE_LTGT
)
2614 compcode
= COMPCODE_NE
;
2615 else if (compcode
== COMPCODE_ORD
)
2616 compcode
= COMPCODE_TRUE
;
2618 else if (flag_trapping_math
)
2620 /* Check that the original operation and the optimized ones will trap
2621 under the same condition. */
2622 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2623 && (lcompcode
!= COMPCODE_EQ
)
2624 && (lcompcode
!= COMPCODE_ORD
);
2625 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2626 && (rcompcode
!= COMPCODE_EQ
)
2627 && (rcompcode
!= COMPCODE_ORD
);
2628 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2629 && (compcode
!= COMPCODE_EQ
)
2630 && (compcode
!= COMPCODE_ORD
);
2632 /* In a short-circuited boolean expression the LHS might be
2633 such that the RHS, if evaluated, will never trap. For
2634 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2635 if neither x nor y is NaN. (This is a mixed blessing: for
2636 example, the expression above will never trap, hence
2637 optimizing it to x < y would be invalid). */
2638 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2639 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2642 /* If the comparison was short-circuited, and only the RHS
2643 trapped, we may now generate a spurious trap. */
2645 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2648 /* If we changed the conditions that cause a trap, we lose. */
2649 if ((ltrap
|| rtrap
) != trap
)
2653 if (compcode
== COMPCODE_TRUE
)
2654 return constant_boolean_node (true, truth_type
);
2655 else if (compcode
== COMPCODE_FALSE
)
2656 return constant_boolean_node (false, truth_type
);
2659 enum tree_code tcode
;
2661 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2662 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2666 /* Return nonzero if two operands (typically of the same tree node)
2667 are necessarily equal. If either argument has side-effects this
2668 function returns zero. FLAGS modifies behavior as follows:
2670 If OEP_ONLY_CONST is set, only return nonzero for constants.
2671 This function tests whether the operands are indistinguishable;
2672 it does not test whether they are equal using C's == operation.
2673 The distinction is important for IEEE floating point, because
2674 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2675 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2677 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2678 even though it may hold multiple values during a function.
2679 This is because a GCC tree node guarantees that nothing else is
2680 executed between the evaluation of its "operands" (which may often
2681 be evaluated in arbitrary order). Hence if the operands themselves
2682 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2683 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2684 unset means assuming isochronic (or instantaneous) tree equivalence.
2685 Unless comparing arbitrary expression trees, such as from different
2686 statements, this flag can usually be left unset.
2688 If OEP_PURE_SAME is set, then pure functions with identical arguments
2689 are considered the same. It is used when the caller has other ways
2690 to ensure that global memory is unchanged in between. */
2693 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2695 /* If either is ERROR_MARK, they aren't equal. */
2696 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2697 || TREE_TYPE (arg0
) == error_mark_node
2698 || TREE_TYPE (arg1
) == error_mark_node
)
2701 /* Similar, if either does not have a type (like a released SSA name),
2702 they aren't equal. */
2703 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2706 /* Check equality of integer constants before bailing out due to
2707 precision differences. */
2708 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2709 return tree_int_cst_equal (arg0
, arg1
);
2711 /* If both types don't have the same signedness, then we can't consider
2712 them equal. We must check this before the STRIP_NOPS calls
2713 because they may change the signedness of the arguments. As pointers
2714 strictly don't have a signedness, require either two pointers or
2715 two non-pointers as well. */
2716 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2717 || POINTER_TYPE_P (TREE_TYPE (arg0
)) != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2720 /* We cannot consider pointers to different address space equal. */
2721 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && POINTER_TYPE_P (TREE_TYPE (arg1
))
2722 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2723 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2726 /* If both types don't have the same precision, then it is not safe
2728 if (element_precision (TREE_TYPE (arg0
))
2729 != element_precision (TREE_TYPE (arg1
)))
2735 /* In case both args are comparisons but with different comparison
2736 code, try to swap the comparison operands of one arg to produce
2737 a match and compare that variant. */
2738 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2739 && COMPARISON_CLASS_P (arg0
)
2740 && COMPARISON_CLASS_P (arg1
))
2742 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2744 if (TREE_CODE (arg0
) == swap_code
)
2745 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2746 TREE_OPERAND (arg1
, 1), flags
)
2747 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2748 TREE_OPERAND (arg1
, 0), flags
);
2751 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2752 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2753 && !(CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
)))
2756 /* This is needed for conversions and for COMPONENT_REF.
2757 Might as well play it safe and always test this. */
2758 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2759 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2760 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2763 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2764 We don't care about side effects in that case because the SAVE_EXPR
2765 takes care of that for us. In all other cases, two expressions are
2766 equal if they have no side effects. If we have two identical
2767 expressions with side effects that should be treated the same due
2768 to the only side effects being identical SAVE_EXPR's, that will
2769 be detected in the recursive calls below.
2770 If we are taking an invariant address of two identical objects
2771 they are necessarily equal as well. */
2772 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2773 && (TREE_CODE (arg0
) == SAVE_EXPR
2774 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2775 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2778 /* Next handle constant cases, those for which we can return 1 even
2779 if ONLY_CONST is set. */
2780 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2781 switch (TREE_CODE (arg0
))
2784 return tree_int_cst_equal (arg0
, arg1
);
2787 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2788 TREE_FIXED_CST (arg1
));
2791 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2792 TREE_REAL_CST (arg1
)))
2796 if (!HONOR_SIGNED_ZEROS (arg0
))
2798 /* If we do not distinguish between signed and unsigned zero,
2799 consider them equal. */
2800 if (real_zerop (arg0
) && real_zerop (arg1
))
2809 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2812 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2814 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2815 VECTOR_CST_ELT (arg1
, i
), flags
))
2822 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2824 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2828 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2829 && ! memcmp (TREE_STRING_POINTER (arg0
),
2830 TREE_STRING_POINTER (arg1
),
2831 TREE_STRING_LENGTH (arg0
)));
2834 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2835 TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
)
2836 ? OEP_CONSTANT_ADDRESS_OF
| OEP_ADDRESS_OF
: 0);
2841 if (flags
& OEP_ONLY_CONST
)
2844 /* Define macros to test an operand from arg0 and arg1 for equality and a
2845 variant that allows null and views null as being different from any
2846 non-null value. In the latter case, if either is null, the both
2847 must be; otherwise, do the normal comparison. */
2848 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2849 TREE_OPERAND (arg1, N), flags)
2851 #define OP_SAME_WITH_NULL(N) \
2852 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2853 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2855 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2858 /* Two conversions are equal only if signedness and modes match. */
2859 switch (TREE_CODE (arg0
))
2862 case FIX_TRUNC_EXPR
:
2863 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2864 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2874 case tcc_comparison
:
2876 if (OP_SAME (0) && OP_SAME (1))
2879 /* For commutative ops, allow the other order. */
2880 return (commutative_tree_code (TREE_CODE (arg0
))
2881 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2882 TREE_OPERAND (arg1
, 1), flags
)
2883 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2884 TREE_OPERAND (arg1
, 0), flags
));
2887 /* If either of the pointer (or reference) expressions we are
2888 dereferencing contain a side effect, these cannot be equal,
2889 but their addresses can be. */
2890 if ((flags
& OEP_CONSTANT_ADDRESS_OF
) == 0
2891 && (TREE_SIDE_EFFECTS (arg0
)
2892 || TREE_SIDE_EFFECTS (arg1
)))
2895 switch (TREE_CODE (arg0
))
2898 if (!(flags
& OEP_ADDRESS_OF
)
2899 && (TYPE_ALIGN (TREE_TYPE (arg0
))
2900 != TYPE_ALIGN (TREE_TYPE (arg1
))))
2902 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2909 case TARGET_MEM_REF
:
2911 /* Require equal access sizes, and similar pointer types.
2912 We can have incomplete types for array references of
2913 variable-sized arrays from the Fortran frontend
2914 though. Also verify the types are compatible. */
2915 if (!((TYPE_SIZE (TREE_TYPE (arg0
)) == TYPE_SIZE (TREE_TYPE (arg1
))
2916 || (TYPE_SIZE (TREE_TYPE (arg0
))
2917 && TYPE_SIZE (TREE_TYPE (arg1
))
2918 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2919 TYPE_SIZE (TREE_TYPE (arg1
)), flags
)))
2920 && types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
))
2921 && ((flags
& OEP_ADDRESS_OF
)
2922 || (alias_ptr_types_compatible_p
2923 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
2924 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
2925 && (MR_DEPENDENCE_CLIQUE (arg0
)
2926 == MR_DEPENDENCE_CLIQUE (arg1
))
2927 && (MR_DEPENDENCE_BASE (arg0
)
2928 == MR_DEPENDENCE_BASE (arg1
))
2929 && (TYPE_ALIGN (TREE_TYPE (arg0
))
2930 == TYPE_ALIGN (TREE_TYPE (arg1
)))))))
2932 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2933 return (OP_SAME (0) && OP_SAME (1)
2934 /* TARGET_MEM_REF require equal extra operands. */
2935 && (TREE_CODE (arg0
) != TARGET_MEM_REF
2936 || (OP_SAME_WITH_NULL (2)
2937 && OP_SAME_WITH_NULL (3)
2938 && OP_SAME_WITH_NULL (4))));
2941 case ARRAY_RANGE_REF
:
2942 /* Operands 2 and 3 may be null.
2943 Compare the array index by value if it is constant first as we
2944 may have different types but same value here. */
2947 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2948 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
2949 TREE_OPERAND (arg1
, 1))
2951 && OP_SAME_WITH_NULL (2)
2952 && OP_SAME_WITH_NULL (3));
2955 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2956 may be NULL when we're called to compare MEM_EXPRs. */
2957 if (!OP_SAME_WITH_NULL (0)
2960 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2961 return OP_SAME_WITH_NULL (2);
2966 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2967 return OP_SAME (1) && OP_SAME (2);
2973 case tcc_expression
:
2974 switch (TREE_CODE (arg0
))
2977 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2978 TREE_OPERAND (arg1
, 0),
2979 flags
| OEP_ADDRESS_OF
);
2981 case TRUTH_NOT_EXPR
:
2984 case TRUTH_ANDIF_EXPR
:
2985 case TRUTH_ORIF_EXPR
:
2986 return OP_SAME (0) && OP_SAME (1);
2989 case WIDEN_MULT_PLUS_EXPR
:
2990 case WIDEN_MULT_MINUS_EXPR
:
2993 /* The multiplcation operands are commutative. */
2996 case TRUTH_AND_EXPR
:
2998 case TRUTH_XOR_EXPR
:
2999 if (OP_SAME (0) && OP_SAME (1))
3002 /* Otherwise take into account this is a commutative operation. */
3003 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3004 TREE_OPERAND (arg1
, 1), flags
)
3005 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3006 TREE_OPERAND (arg1
, 0), flags
));
3011 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3018 switch (TREE_CODE (arg0
))
3021 if ((CALL_EXPR_FN (arg0
) == NULL_TREE
)
3022 != (CALL_EXPR_FN (arg1
) == NULL_TREE
))
3023 /* If not both CALL_EXPRs are either internal or normal function
3024 functions, then they are not equal. */
3026 else if (CALL_EXPR_FN (arg0
) == NULL_TREE
)
3028 /* If the CALL_EXPRs call different internal functions, then they
3030 if (CALL_EXPR_IFN (arg0
) != CALL_EXPR_IFN (arg1
))
3035 /* If the CALL_EXPRs call different functions, then they are not
3037 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3043 unsigned int cef
= call_expr_flags (arg0
);
3044 if (flags
& OEP_PURE_SAME
)
3045 cef
&= ECF_CONST
| ECF_PURE
;
3052 /* Now see if all the arguments are the same. */
3054 const_call_expr_arg_iterator iter0
, iter1
;
3056 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3057 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3059 a0
= next_const_call_expr_arg (&iter0
),
3060 a1
= next_const_call_expr_arg (&iter1
))
3061 if (! operand_equal_p (a0
, a1
, flags
))
3064 /* If we get here and both argument lists are exhausted
3065 then the CALL_EXPRs are equal. */
3066 return ! (a0
|| a1
);
3072 case tcc_declaration
:
3073 /* Consider __builtin_sqrt equal to sqrt. */
3074 return (TREE_CODE (arg0
) == FUNCTION_DECL
3075 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
3076 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3077 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
3084 #undef OP_SAME_WITH_NULL
3087 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3088 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3090 When in doubt, return 0. */
3093 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
3095 int unsignedp1
, unsignedpo
;
3096 tree primarg0
, primarg1
, primother
;
3097 unsigned int correct_width
;
3099 if (operand_equal_p (arg0
, arg1
, 0))
3102 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
3103 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
3106 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3107 and see if the inner values are the same. This removes any
3108 signedness comparison, which doesn't matter here. */
3109 primarg0
= arg0
, primarg1
= arg1
;
3110 STRIP_NOPS (primarg0
);
3111 STRIP_NOPS (primarg1
);
3112 if (operand_equal_p (primarg0
, primarg1
, 0))
3115 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3116 actual comparison operand, ARG0.
3118 First throw away any conversions to wider types
3119 already present in the operands. */
3121 primarg1
= get_narrower (arg1
, &unsignedp1
);
3122 primother
= get_narrower (other
, &unsignedpo
);
3124 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
3125 if (unsignedp1
== unsignedpo
3126 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
3127 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
3129 tree type
= TREE_TYPE (arg0
);
3131 /* Make sure shorter operand is extended the right way
3132 to match the longer operand. */
3133 primarg1
= fold_convert (signed_or_unsigned_type_for
3134 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
3136 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
3143 /* See if ARG is an expression that is either a comparison or is performing
3144 arithmetic on comparisons. The comparisons must only be comparing
3145 two different values, which will be stored in *CVAL1 and *CVAL2; if
3146 they are nonzero it means that some operands have already been found.
3147 No variables may be used anywhere else in the expression except in the
3148 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3149 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3151 If this is true, return 1. Otherwise, return zero. */
3154 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
3156 enum tree_code code
= TREE_CODE (arg
);
3157 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3159 /* We can handle some of the tcc_expression cases here. */
3160 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3162 else if (tclass
== tcc_expression
3163 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
3164 || code
== COMPOUND_EXPR
))
3165 tclass
= tcc_binary
;
3167 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
3168 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
3170 /* If we've already found a CVAL1 or CVAL2, this expression is
3171 two complex to handle. */
3172 if (*cval1
|| *cval2
)
3182 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
3185 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
3186 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3187 cval1
, cval2
, save_p
));
3192 case tcc_expression
:
3193 if (code
== COND_EXPR
)
3194 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
3195 cval1
, cval2
, save_p
)
3196 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3197 cval1
, cval2
, save_p
)
3198 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
3199 cval1
, cval2
, save_p
));
3202 case tcc_comparison
:
3203 /* First see if we can handle the first operand, then the second. For
3204 the second operand, we know *CVAL1 can't be zero. It must be that
3205 one side of the comparison is each of the values; test for the
3206 case where this isn't true by failing if the two operands
3209 if (operand_equal_p (TREE_OPERAND (arg
, 0),
3210 TREE_OPERAND (arg
, 1), 0))
3214 *cval1
= TREE_OPERAND (arg
, 0);
3215 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
3217 else if (*cval2
== 0)
3218 *cval2
= TREE_OPERAND (arg
, 0);
3219 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
3224 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
3226 else if (*cval2
== 0)
3227 *cval2
= TREE_OPERAND (arg
, 1);
3228 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
3240 /* ARG is a tree that is known to contain just arithmetic operations and
3241 comparisons. Evaluate the operations in the tree substituting NEW0 for
3242 any occurrence of OLD0 as an operand of a comparison and likewise for
3246 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
3247 tree old1
, tree new1
)
3249 tree type
= TREE_TYPE (arg
);
3250 enum tree_code code
= TREE_CODE (arg
);
3251 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3253 /* We can handle some of the tcc_expression cases here. */
3254 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3256 else if (tclass
== tcc_expression
3257 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
3258 tclass
= tcc_binary
;
3263 return fold_build1_loc (loc
, code
, type
,
3264 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3265 old0
, new0
, old1
, new1
));
3268 return fold_build2_loc (loc
, code
, type
,
3269 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3270 old0
, new0
, old1
, new1
),
3271 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3272 old0
, new0
, old1
, new1
));
3274 case tcc_expression
:
3278 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
3282 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
3286 return fold_build3_loc (loc
, code
, type
,
3287 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3288 old0
, new0
, old1
, new1
),
3289 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3290 old0
, new0
, old1
, new1
),
3291 eval_subst (loc
, TREE_OPERAND (arg
, 2),
3292 old0
, new0
, old1
, new1
));
3296 /* Fall through - ??? */
3298 case tcc_comparison
:
3300 tree arg0
= TREE_OPERAND (arg
, 0);
3301 tree arg1
= TREE_OPERAND (arg
, 1);
3303 /* We need to check both for exact equality and tree equality. The
3304 former will be true if the operand has a side-effect. In that
3305 case, we know the operand occurred exactly once. */
3307 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3309 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3312 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3314 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
3317 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
3325 /* Return a tree for the case when the result of an expression is RESULT
3326 converted to TYPE and OMITTED was previously an operand of the expression
3327 but is now not needed (e.g., we folded OMITTED * 0).
3329 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3330 the conversion of RESULT to TYPE. */
3333 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
3335 tree t
= fold_convert_loc (loc
, type
, result
);
3337 /* If the resulting operand is an empty statement, just return the omitted
3338 statement casted to void. */
3339 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
3340 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3341 fold_ignored_result (omitted
));
3343 if (TREE_SIDE_EFFECTS (omitted
))
3344 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3345 fold_ignored_result (omitted
), t
);
3347 return non_lvalue_loc (loc
, t
);
3350 /* Return a tree for the case when the result of an expression is RESULT
3351 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3352 of the expression but are now not needed.
3354 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3355 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3356 evaluated before OMITTED2. Otherwise, if neither has side effects,
3357 just do the conversion of RESULT to TYPE. */
3360 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3361 tree omitted1
, tree omitted2
)
3363 tree t
= fold_convert_loc (loc
, type
, result
);
3365 if (TREE_SIDE_EFFECTS (omitted2
))
3366 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3367 if (TREE_SIDE_EFFECTS (omitted1
))
3368 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3370 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3374 /* Return a simplified tree node for the truth-negation of ARG. This
3375 never alters ARG itself. We assume that ARG is an operation that
3376 returns a truth value (0 or 1).
3378 FIXME: one would think we would fold the result, but it causes
3379 problems with the dominator optimizer. */
3382 fold_truth_not_expr (location_t loc
, tree arg
)
3384 tree type
= TREE_TYPE (arg
);
3385 enum tree_code code
= TREE_CODE (arg
);
3386 location_t loc1
, loc2
;
3388 /* If this is a comparison, we can simply invert it, except for
3389 floating-point non-equality comparisons, in which case we just
3390 enclose a TRUTH_NOT_EXPR around what we have. */
3392 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3394 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3395 if (FLOAT_TYPE_P (op_type
)
3396 && flag_trapping_math
3397 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3398 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3401 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
3402 if (code
== ERROR_MARK
)
3405 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3406 TREE_OPERAND (arg
, 1));
3412 return constant_boolean_node (integer_zerop (arg
), type
);
3414 case TRUTH_AND_EXPR
:
3415 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3416 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3417 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3418 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3419 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3422 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3423 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3424 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3425 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3426 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3428 case TRUTH_XOR_EXPR
:
3429 /* Here we can invert either operand. We invert the first operand
3430 unless the second operand is a TRUTH_NOT_EXPR in which case our
3431 result is the XOR of the first operand with the inside of the
3432 negation of the second operand. */
3434 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3435 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3436 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3438 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3439 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3440 TREE_OPERAND (arg
, 1));
3442 case TRUTH_ANDIF_EXPR
:
3443 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3444 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3445 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3446 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3447 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3449 case TRUTH_ORIF_EXPR
:
3450 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3451 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3452 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3453 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3454 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3456 case TRUTH_NOT_EXPR
:
3457 return TREE_OPERAND (arg
, 0);
3461 tree arg1
= TREE_OPERAND (arg
, 1);
3462 tree arg2
= TREE_OPERAND (arg
, 2);
3464 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3465 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3467 /* A COND_EXPR may have a throw as one operand, which
3468 then has void type. Just leave void operands
3470 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3471 VOID_TYPE_P (TREE_TYPE (arg1
))
3472 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3473 VOID_TYPE_P (TREE_TYPE (arg2
))
3474 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3478 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3479 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3480 TREE_OPERAND (arg
, 0),
3481 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3483 case NON_LVALUE_EXPR
:
3484 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3485 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3488 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3489 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3491 /* ... fall through ... */
3494 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3495 return build1_loc (loc
, TREE_CODE (arg
), type
,
3496 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3499 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3501 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3504 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3506 case CLEANUP_POINT_EXPR
:
3507 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3508 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3509 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3516 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3517 assume that ARG is an operation that returns a truth value (0 or 1
3518 for scalars, 0 or -1 for vectors). Return the folded expression if
3519 folding is successful. Otherwise, return NULL_TREE. */
3522 fold_invert_truthvalue (location_t loc
, tree arg
)
3524 tree type
= TREE_TYPE (arg
);
3525 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
3531 /* Return a simplified tree node for the truth-negation of ARG. This
3532 never alters ARG itself. We assume that ARG is an operation that
3533 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3536 invert_truthvalue_loc (location_t loc
, tree arg
)
3538 if (TREE_CODE (arg
) == ERROR_MARK
)
3541 tree type
= TREE_TYPE (arg
);
3542 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
3548 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3549 operands are another bit-wise operation with a common input. If so,
3550 distribute the bit operations to save an operation and possibly two if
3551 constants are involved. For example, convert
3552 (A | B) & (A | C) into A | (B & C)
3553 Further simplification will occur if B and C are constants.
3555 If this optimization cannot be done, 0 will be returned. */
3558 distribute_bit_expr (location_t loc
, enum tree_code code
, tree type
,
3559 tree arg0
, tree arg1
)
3564 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3565 || TREE_CODE (arg0
) == code
3566 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3567 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
3570 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3572 common
= TREE_OPERAND (arg0
, 0);
3573 left
= TREE_OPERAND (arg0
, 1);
3574 right
= TREE_OPERAND (arg1
, 1);
3576 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3578 common
= TREE_OPERAND (arg0
, 0);
3579 left
= TREE_OPERAND (arg0
, 1);
3580 right
= TREE_OPERAND (arg1
, 0);
3582 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3584 common
= TREE_OPERAND (arg0
, 1);
3585 left
= TREE_OPERAND (arg0
, 0);
3586 right
= TREE_OPERAND (arg1
, 1);
3588 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3590 common
= TREE_OPERAND (arg0
, 1);
3591 left
= TREE_OPERAND (arg0
, 0);
3592 right
= TREE_OPERAND (arg1
, 0);
3597 common
= fold_convert_loc (loc
, type
, common
);
3598 left
= fold_convert_loc (loc
, type
, left
);
3599 right
= fold_convert_loc (loc
, type
, right
);
3600 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, common
,
3601 fold_build2_loc (loc
, code
, type
, left
, right
));
3604 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3605 with code CODE. This optimization is unsafe. */
3607 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3608 tree arg0
, tree arg1
)
3610 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3611 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3613 /* (A / C) +- (B / C) -> (A +- B) / C. */
3615 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3616 TREE_OPERAND (arg1
, 1), 0))
3617 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3618 fold_build2_loc (loc
, code
, type
,
3619 TREE_OPERAND (arg0
, 0),
3620 TREE_OPERAND (arg1
, 0)),
3621 TREE_OPERAND (arg0
, 1));
3623 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3624 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3625 TREE_OPERAND (arg1
, 0), 0)
3626 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3627 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3629 REAL_VALUE_TYPE r0
, r1
;
3630 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3631 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3633 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3635 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3636 real_arithmetic (&r0
, code
, &r0
, &r1
);
3637 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3638 TREE_OPERAND (arg0
, 0),
3639 build_real (type
, r0
));
3645 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3646 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3649 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3650 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
, int unsignedp
)
3652 tree result
, bftype
;
3656 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3657 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3658 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3659 && tree_fits_shwi_p (size
)
3660 && tree_to_shwi (size
) == bitsize
)
3661 return fold_convert_loc (loc
, type
, inner
);
3665 if (TYPE_PRECISION (bftype
) != bitsize
3666 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3667 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3669 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3670 size_int (bitsize
), bitsize_int (bitpos
));
3673 result
= fold_convert_loc (loc
, type
, result
);
3678 /* Optimize a bit-field compare.
3680 There are two cases: First is a compare against a constant and the
3681 second is a comparison of two items where the fields are at the same
3682 bit position relative to the start of a chunk (byte, halfword, word)
3683 large enough to contain it. In these cases we can avoid the shift
3684 implicit in bitfield extractions.
3686 For constants, we emit a compare of the shifted constant with the
3687 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3688 compared. For two fields at the same position, we do the ANDs with the
3689 similar mask and compare the result of the ANDs.
3691 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3692 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3693 are the left and right operands of the comparison, respectively.
3695 If the optimization described above can be done, we return the resulting
3696 tree. Otherwise we return zero. */
3699 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3700 tree compare_type
, tree lhs
, tree rhs
)
3702 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3703 tree type
= TREE_TYPE (lhs
);
3705 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3706 machine_mode lmode
, rmode
, nmode
;
3707 int lunsignedp
, runsignedp
;
3708 int lvolatilep
= 0, rvolatilep
= 0;
3709 tree linner
, rinner
= NULL_TREE
;
3713 /* Get all the information about the extractions being done. If the bit size
3714 if the same as the size of the underlying object, we aren't doing an
3715 extraction at all and so can do nothing. We also don't want to
3716 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3717 then will no longer be able to replace it. */
3718 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3719 &lunsignedp
, &lvolatilep
, false);
3720 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3721 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
|| lvolatilep
)
3726 /* If this is not a constant, we can only do something if bit positions,
3727 sizes, and signedness are the same. */
3728 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3729 &runsignedp
, &rvolatilep
, false);
3731 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3732 || lunsignedp
!= runsignedp
|| offset
!= 0
3733 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
|| rvolatilep
)
3737 /* See if we can find a mode to refer to this field. We should be able to,
3738 but fail if we can't. */
3739 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3740 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3741 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3742 TYPE_ALIGN (TREE_TYPE (rinner
))),
3744 if (nmode
== VOIDmode
)
3747 /* Set signed and unsigned types of the precision of this mode for the
3749 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3751 /* Compute the bit position and size for the new reference and our offset
3752 within it. If the new reference is the same size as the original, we
3753 won't optimize anything, so return zero. */
3754 nbitsize
= GET_MODE_BITSIZE (nmode
);
3755 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3757 if (nbitsize
== lbitsize
)
3760 if (BYTES_BIG_ENDIAN
)
3761 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3763 /* Make the mask to be used against the extracted field. */
3764 mask
= build_int_cst_type (unsigned_type
, -1);
3765 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3766 mask
= const_binop (RSHIFT_EXPR
, mask
,
3767 size_int (nbitsize
- lbitsize
- lbitpos
));
3770 /* If not comparing with constant, just rework the comparison
3772 return fold_build2_loc (loc
, code
, compare_type
,
3773 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3774 make_bit_field_ref (loc
, linner
,
3779 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3780 make_bit_field_ref (loc
, rinner
,
3786 /* Otherwise, we are handling the constant case. See if the constant is too
3787 big for the field. Warn and return a tree of for 0 (false) if so. We do
3788 this not only for its own sake, but to avoid having to test for this
3789 error case below. If we didn't, we might generate wrong code.
3791 For unsigned fields, the constant shifted right by the field length should
3792 be all zero. For signed fields, the high-order bits should agree with
3797 if (wi::lrshift (rhs
, lbitsize
) != 0)
3799 warning (0, "comparison is always %d due to width of bit-field",
3801 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3806 wide_int tem
= wi::arshift (rhs
, lbitsize
- 1);
3807 if (tem
!= 0 && tem
!= -1)
3809 warning (0, "comparison is always %d due to width of bit-field",
3811 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3815 /* Single-bit compares should always be against zero. */
3816 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3818 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3819 rhs
= build_int_cst (type
, 0);
3822 /* Make a new bitfield reference, shift the constant over the
3823 appropriate number of bits and mask it with the computed mask
3824 (in case this was a signed field). If we changed it, make a new one. */
3825 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3827 rhs
= const_binop (BIT_AND_EXPR
,
3828 const_binop (LSHIFT_EXPR
,
3829 fold_convert_loc (loc
, unsigned_type
, rhs
),
3830 size_int (lbitpos
)),
3833 lhs
= build2_loc (loc
, code
, compare_type
,
3834 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3838 /* Subroutine for fold_truth_andor_1: decode a field reference.
3840 If EXP is a comparison reference, we return the innermost reference.
3842 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3843 set to the starting bit number.
3845 If the innermost field can be completely contained in a mode-sized
3846 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3848 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3849 otherwise it is not changed.
3851 *PUNSIGNEDP is set to the signedness of the field.
3853 *PMASK is set to the mask used. This is either contained in a
3854 BIT_AND_EXPR or derived from the width of the field.
3856 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3858 Return 0 if this is not a component reference or is one that we can't
3859 do anything with. */
3862 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3863 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
3864 int *punsignedp
, int *pvolatilep
,
3865 tree
*pmask
, tree
*pand_mask
)
3867 tree outer_type
= 0;
3869 tree mask
, inner
, offset
;
3871 unsigned int precision
;
3873 /* All the optimizations using this function assume integer fields.
3874 There are problems with FP fields since the type_for_size call
3875 below can fail for, e.g., XFmode. */
3876 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3879 /* We are interested in the bare arrangement of bits, so strip everything
3880 that doesn't affect the machine mode. However, record the type of the
3881 outermost expression if it may matter below. */
3882 if (CONVERT_EXPR_P (exp
)
3883 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3884 outer_type
= TREE_TYPE (exp
);
3887 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3889 and_mask
= TREE_OPERAND (exp
, 1);
3890 exp
= TREE_OPERAND (exp
, 0);
3891 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3892 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3896 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3897 punsignedp
, pvolatilep
, false);
3898 if ((inner
== exp
&& and_mask
== 0)
3899 || *pbitsize
< 0 || offset
!= 0
3900 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3903 /* If the number of bits in the reference is the same as the bitsize of
3904 the outer type, then the outer type gives the signedness. Otherwise
3905 (in case of a small bitfield) the signedness is unchanged. */
3906 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3907 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3909 /* Compute the mask to access the bitfield. */
3910 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3911 precision
= TYPE_PRECISION (unsigned_type
);
3913 mask
= build_int_cst_type (unsigned_type
, -1);
3915 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3916 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3918 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3920 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3921 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3924 *pand_mask
= and_mask
;
3928 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3929 bit positions and MASK is SIGNED. */
3932 all_ones_mask_p (const_tree mask
, unsigned int size
)
3934 tree type
= TREE_TYPE (mask
);
3935 unsigned int precision
= TYPE_PRECISION (type
);
3937 /* If this function returns true when the type of the mask is
3938 UNSIGNED, then there will be errors. In particular see
3939 gcc.c-torture/execute/990326-1.c. There does not appear to be
3940 any documentation paper trail as to why this is so. But the pre
3941 wide-int worked with that restriction and it has been preserved
3943 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
3946 return wi::mask (size
, false, precision
) == mask
;
3949 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3950 represents the sign bit of EXP's type. If EXP represents a sign
3951 or zero extension, also test VAL against the unextended type.
3952 The return value is the (sub)expression whose sign bit is VAL,
3953 or NULL_TREE otherwise. */
3956 sign_bit_p (tree exp
, const_tree val
)
3961 /* Tree EXP must have an integral type. */
3962 t
= TREE_TYPE (exp
);
3963 if (! INTEGRAL_TYPE_P (t
))
3966 /* Tree VAL must be an integer constant. */
3967 if (TREE_CODE (val
) != INTEGER_CST
3968 || TREE_OVERFLOW (val
))
3971 width
= TYPE_PRECISION (t
);
3972 if (wi::only_sign_bit_p (val
, width
))
3975 /* Handle extension from a narrower type. */
3976 if (TREE_CODE (exp
) == NOP_EXPR
3977 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3978 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3983 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3984 to be evaluated unconditionally. */
3987 simple_operand_p (const_tree exp
)
3989 /* Strip any conversions that don't change the machine mode. */
3992 return (CONSTANT_CLASS_P (exp
)
3993 || TREE_CODE (exp
) == SSA_NAME
3995 && ! TREE_ADDRESSABLE (exp
)
3996 && ! TREE_THIS_VOLATILE (exp
)
3997 && ! DECL_NONLOCAL (exp
)
3998 /* Don't regard global variables as simple. They may be
3999 allocated in ways unknown to the compiler (shared memory,
4000 #pragma weak, etc). */
4001 && ! TREE_PUBLIC (exp
)
4002 && ! DECL_EXTERNAL (exp
)
4003 /* Weakrefs are not safe to be read, since they can be NULL.
4004 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4005 have DECL_WEAK flag set. */
4006 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
4007 /* Loading a static variable is unduly expensive, but global
4008 registers aren't expensive. */
4009 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
4012 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4013 to be evaluated unconditionally.
4014 I addition to simple_operand_p, we assume that comparisons, conversions,
4015 and logic-not operations are simple, if their operands are simple, too. */
4018 simple_operand_p_2 (tree exp
)
4020 enum tree_code code
;
4022 if (TREE_SIDE_EFFECTS (exp
)
4023 || tree_could_trap_p (exp
))
4026 while (CONVERT_EXPR_P (exp
))
4027 exp
= TREE_OPERAND (exp
, 0);
4029 code
= TREE_CODE (exp
);
4031 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4032 return (simple_operand_p (TREE_OPERAND (exp
, 0))
4033 && simple_operand_p (TREE_OPERAND (exp
, 1)));
4035 if (code
== TRUTH_NOT_EXPR
)
4036 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
4038 return simple_operand_p (exp
);
4042 /* The following functions are subroutines to fold_range_test and allow it to
4043 try to change a logical combination of comparisons into a range test.
4046 X == 2 || X == 3 || X == 4 || X == 5
4050 (unsigned) (X - 2) <= 3
4052 We describe each set of comparisons as being either inside or outside
4053 a range, using a variable named like IN_P, and then describe the
4054 range with a lower and upper bound. If one of the bounds is omitted,
4055 it represents either the highest or lowest value of the type.
4057 In the comments below, we represent a range by two numbers in brackets
4058 preceded by a "+" to designate being inside that range, or a "-" to
4059 designate being outside that range, so the condition can be inverted by
4060 flipping the prefix. An omitted bound is represented by a "-". For
4061 example, "- [-, 10]" means being outside the range starting at the lowest
4062 possible value and ending at 10, in other words, being greater than 10.
4063 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4066 We set up things so that the missing bounds are handled in a consistent
4067 manner so neither a missing bound nor "true" and "false" need to be
4068 handled using a special case. */
4070 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4071 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4072 and UPPER1_P are nonzero if the respective argument is an upper bound
4073 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4074 must be specified for a comparison. ARG1 will be converted to ARG0's
4075 type if both are specified. */
4078 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
4079 tree arg1
, int upper1_p
)
4085 /* If neither arg represents infinity, do the normal operation.
4086 Else, if not a comparison, return infinity. Else handle the special
4087 comparison rules. Note that most of the cases below won't occur, but
4088 are handled for consistency. */
4090 if (arg0
!= 0 && arg1
!= 0)
4092 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
4093 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
4095 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
4098 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4101 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4102 for neither. In real maths, we cannot assume open ended ranges are
4103 the same. But, this is computer arithmetic, where numbers are finite.
4104 We can therefore make the transformation of any unbounded range with
4105 the value Z, Z being greater than any representable number. This permits
4106 us to treat unbounded ranges as equal. */
4107 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
4108 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
4112 result
= sgn0
== sgn1
;
4115 result
= sgn0
!= sgn1
;
4118 result
= sgn0
< sgn1
;
4121 result
= sgn0
<= sgn1
;
4124 result
= sgn0
> sgn1
;
4127 result
= sgn0
>= sgn1
;
4133 return constant_boolean_node (result
, type
);
4136 /* Helper routine for make_range. Perform one step for it, return
4137 new expression if the loop should continue or NULL_TREE if it should
4141 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
4142 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
4143 bool *strict_overflow_p
)
4145 tree arg0_type
= TREE_TYPE (arg0
);
4146 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
4147 int in_p
= *p_in_p
, n_in_p
;
4151 case TRUTH_NOT_EXPR
:
4152 /* We can only do something if the range is testing for zero. */
4153 if (low
== NULL_TREE
|| high
== NULL_TREE
4154 || ! integer_zerop (low
) || ! integer_zerop (high
))
4159 case EQ_EXPR
: case NE_EXPR
:
4160 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4161 /* We can only do something if the range is testing for zero
4162 and if the second operand is an integer constant. Note that
4163 saying something is "in" the range we make is done by
4164 complementing IN_P since it will set in the initial case of
4165 being not equal to zero; "out" is leaving it alone. */
4166 if (low
== NULL_TREE
|| high
== NULL_TREE
4167 || ! integer_zerop (low
) || ! integer_zerop (high
)
4168 || TREE_CODE (arg1
) != INTEGER_CST
)
4173 case NE_EXPR
: /* - [c, c] */
4176 case EQ_EXPR
: /* + [c, c] */
4177 in_p
= ! in_p
, low
= high
= arg1
;
4179 case GT_EXPR
: /* - [-, c] */
4180 low
= 0, high
= arg1
;
4182 case GE_EXPR
: /* + [c, -] */
4183 in_p
= ! in_p
, low
= arg1
, high
= 0;
4185 case LT_EXPR
: /* - [c, -] */
4186 low
= arg1
, high
= 0;
4188 case LE_EXPR
: /* + [-, c] */
4189 in_p
= ! in_p
, low
= 0, high
= arg1
;
4195 /* If this is an unsigned comparison, we also know that EXP is
4196 greater than or equal to zero. We base the range tests we make
4197 on that fact, so we record it here so we can parse existing
4198 range tests. We test arg0_type since often the return type
4199 of, e.g. EQ_EXPR, is boolean. */
4200 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4202 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4204 build_int_cst (arg0_type
, 0),
4208 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
4210 /* If the high bound is missing, but we have a nonzero low
4211 bound, reverse the range so it goes from zero to the low bound
4213 if (high
== 0 && low
&& ! integer_zerop (low
))
4216 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
4217 build_int_cst (TREE_TYPE (low
), 1), 0);
4218 low
= build_int_cst (arg0_type
, 0);
4228 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4229 low and high are non-NULL, then normalize will DTRT. */
4230 if (!TYPE_UNSIGNED (arg0_type
)
4231 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4233 if (low
== NULL_TREE
)
4234 low
= TYPE_MIN_VALUE (arg0_type
);
4235 if (high
== NULL_TREE
)
4236 high
= TYPE_MAX_VALUE (arg0_type
);
4239 /* (-x) IN [a,b] -> x in [-b, -a] */
4240 n_low
= range_binop (MINUS_EXPR
, exp_type
,
4241 build_int_cst (exp_type
, 0),
4243 n_high
= range_binop (MINUS_EXPR
, exp_type
,
4244 build_int_cst (exp_type
, 0),
4246 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
4252 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
4253 build_int_cst (exp_type
, 1));
4257 if (TREE_CODE (arg1
) != INTEGER_CST
)
4260 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4261 move a constant to the other side. */
4262 if (!TYPE_UNSIGNED (arg0_type
)
4263 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4266 /* If EXP is signed, any overflow in the computation is undefined,
4267 so we don't worry about it so long as our computations on
4268 the bounds don't overflow. For unsigned, overflow is defined
4269 and this is exactly the right thing. */
4270 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4271 arg0_type
, low
, 0, arg1
, 0);
4272 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4273 arg0_type
, high
, 1, arg1
, 0);
4274 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4275 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4278 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4279 *strict_overflow_p
= true;
4282 /* Check for an unsigned range which has wrapped around the maximum
4283 value thus making n_high < n_low, and normalize it. */
4284 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4286 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4287 build_int_cst (TREE_TYPE (n_high
), 1), 0);
4288 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4289 build_int_cst (TREE_TYPE (n_low
), 1), 0);
4291 /* If the range is of the form +/- [ x+1, x ], we won't
4292 be able to normalize it. But then, it represents the
4293 whole range or the empty set, so make it
4295 if (tree_int_cst_equal (n_low
, low
)
4296 && tree_int_cst_equal (n_high
, high
))
4302 low
= n_low
, high
= n_high
;
4310 case NON_LVALUE_EXPR
:
4311 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4314 if (! INTEGRAL_TYPE_P (arg0_type
)
4315 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4316 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4319 n_low
= low
, n_high
= high
;
4322 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
4325 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
4327 /* If we're converting arg0 from an unsigned type, to exp,
4328 a signed type, we will be doing the comparison as unsigned.
4329 The tests above have already verified that LOW and HIGH
4332 So we have to ensure that we will handle large unsigned
4333 values the same way that the current signed bounds treat
4336 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4340 /* For fixed-point modes, we need to pass the saturating flag
4341 as the 2nd parameter. */
4342 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
4344 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
4345 TYPE_SATURATING (arg0_type
));
4348 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4350 /* A range without an upper bound is, naturally, unbounded.
4351 Since convert would have cropped a very large value, use
4352 the max value for the destination type. */
4354 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4355 : TYPE_MAX_VALUE (arg0_type
);
4357 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4358 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4359 fold_convert_loc (loc
, arg0_type
,
4361 build_int_cst (arg0_type
, 1));
4363 /* If the low bound is specified, "and" the range with the
4364 range for which the original unsigned value will be
4368 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4369 1, fold_convert_loc (loc
, arg0_type
,
4374 in_p
= (n_in_p
== in_p
);
4378 /* Otherwise, "or" the range with the range of the input
4379 that will be interpreted as negative. */
4380 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4381 1, fold_convert_loc (loc
, arg0_type
,
4386 in_p
= (in_p
!= n_in_p
);
4400 /* Given EXP, a logical expression, set the range it is testing into
4401 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4402 actually being tested. *PLOW and *PHIGH will be made of the same
4403 type as the returned expression. If EXP is not a comparison, we
4404 will most likely not be returning a useful value and range. Set
4405 *STRICT_OVERFLOW_P to true if the return value is only valid
4406 because signed overflow is undefined; otherwise, do not change
4407 *STRICT_OVERFLOW_P. */
4410 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4411 bool *strict_overflow_p
)
4413 enum tree_code code
;
4414 tree arg0
, arg1
= NULL_TREE
;
4415 tree exp_type
, nexp
;
4418 location_t loc
= EXPR_LOCATION (exp
);
4420 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4421 and see if we can refine the range. Some of the cases below may not
4422 happen, but it doesn't seem worth worrying about this. We "continue"
4423 the outer loop when we've changed something; otherwise we "break"
4424 the switch, which will "break" the while. */
4427 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4431 code
= TREE_CODE (exp
);
4432 exp_type
= TREE_TYPE (exp
);
4435 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4437 if (TREE_OPERAND_LENGTH (exp
) > 0)
4438 arg0
= TREE_OPERAND (exp
, 0);
4439 if (TREE_CODE_CLASS (code
) == tcc_binary
4440 || TREE_CODE_CLASS (code
) == tcc_comparison
4441 || (TREE_CODE_CLASS (code
) == tcc_expression
4442 && TREE_OPERAND_LENGTH (exp
) > 1))
4443 arg1
= TREE_OPERAND (exp
, 1);
4445 if (arg0
== NULL_TREE
)
4448 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4449 &high
, &in_p
, strict_overflow_p
);
4450 if (nexp
== NULL_TREE
)
4455 /* If EXP is a constant, we can evaluate whether this is true or false. */
4456 if (TREE_CODE (exp
) == INTEGER_CST
)
4458 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4460 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4466 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4470 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4471 type, TYPE, return an expression to test if EXP is in (or out of, depending
4472 on IN_P) the range. Return 0 if the test couldn't be created. */
4475 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4476 tree low
, tree high
)
4478 tree etype
= TREE_TYPE (exp
), value
;
4480 /* Disable this optimization for function pointer expressions
4481 on targets that require function pointer canonicalization. */
4482 if (targetm
.have_canonicalize_funcptr_for_compare ()
4483 && TREE_CODE (etype
) == POINTER_TYPE
4484 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4489 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4491 return invert_truthvalue_loc (loc
, value
);
4496 if (low
== 0 && high
== 0)
4497 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
4500 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4501 fold_convert_loc (loc
, etype
, high
));
4504 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4505 fold_convert_loc (loc
, etype
, low
));
4507 if (operand_equal_p (low
, high
, 0))
4508 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4509 fold_convert_loc (loc
, etype
, low
));
4511 if (integer_zerop (low
))
4513 if (! TYPE_UNSIGNED (etype
))
4515 etype
= unsigned_type_for (etype
);
4516 high
= fold_convert_loc (loc
, etype
, high
);
4517 exp
= fold_convert_loc (loc
, etype
, exp
);
4519 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4522 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4523 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4525 int prec
= TYPE_PRECISION (etype
);
4527 if (wi::mask (prec
- 1, false, prec
) == high
)
4529 if (TYPE_UNSIGNED (etype
))
4531 tree signed_etype
= signed_type_for (etype
);
4532 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4534 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4536 etype
= signed_etype
;
4537 exp
= fold_convert_loc (loc
, etype
, exp
);
4539 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4540 build_int_cst (etype
, 0));
4544 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4545 This requires wrap-around arithmetics for the type of the expression.
4546 First make sure that arithmetics in this type is valid, then make sure
4547 that it wraps around. */
4548 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4549 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4550 TYPE_UNSIGNED (etype
));
4552 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4554 tree utype
, minv
, maxv
;
4556 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4557 for the type in question, as we rely on this here. */
4558 utype
= unsigned_type_for (etype
);
4559 maxv
= fold_convert_loc (loc
, utype
, TYPE_MAX_VALUE (etype
));
4560 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4561 build_int_cst (TREE_TYPE (maxv
), 1), 1);
4562 minv
= fold_convert_loc (loc
, utype
, TYPE_MIN_VALUE (etype
));
4564 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4571 high
= fold_convert_loc (loc
, etype
, high
);
4572 low
= fold_convert_loc (loc
, etype
, low
);
4573 exp
= fold_convert_loc (loc
, etype
, exp
);
4575 value
= const_binop (MINUS_EXPR
, high
, low
);
4578 if (POINTER_TYPE_P (etype
))
4580 if (value
!= 0 && !TREE_OVERFLOW (value
))
4582 low
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (low
), low
);
4583 return build_range_check (loc
, type
,
4584 fold_build_pointer_plus_loc (loc
, exp
, low
),
4585 1, build_int_cst (etype
, 0), value
);
4590 if (value
!= 0 && !TREE_OVERFLOW (value
))
4591 return build_range_check (loc
, type
,
4592 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4593 1, build_int_cst (etype
, 0), value
);
4598 /* Return the predecessor of VAL in its type, handling the infinite case. */
4601 range_predecessor (tree val
)
4603 tree type
= TREE_TYPE (val
);
4605 if (INTEGRAL_TYPE_P (type
)
4606 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4609 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
4610 build_int_cst (TREE_TYPE (val
), 1), 0);
4613 /* Return the successor of VAL in its type, handling the infinite case. */
4616 range_successor (tree val
)
4618 tree type
= TREE_TYPE (val
);
4620 if (INTEGRAL_TYPE_P (type
)
4621 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4624 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
4625 build_int_cst (TREE_TYPE (val
), 1), 0);
4628 /* Given two ranges, see if we can merge them into one. Return 1 if we
4629 can, 0 if we can't. Set the output range into the specified parameters. */
4632 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4633 tree high0
, int in1_p
, tree low1
, tree high1
)
4641 int lowequal
= ((low0
== 0 && low1
== 0)
4642 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4643 low0
, 0, low1
, 0)));
4644 int highequal
= ((high0
== 0 && high1
== 0)
4645 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4646 high0
, 1, high1
, 1)));
4648 /* Make range 0 be the range that starts first, or ends last if they
4649 start at the same value. Swap them if it isn't. */
4650 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4653 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4654 high1
, 1, high0
, 1))))
4656 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4657 tem
= low0
, low0
= low1
, low1
= tem
;
4658 tem
= high0
, high0
= high1
, high1
= tem
;
4661 /* Now flag two cases, whether the ranges are disjoint or whether the
4662 second range is totally subsumed in the first. Note that the tests
4663 below are simplified by the ones above. */
4664 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4665 high0
, 1, low1
, 0));
4666 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4667 high1
, 1, high0
, 1));
4669 /* We now have four cases, depending on whether we are including or
4670 excluding the two ranges. */
4673 /* If they don't overlap, the result is false. If the second range
4674 is a subset it is the result. Otherwise, the range is from the start
4675 of the second to the end of the first. */
4677 in_p
= 0, low
= high
= 0;
4679 in_p
= 1, low
= low1
, high
= high1
;
4681 in_p
= 1, low
= low1
, high
= high0
;
4684 else if (in0_p
&& ! in1_p
)
4686 /* If they don't overlap, the result is the first range. If they are
4687 equal, the result is false. If the second range is a subset of the
4688 first, and the ranges begin at the same place, we go from just after
4689 the end of the second range to the end of the first. If the second
4690 range is not a subset of the first, or if it is a subset and both
4691 ranges end at the same place, the range starts at the start of the
4692 first range and ends just before the second range.
4693 Otherwise, we can't describe this as a single range. */
4695 in_p
= 1, low
= low0
, high
= high0
;
4696 else if (lowequal
&& highequal
)
4697 in_p
= 0, low
= high
= 0;
4698 else if (subset
&& lowequal
)
4700 low
= range_successor (high1
);
4705 /* We are in the weird situation where high0 > high1 but
4706 high1 has no successor. Punt. */
4710 else if (! subset
|| highequal
)
4713 high
= range_predecessor (low1
);
4717 /* low0 < low1 but low1 has no predecessor. Punt. */
4725 else if (! in0_p
&& in1_p
)
4727 /* If they don't overlap, the result is the second range. If the second
4728 is a subset of the first, the result is false. Otherwise,
4729 the range starts just after the first range and ends at the
4730 end of the second. */
4732 in_p
= 1, low
= low1
, high
= high1
;
4733 else if (subset
|| highequal
)
4734 in_p
= 0, low
= high
= 0;
4737 low
= range_successor (high0
);
4742 /* high1 > high0 but high0 has no successor. Punt. */
4750 /* The case where we are excluding both ranges. Here the complex case
4751 is if they don't overlap. In that case, the only time we have a
4752 range is if they are adjacent. If the second is a subset of the
4753 first, the result is the first. Otherwise, the range to exclude
4754 starts at the beginning of the first range and ends at the end of the
4758 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4759 range_successor (high0
),
4761 in_p
= 0, low
= low0
, high
= high1
;
4764 /* Canonicalize - [min, x] into - [-, x]. */
4765 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4766 switch (TREE_CODE (TREE_TYPE (low0
)))
4769 if (TYPE_PRECISION (TREE_TYPE (low0
))
4770 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4774 if (tree_int_cst_equal (low0
,
4775 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4779 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4780 && integer_zerop (low0
))
4787 /* Canonicalize - [x, max] into - [x, -]. */
4788 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4789 switch (TREE_CODE (TREE_TYPE (high1
)))
4792 if (TYPE_PRECISION (TREE_TYPE (high1
))
4793 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4797 if (tree_int_cst_equal (high1
,
4798 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4802 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4803 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4805 build_int_cst (TREE_TYPE (high1
), 1),
4813 /* The ranges might be also adjacent between the maximum and
4814 minimum values of the given type. For
4815 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4816 return + [x + 1, y - 1]. */
4817 if (low0
== 0 && high1
== 0)
4819 low
= range_successor (high0
);
4820 high
= range_predecessor (low1
);
4821 if (low
== 0 || high
== 0)
4831 in_p
= 0, low
= low0
, high
= high0
;
4833 in_p
= 0, low
= low0
, high
= high1
;
4836 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4841 /* Subroutine of fold, looking inside expressions of the form
4842 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4843 of the COND_EXPR. This function is being used also to optimize
4844 A op B ? C : A, by reversing the comparison first.
4846 Return a folded expression whose code is not a COND_EXPR
4847 anymore, or NULL_TREE if no folding opportunity is found. */
4850 fold_cond_expr_with_comparison (location_t loc
, tree type
,
4851 tree arg0
, tree arg1
, tree arg2
)
4853 enum tree_code comp_code
= TREE_CODE (arg0
);
4854 tree arg00
= TREE_OPERAND (arg0
, 0);
4855 tree arg01
= TREE_OPERAND (arg0
, 1);
4856 tree arg1_type
= TREE_TYPE (arg1
);
4862 /* If we have A op 0 ? A : -A, consider applying the following
4865 A == 0? A : -A same as -A
4866 A != 0? A : -A same as A
4867 A >= 0? A : -A same as abs (A)
4868 A > 0? A : -A same as abs (A)
4869 A <= 0? A : -A same as -abs (A)
4870 A < 0? A : -A same as -abs (A)
4872 None of these transformations work for modes with signed
4873 zeros. If A is +/-0, the first two transformations will
4874 change the sign of the result (from +0 to -0, or vice
4875 versa). The last four will fix the sign of the result,
4876 even though the original expressions could be positive or
4877 negative, depending on the sign of A.
4879 Note that all these transformations are correct if A is
4880 NaN, since the two alternatives (A and -A) are also NaNs. */
4881 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4882 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4883 ? real_zerop (arg01
)
4884 : integer_zerop (arg01
))
4885 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4886 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4887 /* In the case that A is of the form X-Y, '-A' (arg2) may
4888 have already been folded to Y-X, check for that. */
4889 || (TREE_CODE (arg1
) == MINUS_EXPR
4890 && TREE_CODE (arg2
) == MINUS_EXPR
4891 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4892 TREE_OPERAND (arg2
, 1), 0)
4893 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4894 TREE_OPERAND (arg2
, 0), 0))))
4899 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
4900 return pedantic_non_lvalue_loc (loc
,
4901 fold_convert_loc (loc
, type
,
4902 negate_expr (tem
)));
4905 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4908 if (flag_trapping_math
)
4913 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4914 arg1
= fold_convert_loc (loc
, signed_type_for
4915 (TREE_TYPE (arg1
)), arg1
);
4916 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4917 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4920 if (flag_trapping_math
)
4924 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4925 arg1
= fold_convert_loc (loc
, signed_type_for
4926 (TREE_TYPE (arg1
)), arg1
);
4927 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4928 return negate_expr (fold_convert_loc (loc
, type
, tem
));
4930 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4934 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4935 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4936 both transformations are correct when A is NaN: A != 0
4937 is then true, and A == 0 is false. */
4939 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4940 && integer_zerop (arg01
) && integer_zerop (arg2
))
4942 if (comp_code
== NE_EXPR
)
4943 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4944 else if (comp_code
== EQ_EXPR
)
4945 return build_zero_cst (type
);
4948 /* Try some transformations of A op B ? A : B.
4950 A == B? A : B same as B
4951 A != B? A : B same as A
4952 A >= B? A : B same as max (A, B)
4953 A > B? A : B same as max (B, A)
4954 A <= B? A : B same as min (A, B)
4955 A < B? A : B same as min (B, A)
4957 As above, these transformations don't work in the presence
4958 of signed zeros. For example, if A and B are zeros of
4959 opposite sign, the first two transformations will change
4960 the sign of the result. In the last four, the original
4961 expressions give different results for (A=+0, B=-0) and
4962 (A=-0, B=+0), but the transformed expressions do not.
4964 The first two transformations are correct if either A or B
4965 is a NaN. In the first transformation, the condition will
4966 be false, and B will indeed be chosen. In the case of the
4967 second transformation, the condition A != B will be true,
4968 and A will be chosen.
4970 The conversions to max() and min() are not correct if B is
4971 a number and A is not. The conditions in the original
4972 expressions will be false, so all four give B. The min()
4973 and max() versions would give a NaN instead. */
4974 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4975 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4976 /* Avoid these transformations if the COND_EXPR may be used
4977 as an lvalue in the C++ front-end. PR c++/19199. */
4979 || VECTOR_TYPE_P (type
)
4980 || (! lang_GNU_CXX ()
4981 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4982 || ! maybe_lvalue_p (arg1
)
4983 || ! maybe_lvalue_p (arg2
)))
4985 tree comp_op0
= arg00
;
4986 tree comp_op1
= arg01
;
4987 tree comp_type
= TREE_TYPE (comp_op0
);
4989 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4990 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
5000 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
5002 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
5007 /* In C++ a ?: expression can be an lvalue, so put the
5008 operand which will be used if they are equal first
5009 so that we can convert this back to the
5010 corresponding COND_EXPR. */
5011 if (!HONOR_NANS (arg1
))
5013 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5014 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5015 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
5016 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
5017 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
5018 comp_op1
, comp_op0
);
5019 return pedantic_non_lvalue_loc (loc
,
5020 fold_convert_loc (loc
, type
, tem
));
5027 if (!HONOR_NANS (arg1
))
5029 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5030 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5031 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
5032 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
5033 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
5034 comp_op1
, comp_op0
);
5035 return pedantic_non_lvalue_loc (loc
,
5036 fold_convert_loc (loc
, type
, tem
));
5040 if (!HONOR_NANS (arg1
))
5041 return pedantic_non_lvalue_loc (loc
,
5042 fold_convert_loc (loc
, type
, arg2
));
5045 if (!HONOR_NANS (arg1
))
5046 return pedantic_non_lvalue_loc (loc
,
5047 fold_convert_loc (loc
, type
, arg1
));
5050 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5055 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5056 we might still be able to simplify this. For example,
5057 if C1 is one less or one more than C2, this might have started
5058 out as a MIN or MAX and been transformed by this function.
5059 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5061 if (INTEGRAL_TYPE_P (type
)
5062 && TREE_CODE (arg01
) == INTEGER_CST
5063 && TREE_CODE (arg2
) == INTEGER_CST
)
5067 if (TREE_CODE (arg1
) == INTEGER_CST
)
5069 /* We can replace A with C1 in this case. */
5070 arg1
= fold_convert_loc (loc
, type
, arg01
);
5071 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
5074 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5075 MIN_EXPR, to preserve the signedness of the comparison. */
5076 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5078 && operand_equal_p (arg01
,
5079 const_binop (PLUS_EXPR
, arg2
,
5080 build_int_cst (type
, 1)),
5083 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
5084 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5086 return pedantic_non_lvalue_loc (loc
,
5087 fold_convert_loc (loc
, type
, tem
));
5092 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5094 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5096 && operand_equal_p (arg01
,
5097 const_binop (MINUS_EXPR
, arg2
,
5098 build_int_cst (type
, 1)),
5101 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
5102 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5104 return pedantic_non_lvalue_loc (loc
,
5105 fold_convert_loc (loc
, type
, tem
));
5110 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5111 MAX_EXPR, to preserve the signedness of the comparison. */
5112 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5114 && operand_equal_p (arg01
,
5115 const_binop (MINUS_EXPR
, arg2
,
5116 build_int_cst (type
, 1)),
5119 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5120 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5122 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5127 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5128 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5130 && operand_equal_p (arg01
,
5131 const_binop (PLUS_EXPR
, arg2
,
5132 build_int_cst (type
, 1)),
5135 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5136 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5138 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5152 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5153 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5154 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5158 /* EXP is some logical combination of boolean tests. See if we can
5159 merge it into some range test. Return the new tree if so. */
5162 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
5165 int or_op
= (code
== TRUTH_ORIF_EXPR
5166 || code
== TRUTH_OR_EXPR
);
5167 int in0_p
, in1_p
, in_p
;
5168 tree low0
, low1
, low
, high0
, high1
, high
;
5169 bool strict_overflow_p
= false;
5171 const char * const warnmsg
= G_("assuming signed overflow does not occur "
5172 "when simplifying range test");
5174 if (!INTEGRAL_TYPE_P (type
))
5177 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
5178 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
5180 /* If this is an OR operation, invert both sides; we will invert
5181 again at the end. */
5183 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
5185 /* If both expressions are the same, if we can merge the ranges, and we
5186 can build the range test, return it or it inverted. If one of the
5187 ranges is always true or always false, consider it to be the same
5188 expression as the other. */
5189 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
5190 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
5192 && 0 != (tem
= (build_range_check (loc
, type
,
5194 : rhs
!= 0 ? rhs
: integer_zero_node
,
5197 if (strict_overflow_p
)
5198 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
5199 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
5202 /* On machines where the branch cost is expensive, if this is a
5203 short-circuited branch and the underlying object on both sides
5204 is the same, make a non-short-circuit operation. */
5205 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5206 && lhs
!= 0 && rhs
!= 0
5207 && (code
== TRUTH_ANDIF_EXPR
5208 || code
== TRUTH_ORIF_EXPR
)
5209 && operand_equal_p (lhs
, rhs
, 0))
5211 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5212 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5213 which cases we can't do this. */
5214 if (simple_operand_p (lhs
))
5215 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5216 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5219 else if (!lang_hooks
.decls
.global_bindings_p ()
5220 && !CONTAINS_PLACEHOLDER_P (lhs
))
5222 tree common
= save_expr (lhs
);
5224 if (0 != (lhs
= build_range_check (loc
, type
, common
,
5225 or_op
? ! in0_p
: in0_p
,
5227 && (0 != (rhs
= build_range_check (loc
, type
, common
,
5228 or_op
? ! in1_p
: in1_p
,
5231 if (strict_overflow_p
)
5232 fold_overflow_warning (warnmsg
,
5233 WARN_STRICT_OVERFLOW_COMPARISON
);
5234 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5235 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5244 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5245 bit value. Arrange things so the extra bits will be set to zero if and
5246 only if C is signed-extended to its full width. If MASK is nonzero,
5247 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5250 unextend (tree c
, int p
, int unsignedp
, tree mask
)
5252 tree type
= TREE_TYPE (c
);
5253 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
5256 if (p
== modesize
|| unsignedp
)
5259 /* We work by getting just the sign bit into the low-order bit, then
5260 into the high-order bit, then sign-extend. We then XOR that value
5262 temp
= build_int_cst (TREE_TYPE (c
), wi::extract_uhwi (c
, p
- 1, 1));
5264 /* We must use a signed type in order to get an arithmetic right shift.
5265 However, we must also avoid introducing accidental overflows, so that
5266 a subsequent call to integer_zerop will work. Hence we must
5267 do the type conversion here. At this point, the constant is either
5268 zero or one, and the conversion to a signed type can never overflow.
5269 We could get an overflow if this conversion is done anywhere else. */
5270 if (TYPE_UNSIGNED (type
))
5271 temp
= fold_convert (signed_type_for (type
), temp
);
5273 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
5274 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
5276 temp
= const_binop (BIT_AND_EXPR
, temp
,
5277 fold_convert (TREE_TYPE (c
), mask
));
5278 /* If necessary, convert the type back to match the type of C. */
5279 if (TYPE_UNSIGNED (type
))
5280 temp
= fold_convert (type
, temp
);
5282 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
5285 /* For an expression that has the form
5289 we can drop one of the inner expressions and simplify to
5293 LOC is the location of the resulting expression. OP is the inner
5294 logical operation; the left-hand side in the examples above, while CMPOP
5295 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5296 removing a condition that guards another, as in
5297 (A != NULL && A->...) || A == NULL
5298 which we must not transform. If RHS_ONLY is true, only eliminate the
5299 right-most operand of the inner logical operation. */
5302 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
5305 tree type
= TREE_TYPE (cmpop
);
5306 enum tree_code code
= TREE_CODE (cmpop
);
5307 enum tree_code truthop_code
= TREE_CODE (op
);
5308 tree lhs
= TREE_OPERAND (op
, 0);
5309 tree rhs
= TREE_OPERAND (op
, 1);
5310 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
5311 enum tree_code rhs_code
= TREE_CODE (rhs
);
5312 enum tree_code lhs_code
= TREE_CODE (lhs
);
5313 enum tree_code inv_code
;
5315 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
5318 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
5321 if (rhs_code
== truthop_code
)
5323 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
5324 if (newrhs
!= NULL_TREE
)
5327 rhs_code
= TREE_CODE (rhs
);
5330 if (lhs_code
== truthop_code
&& !rhs_only
)
5332 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
5333 if (newlhs
!= NULL_TREE
)
5336 lhs_code
= TREE_CODE (lhs
);
5340 inv_code
= invert_tree_comparison (code
, HONOR_NANS (type
));
5341 if (inv_code
== rhs_code
5342 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5343 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5345 if (!rhs_only
&& inv_code
== lhs_code
5346 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5347 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5349 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
5350 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5355 /* Find ways of folding logical expressions of LHS and RHS:
5356 Try to merge two comparisons to the same innermost item.
5357 Look for range tests like "ch >= '0' && ch <= '9'".
5358 Look for combinations of simple terms on machines with expensive branches
5359 and evaluate the RHS unconditionally.
5361 For example, if we have p->a == 2 && p->b == 4 and we can make an
5362 object large enough to span both A and B, we can do this with a comparison
5363 against the object ANDed with the a mask.
5365 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5366 operations to do this with one comparison.
5368 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5369 function and the one above.
5371 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5372 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5374 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5377 We return the simplified tree or 0 if no optimization is possible. */
5380 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
5383 /* If this is the "or" of two comparisons, we can do something if
5384 the comparisons are NE_EXPR. If this is the "and", we can do something
5385 if the comparisons are EQ_EXPR. I.e.,
5386 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5388 WANTED_CODE is this operation code. For single bit fields, we can
5389 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5390 comparison for one-bit fields. */
5392 enum tree_code wanted_code
;
5393 enum tree_code lcode
, rcode
;
5394 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5395 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5396 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5397 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5398 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5399 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5400 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5401 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5402 machine_mode lnmode
, rnmode
;
5403 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5404 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5405 tree l_const
, r_const
;
5406 tree lntype
, rntype
, result
;
5407 HOST_WIDE_INT first_bit
, end_bit
;
5410 /* Start by getting the comparison codes. Fail if anything is volatile.
5411 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5412 it were surrounded with a NE_EXPR. */
5414 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5417 lcode
= TREE_CODE (lhs
);
5418 rcode
= TREE_CODE (rhs
);
5420 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5422 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5423 build_int_cst (TREE_TYPE (lhs
), 0));
5427 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5429 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5430 build_int_cst (TREE_TYPE (rhs
), 0));
5434 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5435 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5438 ll_arg
= TREE_OPERAND (lhs
, 0);
5439 lr_arg
= TREE_OPERAND (lhs
, 1);
5440 rl_arg
= TREE_OPERAND (rhs
, 0);
5441 rr_arg
= TREE_OPERAND (rhs
, 1);
5443 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5444 if (simple_operand_p (ll_arg
)
5445 && simple_operand_p (lr_arg
))
5447 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5448 && operand_equal_p (lr_arg
, rr_arg
, 0))
5450 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5451 truth_type
, ll_arg
, lr_arg
);
5455 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5456 && operand_equal_p (lr_arg
, rl_arg
, 0))
5458 result
= combine_comparisons (loc
, code
, lcode
,
5459 swap_tree_comparison (rcode
),
5460 truth_type
, ll_arg
, lr_arg
);
5466 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5467 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5469 /* If the RHS can be evaluated unconditionally and its operands are
5470 simple, it wins to evaluate the RHS unconditionally on machines
5471 with expensive branches. In this case, this isn't a comparison
5472 that can be merged. */
5474 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5476 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5477 && simple_operand_p (rl_arg
)
5478 && simple_operand_p (rr_arg
))
5480 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5481 if (code
== TRUTH_OR_EXPR
5482 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5483 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5484 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5485 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5486 return build2_loc (loc
, NE_EXPR
, truth_type
,
5487 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5489 build_int_cst (TREE_TYPE (ll_arg
), 0));
5491 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5492 if (code
== TRUTH_AND_EXPR
5493 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5494 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5495 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5496 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5497 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5498 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5500 build_int_cst (TREE_TYPE (ll_arg
), 0));
5503 /* See if the comparisons can be merged. Then get all the parameters for
5506 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5507 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5511 ll_inner
= decode_field_reference (loc
, ll_arg
,
5512 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5513 &ll_unsignedp
, &volatilep
, &ll_mask
,
5515 lr_inner
= decode_field_reference (loc
, lr_arg
,
5516 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5517 &lr_unsignedp
, &volatilep
, &lr_mask
,
5519 rl_inner
= decode_field_reference (loc
, rl_arg
,
5520 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5521 &rl_unsignedp
, &volatilep
, &rl_mask
,
5523 rr_inner
= decode_field_reference (loc
, rr_arg
,
5524 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5525 &rr_unsignedp
, &volatilep
, &rr_mask
,
5528 /* It must be true that the inner operation on the lhs of each
5529 comparison must be the same if we are to be able to do anything.
5530 Then see if we have constants. If not, the same must be true for
5532 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5533 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5536 if (TREE_CODE (lr_arg
) == INTEGER_CST
5537 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5538 l_const
= lr_arg
, r_const
= rr_arg
;
5539 else if (lr_inner
== 0 || rr_inner
== 0
5540 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5543 l_const
= r_const
= 0;
5545 /* If either comparison code is not correct for our logical operation,
5546 fail. However, we can convert a one-bit comparison against zero into
5547 the opposite comparison against that bit being set in the field. */
5549 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5550 if (lcode
!= wanted_code
)
5552 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5554 /* Make the left operand unsigned, since we are only interested
5555 in the value of one bit. Otherwise we are doing the wrong
5564 /* This is analogous to the code for l_const above. */
5565 if (rcode
!= wanted_code
)
5567 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5576 /* See if we can find a mode that contains both fields being compared on
5577 the left. If we can't, fail. Otherwise, update all constants and masks
5578 to be relative to a field of that size. */
5579 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5580 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5581 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5582 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5584 if (lnmode
== VOIDmode
)
5587 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5588 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5589 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5590 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5592 if (BYTES_BIG_ENDIAN
)
5594 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5595 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5598 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5599 size_int (xll_bitpos
));
5600 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5601 size_int (xrl_bitpos
));
5605 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5606 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5607 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5608 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5609 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5612 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5614 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5619 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5620 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5621 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5622 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5623 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5626 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5628 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5632 /* If the right sides are not constant, do the same for it. Also,
5633 disallow this optimization if a size or signedness mismatch occurs
5634 between the left and right sides. */
5637 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5638 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5639 /* Make sure the two fields on the right
5640 correspond to the left without being swapped. */
5641 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5644 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5645 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5646 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5647 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5649 if (rnmode
== VOIDmode
)
5652 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5653 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5654 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5655 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5657 if (BYTES_BIG_ENDIAN
)
5659 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5660 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5663 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5665 size_int (xlr_bitpos
));
5666 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5668 size_int (xrr_bitpos
));
5670 /* Make a mask that corresponds to both fields being compared.
5671 Do this for both items being compared. If the operands are the
5672 same size and the bits being compared are in the same position
5673 then we can do this by masking both and comparing the masked
5675 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5676 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5677 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5679 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5680 ll_unsignedp
|| rl_unsignedp
);
5681 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5682 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5684 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5685 lr_unsignedp
|| rr_unsignedp
);
5686 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5687 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5689 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5692 /* There is still another way we can do something: If both pairs of
5693 fields being compared are adjacent, we may be able to make a wider
5694 field containing them both.
5696 Note that we still must mask the lhs/rhs expressions. Furthermore,
5697 the mask must be shifted to account for the shift done by
5698 make_bit_field_ref. */
5699 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5700 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5701 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5702 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5706 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5707 ll_bitsize
+ rl_bitsize
,
5708 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5709 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5710 lr_bitsize
+ rr_bitsize
,
5711 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5713 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5714 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5715 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5716 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5718 /* Convert to the smaller type before masking out unwanted bits. */
5720 if (lntype
!= rntype
)
5722 if (lnbitsize
> rnbitsize
)
5724 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5725 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5728 else if (lnbitsize
< rnbitsize
)
5730 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5731 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5736 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5737 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5739 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5740 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5742 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5748 /* Handle the case of comparisons with constants. If there is something in
5749 common between the masks, those bits of the constants must be the same.
5750 If not, the condition is always false. Test for this to avoid generating
5751 incorrect code below. */
5752 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5753 if (! integer_zerop (result
)
5754 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5755 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5757 if (wanted_code
== NE_EXPR
)
5759 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5760 return constant_boolean_node (true, truth_type
);
5764 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5765 return constant_boolean_node (false, truth_type
);
5769 /* Construct the expression we will return. First get the component
5770 reference we will make. Unless the mask is all ones the width of
5771 that field, perform the mask operation. Then compare with the
5773 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5774 ll_unsignedp
|| rl_unsignedp
);
5776 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5777 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5778 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5780 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5781 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
5784 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5788 optimize_minmax_comparison (location_t loc
, enum tree_code code
, tree type
,
5792 enum tree_code op_code
;
5795 int consts_equal
, consts_lt
;
5798 STRIP_SIGN_NOPS (arg0
);
5800 op_code
= TREE_CODE (arg0
);
5801 minmax_const
= TREE_OPERAND (arg0
, 1);
5802 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5803 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5804 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5805 inner
= TREE_OPERAND (arg0
, 0);
5807 /* If something does not permit us to optimize, return the original tree. */
5808 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5809 || TREE_CODE (comp_const
) != INTEGER_CST
5810 || TREE_OVERFLOW (comp_const
)
5811 || TREE_CODE (minmax_const
) != INTEGER_CST
5812 || TREE_OVERFLOW (minmax_const
))
5815 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5816 and GT_EXPR, doing the rest with recursive calls using logical
5820 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5823 = optimize_minmax_comparison (loc
,
5824 invert_tree_comparison (code
, false),
5827 return invert_truthvalue_loc (loc
, tem
);
5833 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5834 optimize_minmax_comparison
5835 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5836 optimize_minmax_comparison
5837 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
5840 if (op_code
== MAX_EXPR
&& consts_equal
)
5841 /* MAX (X, 0) == 0 -> X <= 0 */
5842 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5844 else if (op_code
== MAX_EXPR
&& consts_lt
)
5845 /* MAX (X, 0) == 5 -> X == 5 */
5846 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5848 else if (op_code
== MAX_EXPR
)
5849 /* MAX (X, 0) == -1 -> false */
5850 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5852 else if (consts_equal
)
5853 /* MIN (X, 0) == 0 -> X >= 0 */
5854 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5857 /* MIN (X, 0) == 5 -> false */
5858 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5861 /* MIN (X, 0) == -1 -> X == -1 */
5862 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5865 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5866 /* MAX (X, 0) > 0 -> X > 0
5867 MAX (X, 0) > 5 -> X > 5 */
5868 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5870 else if (op_code
== MAX_EXPR
)
5871 /* MAX (X, 0) > -1 -> true */
5872 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5874 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5875 /* MIN (X, 0) > 0 -> false
5876 MIN (X, 0) > 5 -> false */
5877 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5880 /* MIN (X, 0) > -1 -> X > -1 */
5881 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5888 /* T is an integer expression that is being multiplied, divided, or taken a
5889 modulus (CODE says which and what kind of divide or modulus) by a
5890 constant C. See if we can eliminate that operation by folding it with
5891 other operations already in T. WIDE_TYPE, if non-null, is a type that
5892 should be used for the computation if wider than our type.
5894 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5895 (X * 2) + (Y * 4). We must, however, be assured that either the original
5896 expression would not overflow or that overflow is undefined for the type
5897 in the language in question.
5899 If we return a non-null expression, it is an equivalent form of the
5900 original computation, but need not be in the original type.
5902 We set *STRICT_OVERFLOW_P to true if the return values depends on
5903 signed overflow being undefined. Otherwise we do not change
5904 *STRICT_OVERFLOW_P. */
5907 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5908 bool *strict_overflow_p
)
5910 /* To avoid exponential search depth, refuse to allow recursion past
5911 three levels. Beyond that (1) it's highly unlikely that we'll find
5912 something interesting and (2) we've probably processed it before
5913 when we built the inner expression. */
5922 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5929 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5930 bool *strict_overflow_p
)
5932 tree type
= TREE_TYPE (t
);
5933 enum tree_code tcode
= TREE_CODE (t
);
5934 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5935 > GET_MODE_SIZE (TYPE_MODE (type
)))
5936 ? wide_type
: type
);
5938 int same_p
= tcode
== code
;
5939 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5940 bool sub_strict_overflow_p
;
5942 /* Don't deal with constants of zero here; they confuse the code below. */
5943 if (integer_zerop (c
))
5946 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5947 op0
= TREE_OPERAND (t
, 0);
5949 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5950 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5952 /* Note that we need not handle conditional operations here since fold
5953 already handles those cases. So just do arithmetic here. */
5957 /* For a constant, we can always simplify if we are a multiply
5958 or (for divide and modulus) if it is a multiple of our constant. */
5959 if (code
== MULT_EXPR
5960 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
5961 return const_binop (code
, fold_convert (ctype
, t
),
5962 fold_convert (ctype
, c
));
5965 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5966 /* If op0 is an expression ... */
5967 if ((COMPARISON_CLASS_P (op0
)
5968 || UNARY_CLASS_P (op0
)
5969 || BINARY_CLASS_P (op0
)
5970 || VL_EXP_CLASS_P (op0
)
5971 || EXPRESSION_CLASS_P (op0
))
5972 /* ... and has wrapping overflow, and its type is smaller
5973 than ctype, then we cannot pass through as widening. */
5974 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
5975 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
)))
5976 && (TYPE_PRECISION (ctype
)
5977 > TYPE_PRECISION (TREE_TYPE (op0
))))
5978 /* ... or this is a truncation (t is narrower than op0),
5979 then we cannot pass through this narrowing. */
5980 || (TYPE_PRECISION (type
)
5981 < TYPE_PRECISION (TREE_TYPE (op0
)))
5982 /* ... or signedness changes for division or modulus,
5983 then we cannot pass through this conversion. */
5984 || (code
!= MULT_EXPR
5985 && (TYPE_UNSIGNED (ctype
)
5986 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
5987 /* ... or has undefined overflow while the converted to
5988 type has not, we cannot do the operation in the inner type
5989 as that would introduce undefined overflow. */
5990 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
5991 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
)))
5992 && !TYPE_OVERFLOW_UNDEFINED (type
))))
5995 /* Pass the constant down and see if we can make a simplification. If
5996 we can, replace this expression with the inner simplification for
5997 possible later conversion to our or some other type. */
5998 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5999 && TREE_CODE (t2
) == INTEGER_CST
6000 && !TREE_OVERFLOW (t2
)
6001 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
6003 ? ctype
: NULL_TREE
,
6004 strict_overflow_p
))))
6009 /* If widening the type changes it from signed to unsigned, then we
6010 must avoid building ABS_EXPR itself as unsigned. */
6011 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6013 tree cstype
= (*signed_type_for
) (ctype
);
6014 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6017 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6018 return fold_convert (ctype
, t1
);
6022 /* If the constant is negative, we cannot simplify this. */
6023 if (tree_int_cst_sgn (c
) == -1)
6027 /* For division and modulus, type can't be unsigned, as e.g.
6028 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6029 For signed types, even with wrapping overflow, this is fine. */
6030 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6032 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6034 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6037 case MIN_EXPR
: case MAX_EXPR
:
6038 /* If widening the type changes the signedness, then we can't perform
6039 this optimization as that changes the result. */
6040 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6043 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6044 sub_strict_overflow_p
= false;
6045 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6046 &sub_strict_overflow_p
)) != 0
6047 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6048 &sub_strict_overflow_p
)) != 0)
6050 if (tree_int_cst_sgn (c
) < 0)
6051 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6052 if (sub_strict_overflow_p
)
6053 *strict_overflow_p
= true;
6054 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6055 fold_convert (ctype
, t2
));
6059 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6060 /* If the second operand is constant, this is a multiplication
6061 or floor division, by a power of two, so we can treat it that
6062 way unless the multiplier or divisor overflows. Signed
6063 left-shift overflow is implementation-defined rather than
6064 undefined in C90, so do not convert signed left shift into
6066 if (TREE_CODE (op1
) == INTEGER_CST
6067 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6068 /* const_binop may not detect overflow correctly,
6069 so check for it explicitly here. */
6070 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
6071 && 0 != (t1
= fold_convert (ctype
,
6072 const_binop (LSHIFT_EXPR
,
6075 && !TREE_OVERFLOW (t1
))
6076 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6077 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6079 fold_convert (ctype
, op0
),
6081 c
, code
, wide_type
, strict_overflow_p
);
6084 case PLUS_EXPR
: case MINUS_EXPR
:
6085 /* See if we can eliminate the operation on both sides. If we can, we
6086 can return a new PLUS or MINUS. If we can't, the only remaining
6087 cases where we can do anything are if the second operand is a
6089 sub_strict_overflow_p
= false;
6090 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6091 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6092 if (t1
!= 0 && t2
!= 0
6093 && (code
== MULT_EXPR
6094 /* If not multiplication, we can only do this if both operands
6095 are divisible by c. */
6096 || (multiple_of_p (ctype
, op0
, c
)
6097 && multiple_of_p (ctype
, op1
, c
))))
6099 if (sub_strict_overflow_p
)
6100 *strict_overflow_p
= true;
6101 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6102 fold_convert (ctype
, t2
));
6105 /* If this was a subtraction, negate OP1 and set it to be an addition.
6106 This simplifies the logic below. */
6107 if (tcode
== MINUS_EXPR
)
6109 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
6110 /* If OP1 was not easily negatable, the constant may be OP0. */
6111 if (TREE_CODE (op0
) == INTEGER_CST
)
6113 std::swap (op0
, op1
);
6118 if (TREE_CODE (op1
) != INTEGER_CST
)
6121 /* If either OP1 or C are negative, this optimization is not safe for
6122 some of the division and remainder types while for others we need
6123 to change the code. */
6124 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
6126 if (code
== CEIL_DIV_EXPR
)
6127 code
= FLOOR_DIV_EXPR
;
6128 else if (code
== FLOOR_DIV_EXPR
)
6129 code
= CEIL_DIV_EXPR
;
6130 else if (code
!= MULT_EXPR
6131 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
6135 /* If it's a multiply or a division/modulus operation of a multiple
6136 of our constant, do the operation and verify it doesn't overflow. */
6137 if (code
== MULT_EXPR
6138 || wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6140 op1
= const_binop (code
, fold_convert (ctype
, op1
),
6141 fold_convert (ctype
, c
));
6142 /* We allow the constant to overflow with wrapping semantics. */
6144 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
6150 /* If we have an unsigned type, we cannot widen the operation since it
6151 will change the result if the original computation overflowed. */
6152 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
6155 /* If we were able to eliminate our operation from the first side,
6156 apply our operation to the second side and reform the PLUS. */
6157 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
6158 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
6160 /* The last case is if we are a multiply. In that case, we can
6161 apply the distributive law to commute the multiply and addition
6162 if the multiplication of the constants doesn't overflow
6163 and overflow is defined. With undefined overflow
6164 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6165 if (code
== MULT_EXPR
&& TYPE_OVERFLOW_WRAPS (ctype
))
6166 return fold_build2 (tcode
, ctype
,
6167 fold_build2 (code
, ctype
,
6168 fold_convert (ctype
, op0
),
6169 fold_convert (ctype
, c
)),
6175 /* We have a special case here if we are doing something like
6176 (C * 8) % 4 since we know that's zero. */
6177 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
6178 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
6179 /* If the multiplication can overflow we cannot optimize this. */
6180 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
6181 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
6182 && wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6184 *strict_overflow_p
= true;
6185 return omit_one_operand (type
, integer_zero_node
, op0
);
6188 /* ... fall through ... */
6190 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6191 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
6192 /* If we can extract our operation from the LHS, do so and return a
6193 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6194 do something only if the second operand is a constant. */
6196 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6197 strict_overflow_p
)) != 0)
6198 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6199 fold_convert (ctype
, op1
));
6200 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
6201 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
6202 strict_overflow_p
)) != 0)
6203 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6204 fold_convert (ctype
, t1
));
6205 else if (TREE_CODE (op1
) != INTEGER_CST
)
6208 /* If these are the same operation types, we can associate them
6209 assuming no overflow. */
6212 bool overflow_p
= false;
6213 bool overflow_mul_p
;
6214 signop sign
= TYPE_SIGN (ctype
);
6215 wide_int mul
= wi::mul (op1
, c
, sign
, &overflow_mul_p
);
6216 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
6218 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
6221 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6222 wide_int_to_tree (ctype
, mul
));
6225 /* If these operations "cancel" each other, we have the main
6226 optimizations of this pass, which occur when either constant is a
6227 multiple of the other, in which case we replace this with either an
6228 operation or CODE or TCODE.
6230 If we have an unsigned type, we cannot do this since it will change
6231 the result if the original computation overflowed. */
6232 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
6233 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
6234 || (tcode
== MULT_EXPR
6235 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
6236 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6237 && code
!= MULT_EXPR
)))
6239 if (wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6241 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6242 *strict_overflow_p
= true;
6243 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6244 fold_convert (ctype
,
6245 const_binop (TRUNC_DIV_EXPR
,
6248 else if (wi::multiple_of_p (c
, op1
, TYPE_SIGN (type
)))
6250 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6251 *strict_overflow_p
= true;
6252 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6253 fold_convert (ctype
,
6254 const_binop (TRUNC_DIV_EXPR
,
6267 /* Return a node which has the indicated constant VALUE (either 0 or
6268 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6269 and is of the indicated TYPE. */
6272 constant_boolean_node (bool value
, tree type
)
6274 if (type
== integer_type_node
)
6275 return value
? integer_one_node
: integer_zero_node
;
6276 else if (type
== boolean_type_node
)
6277 return value
? boolean_true_node
: boolean_false_node
;
6278 else if (TREE_CODE (type
) == VECTOR_TYPE
)
6279 return build_vector_from_val (type
,
6280 build_int_cst (TREE_TYPE (type
),
6283 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
6287 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6288 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6289 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6290 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6291 COND is the first argument to CODE; otherwise (as in the example
6292 given here), it is the second argument. TYPE is the type of the
6293 original expression. Return NULL_TREE if no simplification is
6297 fold_binary_op_with_conditional_arg (location_t loc
,
6298 enum tree_code code
,
6299 tree type
, tree op0
, tree op1
,
6300 tree cond
, tree arg
, int cond_first_p
)
6302 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
6303 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
6304 tree test
, true_value
, false_value
;
6305 tree lhs
= NULL_TREE
;
6306 tree rhs
= NULL_TREE
;
6307 enum tree_code cond_code
= COND_EXPR
;
6309 if (TREE_CODE (cond
) == COND_EXPR
6310 || TREE_CODE (cond
) == VEC_COND_EXPR
)
6312 test
= TREE_OPERAND (cond
, 0);
6313 true_value
= TREE_OPERAND (cond
, 1);
6314 false_value
= TREE_OPERAND (cond
, 2);
6315 /* If this operand throws an expression, then it does not make
6316 sense to try to perform a logical or arithmetic operation
6318 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6320 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6325 tree testtype
= TREE_TYPE (cond
);
6327 true_value
= constant_boolean_node (true, testtype
);
6328 false_value
= constant_boolean_node (false, testtype
);
6331 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
6332 cond_code
= VEC_COND_EXPR
;
6334 /* This transformation is only worthwhile if we don't have to wrap ARG
6335 in a SAVE_EXPR and the operation can be simplified without recursing
6336 on at least one of the branches once its pushed inside the COND_EXPR. */
6337 if (!TREE_CONSTANT (arg
)
6338 && (TREE_SIDE_EFFECTS (arg
)
6339 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
6340 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6343 arg
= fold_convert_loc (loc
, arg_type
, arg
);
6346 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6348 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6350 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6354 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6356 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6358 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6361 /* Check that we have simplified at least one of the branches. */
6362 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6365 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
6369 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6371 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6372 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6373 ADDEND is the same as X.
6375 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6376 and finite. The problematic cases are when X is zero, and its mode
6377 has signed zeros. In the case of rounding towards -infinity,
6378 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6379 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6382 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6384 if (!real_zerop (addend
))
6387 /* Don't allow the fold with -fsignaling-nans. */
6388 if (HONOR_SNANS (element_mode (type
)))
6391 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6392 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
6395 /* In a vector or complex, we would need to check the sign of all zeros. */
6396 if (TREE_CODE (addend
) != REAL_CST
)
6399 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6400 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6403 /* The mode has signed zeros, and we have to honor their sign.
6404 In this situation, there is only one case we can return true for.
6405 X - 0 is the same as X unless rounding towards -infinity is
6407 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
));
6410 /* Subroutine of fold() that optimizes comparisons of a division by
6411 a nonzero integer constant against an integer constant, i.e.
6414 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6415 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6416 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6418 The function returns the constant folded tree if a simplification
6419 can be made, and NULL_TREE otherwise. */
6422 fold_div_compare (location_t loc
,
6423 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6425 tree prod
, tmp
, hi
, lo
;
6426 tree arg00
= TREE_OPERAND (arg0
, 0);
6427 tree arg01
= TREE_OPERAND (arg0
, 1);
6428 signop sign
= TYPE_SIGN (TREE_TYPE (arg0
));
6429 bool neg_overflow
= false;
6432 /* We have to do this the hard way to detect unsigned overflow.
6433 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6434 wide_int val
= wi::mul (arg01
, arg1
, sign
, &overflow
);
6435 prod
= force_fit_type (TREE_TYPE (arg00
), val
, -1, overflow
);
6436 neg_overflow
= false;
6438 if (sign
== UNSIGNED
)
6440 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6441 build_int_cst (TREE_TYPE (arg01
), 1));
6444 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6445 val
= wi::add (prod
, tmp
, sign
, &overflow
);
6446 hi
= force_fit_type (TREE_TYPE (arg00
), val
,
6447 -1, overflow
| TREE_OVERFLOW (prod
));
6449 else if (tree_int_cst_sgn (arg01
) >= 0)
6451 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6452 build_int_cst (TREE_TYPE (arg01
), 1));
6453 switch (tree_int_cst_sgn (arg1
))
6456 neg_overflow
= true;
6457 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6462 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6467 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6477 /* A negative divisor reverses the relational operators. */
6478 code
= swap_tree_comparison (code
);
6480 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6481 build_int_cst (TREE_TYPE (arg01
), 1));
6482 switch (tree_int_cst_sgn (arg1
))
6485 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6490 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6495 neg_overflow
= true;
6496 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6508 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6509 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6510 if (TREE_OVERFLOW (hi
))
6511 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6512 if (TREE_OVERFLOW (lo
))
6513 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6514 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6517 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6518 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6519 if (TREE_OVERFLOW (hi
))
6520 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6521 if (TREE_OVERFLOW (lo
))
6522 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6523 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6526 if (TREE_OVERFLOW (lo
))
6528 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6529 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6531 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6534 if (TREE_OVERFLOW (hi
))
6536 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6537 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6539 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6542 if (TREE_OVERFLOW (hi
))
6544 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6545 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6547 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6550 if (TREE_OVERFLOW (lo
))
6552 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6553 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6555 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6565 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6566 equality/inequality test, then return a simplified form of the test
6567 using a sign testing. Otherwise return NULL. TYPE is the desired
6571 fold_single_bit_test_into_sign_test (location_t loc
,
6572 enum tree_code code
, tree arg0
, tree arg1
,
6575 /* If this is testing a single bit, we can optimize the test. */
6576 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6577 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6578 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6580 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6581 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6582 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6584 if (arg00
!= NULL_TREE
6585 /* This is only a win if casting to a signed type is cheap,
6586 i.e. when arg00's type is not a partial mode. */
6587 && TYPE_PRECISION (TREE_TYPE (arg00
))
6588 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00
))))
6590 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6591 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6593 fold_convert_loc (loc
, stype
, arg00
),
6594 build_int_cst (stype
, 0));
6601 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6602 equality/inequality test, then return a simplified form of
6603 the test using shifts and logical operations. Otherwise return
6604 NULL. TYPE is the desired result type. */
6607 fold_single_bit_test (location_t loc
, enum tree_code code
,
6608 tree arg0
, tree arg1
, tree result_type
)
6610 /* If this is testing a single bit, we can optimize the test. */
6611 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6612 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6613 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6615 tree inner
= TREE_OPERAND (arg0
, 0);
6616 tree type
= TREE_TYPE (arg0
);
6617 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6618 machine_mode operand_mode
= TYPE_MODE (type
);
6620 tree signed_type
, unsigned_type
, intermediate_type
;
6623 /* First, see if we can fold the single bit test into a sign-bit
6625 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6630 /* Otherwise we have (A & C) != 0 where C is a single bit,
6631 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6632 Similarly for (A & C) == 0. */
6634 /* If INNER is a right shift of a constant and it plus BITNUM does
6635 not overflow, adjust BITNUM and INNER. */
6636 if (TREE_CODE (inner
) == RSHIFT_EXPR
6637 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6638 && bitnum
< TYPE_PRECISION (type
)
6639 && wi::ltu_p (TREE_OPERAND (inner
, 1),
6640 TYPE_PRECISION (type
) - bitnum
))
6642 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
6643 inner
= TREE_OPERAND (inner
, 0);
6646 /* If we are going to be able to omit the AND below, we must do our
6647 operations as unsigned. If we must use the AND, we have a choice.
6648 Normally unsigned is faster, but for some machines signed is. */
6649 #ifdef LOAD_EXTEND_OP
6650 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6651 && !flag_syntax_only
) ? 0 : 1;
6656 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6657 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6658 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6659 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
6662 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6663 inner
, size_int (bitnum
));
6665 one
= build_int_cst (intermediate_type
, 1);
6667 if (code
== EQ_EXPR
)
6668 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6670 /* Put the AND last so it can combine with more things. */
6671 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6673 /* Make sure to return the proper type. */
6674 inner
= fold_convert_loc (loc
, result_type
, inner
);
6681 /* Check whether we are allowed to reorder operands arg0 and arg1,
6682 such that the evaluation of arg1 occurs before arg0. */
6685 reorder_operands_p (const_tree arg0
, const_tree arg1
)
6687 if (! flag_evaluation_order
)
6689 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6691 return ! TREE_SIDE_EFFECTS (arg0
)
6692 && ! TREE_SIDE_EFFECTS (arg1
);
6695 /* Test whether it is preferable two swap two operands, ARG0 and
6696 ARG1, for example because ARG0 is an integer constant and ARG1
6697 isn't. If REORDER is true, only recommend swapping if we can
6698 evaluate the operands in reverse order. */
6701 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
6703 if (CONSTANT_CLASS_P (arg1
))
6705 if (CONSTANT_CLASS_P (arg0
))
6711 if (TREE_CONSTANT (arg1
))
6713 if (TREE_CONSTANT (arg0
))
6716 if (reorder
&& flag_evaluation_order
6717 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6720 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6721 for commutative and comparison operators. Ensuring a canonical
6722 form allows the optimizers to find additional redundancies without
6723 having to explicitly check for both orderings. */
6724 if (TREE_CODE (arg0
) == SSA_NAME
6725 && TREE_CODE (arg1
) == SSA_NAME
6726 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6729 /* Put SSA_NAMEs last. */
6730 if (TREE_CODE (arg1
) == SSA_NAME
)
6732 if (TREE_CODE (arg0
) == SSA_NAME
)
6735 /* Put variables last. */
6744 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6745 ARG0 is extended to a wider type. */
6748 fold_widened_comparison (location_t loc
, enum tree_code code
,
6749 tree type
, tree arg0
, tree arg1
)
6751 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6753 tree shorter_type
, outer_type
;
6757 if (arg0_unw
== arg0
)
6759 shorter_type
= TREE_TYPE (arg0_unw
);
6761 /* Disable this optimization if we're casting a function pointer
6762 type on targets that require function pointer canonicalization. */
6763 if (targetm
.have_canonicalize_funcptr_for_compare ()
6764 && TREE_CODE (shorter_type
) == POINTER_TYPE
6765 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6768 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6771 arg1_unw
= get_unwidened (arg1
, NULL_TREE
);
6773 /* If possible, express the comparison in the shorter mode. */
6774 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6775 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6776 && (TREE_TYPE (arg1_unw
) == shorter_type
6777 || ((TYPE_PRECISION (shorter_type
)
6778 >= TYPE_PRECISION (TREE_TYPE (arg1_unw
)))
6779 && (TYPE_UNSIGNED (shorter_type
)
6780 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw
))))
6781 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6782 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6783 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6784 && int_fits_type_p (arg1_unw
, shorter_type
))))
6785 return fold_build2_loc (loc
, code
, type
, arg0_unw
,
6786 fold_convert_loc (loc
, shorter_type
, arg1_unw
));
6788 if (TREE_CODE (arg1_unw
) != INTEGER_CST
6789 || TREE_CODE (shorter_type
) != INTEGER_TYPE
6790 || !int_fits_type_p (arg1_unw
, shorter_type
))
6793 /* If we are comparing with the integer that does not fit into the range
6794 of the shorter type, the result is known. */
6795 outer_type
= TREE_TYPE (arg1_unw
);
6796 min
= lower_bound_in_type (outer_type
, shorter_type
);
6797 max
= upper_bound_in_type (outer_type
, shorter_type
);
6799 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6801 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6808 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6813 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6819 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6821 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6826 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6828 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6837 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6838 ARG0 just the signedness is changed. */
6841 fold_sign_changed_comparison (location_t loc
, enum tree_code code
, tree type
,
6842 tree arg0
, tree arg1
)
6845 tree inner_type
, outer_type
;
6847 if (!CONVERT_EXPR_P (arg0
))
6850 outer_type
= TREE_TYPE (arg0
);
6851 arg0_inner
= TREE_OPERAND (arg0
, 0);
6852 inner_type
= TREE_TYPE (arg0_inner
);
6854 /* Disable this optimization if we're casting a function pointer
6855 type on targets that require function pointer canonicalization. */
6856 if (targetm
.have_canonicalize_funcptr_for_compare ()
6857 && TREE_CODE (inner_type
) == POINTER_TYPE
6858 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
6861 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
6864 if (TREE_CODE (arg1
) != INTEGER_CST
6865 && !(CONVERT_EXPR_P (arg1
)
6866 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
6869 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
6874 if (POINTER_TYPE_P (inner_type
) != POINTER_TYPE_P (outer_type
))
6877 if (TREE_CODE (arg1
) == INTEGER_CST
)
6878 arg1
= force_fit_type (inner_type
, wi::to_widest (arg1
), 0,
6879 TREE_OVERFLOW (arg1
));
6881 arg1
= fold_convert_loc (loc
, inner_type
, arg1
);
6883 return fold_build2_loc (loc
, code
, type
, arg0_inner
, arg1
);
6887 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6888 means A >= Y && A != MAX, but in this case we know that
6889 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6892 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
6894 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
6896 if (TREE_CODE (bound
) == LT_EXPR
)
6897 a
= TREE_OPERAND (bound
, 0);
6898 else if (TREE_CODE (bound
) == GT_EXPR
)
6899 a
= TREE_OPERAND (bound
, 1);
6903 typea
= TREE_TYPE (a
);
6904 if (!INTEGRAL_TYPE_P (typea
)
6905 && !POINTER_TYPE_P (typea
))
6908 if (TREE_CODE (ineq
) == LT_EXPR
)
6910 a1
= TREE_OPERAND (ineq
, 1);
6911 y
= TREE_OPERAND (ineq
, 0);
6913 else if (TREE_CODE (ineq
) == GT_EXPR
)
6915 a1
= TREE_OPERAND (ineq
, 0);
6916 y
= TREE_OPERAND (ineq
, 1);
6921 if (TREE_TYPE (a1
) != typea
)
6924 if (POINTER_TYPE_P (typea
))
6926 /* Convert the pointer types into integer before taking the difference. */
6927 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
6928 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
6929 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
6932 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
6934 if (!diff
|| !integer_onep (diff
))
6937 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
6940 /* Fold a sum or difference of at least one multiplication.
6941 Returns the folded tree or NULL if no simplification could be made. */
6944 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
6945 tree arg0
, tree arg1
)
6947 tree arg00
, arg01
, arg10
, arg11
;
6948 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
6950 /* (A * C) +- (B * C) -> (A+-B) * C.
6951 (A * C) +- A -> A * (C+-1).
6952 We are most concerned about the case where C is a constant,
6953 but other combinations show up during loop reduction. Since
6954 it is not difficult, try all four possibilities. */
6956 if (TREE_CODE (arg0
) == MULT_EXPR
)
6958 arg00
= TREE_OPERAND (arg0
, 0);
6959 arg01
= TREE_OPERAND (arg0
, 1);
6961 else if (TREE_CODE (arg0
) == INTEGER_CST
)
6963 arg00
= build_one_cst (type
);
6968 /* We cannot generate constant 1 for fract. */
6969 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6972 arg01
= build_one_cst (type
);
6974 if (TREE_CODE (arg1
) == MULT_EXPR
)
6976 arg10
= TREE_OPERAND (arg1
, 0);
6977 arg11
= TREE_OPERAND (arg1
, 1);
6979 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6981 arg10
= build_one_cst (type
);
6982 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6983 the purpose of this canonicalization. */
6984 if (wi::neg_p (arg1
, TYPE_SIGN (TREE_TYPE (arg1
)))
6985 && negate_expr_p (arg1
)
6986 && code
== PLUS_EXPR
)
6988 arg11
= negate_expr (arg1
);
6996 /* We cannot generate constant 1 for fract. */
6997 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7000 arg11
= build_one_cst (type
);
7004 if (operand_equal_p (arg01
, arg11
, 0))
7005 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7006 else if (operand_equal_p (arg00
, arg10
, 0))
7007 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7008 else if (operand_equal_p (arg00
, arg11
, 0))
7009 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7010 else if (operand_equal_p (arg01
, arg10
, 0))
7011 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7013 /* No identical multiplicands; see if we can find a common
7014 power-of-two factor in non-power-of-two multiplies. This
7015 can help in multi-dimensional array access. */
7016 else if (tree_fits_shwi_p (arg01
)
7017 && tree_fits_shwi_p (arg11
))
7019 HOST_WIDE_INT int01
, int11
, tmp
;
7022 int01
= tree_to_shwi (arg01
);
7023 int11
= tree_to_shwi (arg11
);
7025 /* Move min of absolute values to int11. */
7026 if (absu_hwi (int01
) < absu_hwi (int11
))
7028 tmp
= int01
, int01
= int11
, int11
= tmp
;
7029 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7036 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
7037 /* The remainder should not be a constant, otherwise we
7038 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7039 increased the number of multiplications necessary. */
7040 && TREE_CODE (arg10
) != INTEGER_CST
)
7042 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7043 build_int_cst (TREE_TYPE (arg00
),
7048 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7053 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7054 fold_build2_loc (loc
, code
, type
,
7055 fold_convert_loc (loc
, type
, alt0
),
7056 fold_convert_loc (loc
, type
, alt1
)),
7057 fold_convert_loc (loc
, type
, same
));
7062 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7063 specified by EXPR into the buffer PTR of length LEN bytes.
7064 Return the number of bytes placed in the buffer, or zero
7068 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7070 tree type
= TREE_TYPE (expr
);
7071 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7072 int byte
, offset
, word
, words
;
7073 unsigned char value
;
7075 if ((off
== -1 && total_bytes
> len
)
7076 || off
>= total_bytes
)
7080 words
= total_bytes
/ UNITS_PER_WORD
;
7082 for (byte
= 0; byte
< total_bytes
; byte
++)
7084 int bitpos
= byte
* BITS_PER_UNIT
;
7085 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7087 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7089 if (total_bytes
> UNITS_PER_WORD
)
7091 word
= byte
/ UNITS_PER_WORD
;
7092 if (WORDS_BIG_ENDIAN
)
7093 word
= (words
- 1) - word
;
7094 offset
= word
* UNITS_PER_WORD
;
7095 if (BYTES_BIG_ENDIAN
)
7096 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7098 offset
+= byte
% UNITS_PER_WORD
;
7101 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7103 && offset
- off
< len
)
7104 ptr
[offset
- off
] = value
;
7106 return MIN (len
, total_bytes
- off
);
7110 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7111 specified by EXPR into the buffer PTR of length LEN bytes.
7112 Return the number of bytes placed in the buffer, or zero
7116 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7118 tree type
= TREE_TYPE (expr
);
7119 machine_mode mode
= TYPE_MODE (type
);
7120 int total_bytes
= GET_MODE_SIZE (mode
);
7121 FIXED_VALUE_TYPE value
;
7122 tree i_value
, i_type
;
7124 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7127 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7129 if (NULL_TREE
== i_type
7130 || TYPE_PRECISION (i_type
) != total_bytes
)
7133 value
= TREE_FIXED_CST (expr
);
7134 i_value
= double_int_to_tree (i_type
, value
.data
);
7136 return native_encode_int (i_value
, ptr
, len
, off
);
7140 /* Subroutine of native_encode_expr. Encode the REAL_CST
7141 specified by EXPR into the buffer PTR of length LEN bytes.
7142 Return the number of bytes placed in the buffer, or zero
7146 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7148 tree type
= TREE_TYPE (expr
);
7149 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7150 int byte
, offset
, word
, words
, bitpos
;
7151 unsigned char value
;
7153 /* There are always 32 bits in each long, no matter the size of
7154 the hosts long. We handle floating point representations with
7158 if ((off
== -1 && total_bytes
> len
)
7159 || off
>= total_bytes
)
7163 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7165 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7167 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7168 bitpos
+= BITS_PER_UNIT
)
7170 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7171 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7173 if (UNITS_PER_WORD
< 4)
7175 word
= byte
/ UNITS_PER_WORD
;
7176 if (WORDS_BIG_ENDIAN
)
7177 word
= (words
- 1) - word
;
7178 offset
= word
* UNITS_PER_WORD
;
7179 if (BYTES_BIG_ENDIAN
)
7180 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7182 offset
+= byte
% UNITS_PER_WORD
;
7185 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7186 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7188 && offset
- off
< len
)
7189 ptr
[offset
- off
] = value
;
7191 return MIN (len
, total_bytes
- off
);
7194 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7195 specified by EXPR into the buffer PTR of length LEN bytes.
7196 Return the number of bytes placed in the buffer, or zero
7200 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7205 part
= TREE_REALPART (expr
);
7206 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7210 part
= TREE_IMAGPART (expr
);
7212 off
= MAX (0, off
- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part
))));
7213 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
, off
);
7217 return rsize
+ isize
;
7221 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7222 specified by EXPR into the buffer PTR of length LEN bytes.
7223 Return the number of bytes placed in the buffer, or zero
7227 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7234 count
= VECTOR_CST_NELTS (expr
);
7235 itype
= TREE_TYPE (TREE_TYPE (expr
));
7236 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7237 for (i
= 0; i
< count
; i
++)
7244 elem
= VECTOR_CST_ELT (expr
, i
);
7245 int res
= native_encode_expr (elem
, ptr
+offset
, len
-offset
, off
);
7246 if ((off
== -1 && res
!= size
)
7259 /* Subroutine of native_encode_expr. Encode the STRING_CST
7260 specified by EXPR into the buffer PTR of length LEN bytes.
7261 Return the number of bytes placed in the buffer, or zero
7265 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7267 tree type
= TREE_TYPE (expr
);
7268 HOST_WIDE_INT total_bytes
;
7270 if (TREE_CODE (type
) != ARRAY_TYPE
7271 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7272 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7273 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7275 total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (type
));
7276 if ((off
== -1 && total_bytes
> len
)
7277 || off
>= total_bytes
)
7281 if (TREE_STRING_LENGTH (expr
) - off
< MIN (total_bytes
, len
))
7284 if (off
< TREE_STRING_LENGTH (expr
))
7286 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7287 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7289 memset (ptr
+ written
, 0,
7290 MIN (total_bytes
- written
, len
- written
));
7293 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, MIN (total_bytes
, len
));
7294 return MIN (total_bytes
- off
, len
);
7298 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7299 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7300 buffer PTR of length LEN bytes. If OFF is not -1 then start
7301 the encoding at byte offset OFF and encode at most LEN bytes.
7302 Return the number of bytes placed in the buffer, or zero upon failure. */
7305 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7307 switch (TREE_CODE (expr
))
7310 return native_encode_int (expr
, ptr
, len
, off
);
7313 return native_encode_real (expr
, ptr
, len
, off
);
7316 return native_encode_fixed (expr
, ptr
, len
, off
);
7319 return native_encode_complex (expr
, ptr
, len
, off
);
7322 return native_encode_vector (expr
, ptr
, len
, off
);
7325 return native_encode_string (expr
, ptr
, len
, off
);
7333 /* Subroutine of native_interpret_expr. Interpret the contents of
7334 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7335 If the buffer cannot be interpreted, return NULL_TREE. */
7338 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7340 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7342 if (total_bytes
> len
7343 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7346 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
7348 return wide_int_to_tree (type
, result
);
7352 /* Subroutine of native_interpret_expr. Interpret the contents of
7353 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7354 If the buffer cannot be interpreted, return NULL_TREE. */
7357 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
7359 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7361 FIXED_VALUE_TYPE fixed_value
;
7363 if (total_bytes
> len
7364 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7367 result
= double_int::from_buffer (ptr
, total_bytes
);
7368 fixed_value
= fixed_from_double_int (result
, TYPE_MODE (type
));
7370 return build_fixed (type
, fixed_value
);
7374 /* Subroutine of native_interpret_expr. Interpret the contents of
7375 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7376 If the buffer cannot be interpreted, return NULL_TREE. */
7379 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7381 machine_mode mode
= TYPE_MODE (type
);
7382 int total_bytes
= GET_MODE_SIZE (mode
);
7383 int byte
, offset
, word
, words
, bitpos
;
7384 unsigned char value
;
7385 /* There are always 32 bits in each long, no matter the size of
7386 the hosts long. We handle floating point representations with
7391 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7392 if (total_bytes
> len
|| total_bytes
> 24)
7394 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7396 memset (tmp
, 0, sizeof (tmp
));
7397 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7398 bitpos
+= BITS_PER_UNIT
)
7400 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7401 if (UNITS_PER_WORD
< 4)
7403 word
= byte
/ UNITS_PER_WORD
;
7404 if (WORDS_BIG_ENDIAN
)
7405 word
= (words
- 1) - word
;
7406 offset
= word
* UNITS_PER_WORD
;
7407 if (BYTES_BIG_ENDIAN
)
7408 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7410 offset
+= byte
% UNITS_PER_WORD
;
7413 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7414 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7416 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7419 real_from_target (&r
, tmp
, mode
);
7420 return build_real (type
, r
);
7424 /* Subroutine of native_interpret_expr. Interpret the contents of
7425 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7426 If the buffer cannot be interpreted, return NULL_TREE. */
7429 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7431 tree etype
, rpart
, ipart
;
7434 etype
= TREE_TYPE (type
);
7435 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7438 rpart
= native_interpret_expr (etype
, ptr
, size
);
7441 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7444 return build_complex (type
, rpart
, ipart
);
7448 /* Subroutine of native_interpret_expr. Interpret the contents of
7449 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7450 If the buffer cannot be interpreted, return NULL_TREE. */
7453 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7459 etype
= TREE_TYPE (type
);
7460 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7461 count
= TYPE_VECTOR_SUBPARTS (type
);
7462 if (size
* count
> len
)
7465 elements
= XALLOCAVEC (tree
, count
);
7466 for (i
= count
- 1; i
>= 0; i
--)
7468 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7473 return build_vector (type
, elements
);
7477 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7478 the buffer PTR of length LEN as a constant of type TYPE. For
7479 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7480 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7481 return NULL_TREE. */
7484 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7486 switch (TREE_CODE (type
))
7492 case REFERENCE_TYPE
:
7493 return native_interpret_int (type
, ptr
, len
);
7496 return native_interpret_real (type
, ptr
, len
);
7498 case FIXED_POINT_TYPE
:
7499 return native_interpret_fixed (type
, ptr
, len
);
7502 return native_interpret_complex (type
, ptr
, len
);
7505 return native_interpret_vector (type
, ptr
, len
);
7512 /* Returns true if we can interpret the contents of a native encoding
7516 can_native_interpret_type_p (tree type
)
7518 switch (TREE_CODE (type
))
7524 case REFERENCE_TYPE
:
7525 case FIXED_POINT_TYPE
:
7535 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7536 TYPE at compile-time. If we're unable to perform the conversion
7537 return NULL_TREE. */
7540 fold_view_convert_expr (tree type
, tree expr
)
7542 /* We support up to 512-bit values (for V8DFmode). */
7543 unsigned char buffer
[64];
7546 /* Check that the host and target are sane. */
7547 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7550 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7554 return native_interpret_expr (type
, buffer
, len
);
7557 /* Build an expression for the address of T. Folds away INDIRECT_REF
7558 to avoid confusing the gimplify process. */
7561 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7563 /* The size of the object is not relevant when talking about its address. */
7564 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7565 t
= TREE_OPERAND (t
, 0);
7567 if (TREE_CODE (t
) == INDIRECT_REF
)
7569 t
= TREE_OPERAND (t
, 0);
7571 if (TREE_TYPE (t
) != ptrtype
)
7572 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7574 else if (TREE_CODE (t
) == MEM_REF
7575 && integer_zerop (TREE_OPERAND (t
, 1)))
7576 return TREE_OPERAND (t
, 0);
7577 else if (TREE_CODE (t
) == MEM_REF
7578 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7579 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7580 TREE_OPERAND (t
, 0),
7581 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7582 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7584 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7586 if (TREE_TYPE (t
) != ptrtype
)
7587 t
= fold_convert_loc (loc
, ptrtype
, t
);
7590 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7595 /* Build an expression for the address of T. */
7598 build_fold_addr_expr_loc (location_t loc
, tree t
)
7600 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7602 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7605 /* Fold a unary expression of code CODE and type TYPE with operand
7606 OP0. Return the folded expression if folding is successful.
7607 Otherwise, return NULL_TREE. */
7610 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7614 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7616 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7617 && TREE_CODE_LENGTH (code
) == 1);
7622 if (CONVERT_EXPR_CODE_P (code
)
7623 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7625 /* Don't use STRIP_NOPS, because signedness of argument type
7627 STRIP_SIGN_NOPS (arg0
);
7631 /* Strip any conversions that don't change the mode. This
7632 is safe for every expression, except for a comparison
7633 expression because its signedness is derived from its
7636 Note that this is done as an internal manipulation within
7637 the constant folder, in order to find the simplest
7638 representation of the arguments so that their form can be
7639 studied. In any cases, the appropriate type conversions
7640 should be put back in the tree that will get out of the
7645 if (CONSTANT_CLASS_P (arg0
))
7647 tree tem
= const_unop (code
, type
, arg0
);
7650 if (TREE_TYPE (tem
) != type
)
7651 tem
= fold_convert_loc (loc
, type
, tem
);
7657 tem
= generic_simplify (loc
, code
, type
, op0
);
7661 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7663 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7664 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7665 fold_build1_loc (loc
, code
, type
,
7666 fold_convert_loc (loc
, TREE_TYPE (op0
),
7667 TREE_OPERAND (arg0
, 1))));
7668 else if (TREE_CODE (arg0
) == COND_EXPR
)
7670 tree arg01
= TREE_OPERAND (arg0
, 1);
7671 tree arg02
= TREE_OPERAND (arg0
, 2);
7672 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7673 arg01
= fold_build1_loc (loc
, code
, type
,
7674 fold_convert_loc (loc
,
7675 TREE_TYPE (op0
), arg01
));
7676 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7677 arg02
= fold_build1_loc (loc
, code
, type
,
7678 fold_convert_loc (loc
,
7679 TREE_TYPE (op0
), arg02
));
7680 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7683 /* If this was a conversion, and all we did was to move into
7684 inside the COND_EXPR, bring it back out. But leave it if
7685 it is a conversion from integer to integer and the
7686 result precision is no wider than a word since such a
7687 conversion is cheap and may be optimized away by combine,
7688 while it couldn't if it were outside the COND_EXPR. Then return
7689 so we don't get into an infinite recursion loop taking the
7690 conversion out and then back in. */
7692 if ((CONVERT_EXPR_CODE_P (code
)
7693 || code
== NON_LVALUE_EXPR
)
7694 && TREE_CODE (tem
) == COND_EXPR
7695 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7696 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7697 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7698 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7699 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7700 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7701 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7703 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7704 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7705 || flag_syntax_only
))
7706 tem
= build1_loc (loc
, code
, type
,
7708 TREE_TYPE (TREE_OPERAND
7709 (TREE_OPERAND (tem
, 1), 0)),
7710 TREE_OPERAND (tem
, 0),
7711 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7712 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7720 case NON_LVALUE_EXPR
:
7721 if (!maybe_lvalue_p (op0
))
7722 return fold_convert_loc (loc
, type
, op0
);
7727 case FIX_TRUNC_EXPR
:
7728 if (COMPARISON_CLASS_P (op0
))
7730 /* If we have (type) (a CMP b) and type is an integral type, return
7731 new expression involving the new type. Canonicalize
7732 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7734 Do not fold the result as that would not simplify further, also
7735 folding again results in recursions. */
7736 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7737 return build2_loc (loc
, TREE_CODE (op0
), type
,
7738 TREE_OPERAND (op0
, 0),
7739 TREE_OPERAND (op0
, 1));
7740 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7741 && TREE_CODE (type
) != VECTOR_TYPE
)
7742 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7743 constant_boolean_node (true, type
),
7744 constant_boolean_node (false, type
));
7747 /* Handle (T *)&A.B.C for A being of type T and B and C
7748 living at offset zero. This occurs frequently in
7749 C++ upcasting and then accessing the base. */
7750 if (TREE_CODE (op0
) == ADDR_EXPR
7751 && POINTER_TYPE_P (type
)
7752 && handled_component_p (TREE_OPERAND (op0
, 0)))
7754 HOST_WIDE_INT bitsize
, bitpos
;
7757 int unsignedp
, volatilep
;
7758 tree base
= TREE_OPERAND (op0
, 0);
7759 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7760 &mode
, &unsignedp
, &volatilep
, false);
7761 /* If the reference was to a (constant) zero offset, we can use
7762 the address of the base if it has the same base type
7763 as the result type and the pointer type is unqualified. */
7764 if (! offset
&& bitpos
== 0
7765 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7766 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7767 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7768 return fold_convert_loc (loc
, type
,
7769 build_fold_addr_expr_loc (loc
, base
));
7772 if (TREE_CODE (op0
) == MODIFY_EXPR
7773 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7774 /* Detect assigning a bitfield. */
7775 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7777 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7779 /* Don't leave an assignment inside a conversion
7780 unless assigning a bitfield. */
7781 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7782 /* First do the assignment, then return converted constant. */
7783 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7784 TREE_NO_WARNING (tem
) = 1;
7785 TREE_USED (tem
) = 1;
7789 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7790 constants (if x has signed type, the sign bit cannot be set
7791 in c). This folds extension into the BIT_AND_EXPR.
7792 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7793 very likely don't have maximal range for their precision and this
7794 transformation effectively doesn't preserve non-maximal ranges. */
7795 if (TREE_CODE (type
) == INTEGER_TYPE
7796 && TREE_CODE (op0
) == BIT_AND_EXPR
7797 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7799 tree and_expr
= op0
;
7800 tree and0
= TREE_OPERAND (and_expr
, 0);
7801 tree and1
= TREE_OPERAND (and_expr
, 1);
7804 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7805 || (TYPE_PRECISION (type
)
7806 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7808 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7809 <= HOST_BITS_PER_WIDE_INT
7810 && tree_fits_uhwi_p (and1
))
7812 unsigned HOST_WIDE_INT cst
;
7814 cst
= tree_to_uhwi (and1
);
7815 cst
&= HOST_WIDE_INT_M1U
7816 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7817 change
= (cst
== 0);
7818 #ifdef LOAD_EXTEND_OP
7820 && !flag_syntax_only
7821 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7824 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7825 and0
= fold_convert_loc (loc
, uns
, and0
);
7826 and1
= fold_convert_loc (loc
, uns
, and1
);
7832 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
7833 TREE_OVERFLOW (and1
));
7834 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7835 fold_convert_loc (loc
, type
, and0
), tem
);
7839 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7840 when one of the new casts will fold away. Conservatively we assume
7841 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7842 if (POINTER_TYPE_P (type
)
7843 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7844 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7845 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7846 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7847 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7849 tree arg00
= TREE_OPERAND (arg0
, 0);
7850 tree arg01
= TREE_OPERAND (arg0
, 1);
7852 return fold_build_pointer_plus_loc
7853 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7856 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7857 of the same precision, and X is an integer type not narrower than
7858 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7859 if (INTEGRAL_TYPE_P (type
)
7860 && TREE_CODE (op0
) == BIT_NOT_EXPR
7861 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7862 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7863 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7865 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7866 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7867 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7868 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7869 fold_convert_loc (loc
, type
, tem
));
7872 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7873 type of X and Y (integer types only). */
7874 if (INTEGRAL_TYPE_P (type
)
7875 && TREE_CODE (op0
) == MULT_EXPR
7876 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7877 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7879 /* Be careful not to introduce new overflows. */
7881 if (TYPE_OVERFLOW_WRAPS (type
))
7884 mult_type
= unsigned_type_for (type
);
7886 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7888 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7889 fold_convert_loc (loc
, mult_type
,
7890 TREE_OPERAND (op0
, 0)),
7891 fold_convert_loc (loc
, mult_type
,
7892 TREE_OPERAND (op0
, 1)));
7893 return fold_convert_loc (loc
, type
, tem
);
7899 case VIEW_CONVERT_EXPR
:
7900 if (TREE_CODE (op0
) == MEM_REF
)
7901 return fold_build2_loc (loc
, MEM_REF
, type
,
7902 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
7907 tem
= fold_negate_expr (loc
, arg0
);
7909 return fold_convert_loc (loc
, type
, tem
);
7913 /* Convert fabs((double)float) into (double)fabsf(float). */
7914 if (TREE_CODE (arg0
) == NOP_EXPR
7915 && TREE_CODE (type
) == REAL_TYPE
)
7917 tree targ0
= strip_float_extensions (arg0
);
7919 return fold_convert_loc (loc
, type
,
7920 fold_build1_loc (loc
, ABS_EXPR
,
7925 /* Strip sign ops from argument. */
7926 if (TREE_CODE (type
) == REAL_TYPE
)
7928 tem
= fold_strip_sign_ops (arg0
);
7930 return fold_build1_loc (loc
, ABS_EXPR
, type
,
7931 fold_convert_loc (loc
, type
, tem
));
7936 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7937 return fold_convert_loc (loc
, type
, arg0
);
7938 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
7940 tree itype
= TREE_TYPE (type
);
7941 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
7942 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
7943 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
7944 negate_expr (ipart
));
7946 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7947 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
7951 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7952 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7953 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7954 fold_convert_loc (loc
, type
,
7955 TREE_OPERAND (arg0
, 0)))))
7956 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
7957 fold_convert_loc (loc
, type
,
7958 TREE_OPERAND (arg0
, 1)));
7959 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7960 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7961 fold_convert_loc (loc
, type
,
7962 TREE_OPERAND (arg0
, 1)))))
7963 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
7964 fold_convert_loc (loc
, type
,
7965 TREE_OPERAND (arg0
, 0)), tem
);
7969 case TRUTH_NOT_EXPR
:
7970 /* Note that the operand of this must be an int
7971 and its values must be 0 or 1.
7972 ("true" is a fixed value perhaps depending on the language,
7973 but we don't handle values other than 1 correctly yet.) */
7974 tem
= fold_truth_not_expr (loc
, arg0
);
7977 return fold_convert_loc (loc
, type
, tem
);
7980 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7981 return fold_convert_loc (loc
, type
, arg0
);
7982 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
7984 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7985 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
7986 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
7987 TREE_OPERAND (arg0
, 0)),
7988 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
7989 TREE_OPERAND (arg0
, 1)));
7990 return fold_convert_loc (loc
, type
, tem
);
7992 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7994 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7995 tem
= fold_build1_loc (loc
, REALPART_EXPR
, itype
,
7996 TREE_OPERAND (arg0
, 0));
7997 return fold_convert_loc (loc
, type
, tem
);
7999 if (TREE_CODE (arg0
) == CALL_EXPR
)
8001 tree fn
= get_callee_fndecl (arg0
);
8002 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8003 switch (DECL_FUNCTION_CODE (fn
))
8005 CASE_FLT_FN (BUILT_IN_CEXPI
):
8006 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
8008 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8018 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8019 return build_zero_cst (type
);
8020 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8022 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8023 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8024 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8025 TREE_OPERAND (arg0
, 0)),
8026 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8027 TREE_OPERAND (arg0
, 1)));
8028 return fold_convert_loc (loc
, type
, tem
);
8030 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8032 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8033 tem
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
8034 return fold_convert_loc (loc
, type
, negate_expr (tem
));
8036 if (TREE_CODE (arg0
) == CALL_EXPR
)
8038 tree fn
= get_callee_fndecl (arg0
);
8039 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8040 switch (DECL_FUNCTION_CODE (fn
))
8042 CASE_FLT_FN (BUILT_IN_CEXPI
):
8043 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
8045 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8055 /* Fold *&X to X if X is an lvalue. */
8056 if (TREE_CODE (op0
) == ADDR_EXPR
)
8058 tree op00
= TREE_OPERAND (op0
, 0);
8059 if ((TREE_CODE (op00
) == VAR_DECL
8060 || TREE_CODE (op00
) == PARM_DECL
8061 || TREE_CODE (op00
) == RESULT_DECL
)
8062 && !TREE_READONLY (op00
))
8069 } /* switch (code) */
8073 /* If the operation was a conversion do _not_ mark a resulting constant
8074 with TREE_OVERFLOW if the original constant was not. These conversions
8075 have implementation defined behavior and retaining the TREE_OVERFLOW
8076 flag here would confuse later passes such as VRP. */
/* NOTE(review): this chunk is a line-mangled extraction (original line
   numbers are fused into the text and several lines — the return type,
   braces, the leading "if (res" condition and the final return — are
   missing from this view).  Tokens below are kept byte-identical.
   Visible behavior: fold the unary expression via fold_unary_loc, then,
   when both the result and the operand are INTEGER_CSTs and CODE is a
   conversion, copy the operand's TREE_OVERFLOW flag onto the result. */
8078 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
8079 tree type
, tree op0
)
8081 tree res
= fold_unary_loc (loc
, code
, type
, op0
)
;
8083 && TREE_CODE (res
) == INTEGER_CST
8084 && TREE_CODE (op0
) == INTEGER_CST
8085 && CONVERT_EXPR_CODE_P (code
))
8086 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
8091 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8092 operands OP0 and OP1. LOC is the location of the resulting expression.
8093 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8094 Return the folded expression if folding is successful. Otherwise,
8095 return NULL_TREE. */
/* NOTE(review): line-mangled extraction — the return type, braces and a
   number of condition/return lines are missing from this view (original
   line numbering jumps).  Tokens kept verbatim; only comments edited. */
8097 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
8098 tree arg0
, tree arg1
, tree op0
, tree op1
)
8102 /* We only do these simplifications if we are optimizing. */
8106 /* Check for things like (A || B) && (A || C). We can convert this
8107 to A || (B && C). Note that either operator can be any of the four
8108 truth and/or operations and the transformation will still be
8109 valid. Also note that we only care about order for the
8110 ANDIF and ORIF operators. If B contains side effects, this
8111 might change the truth-value of A. */
8112 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8113 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8114 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8115 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8116 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8117 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8119 tree a00
= TREE_OPERAND (arg0
, 0);
8120 tree a01
= TREE_OPERAND (arg0
, 1);
8121 tree a10
= TREE_OPERAND (arg1
, 0);
8122 tree a11
= TREE_OPERAND (arg1
, 1);
/* Reassociation is unrestricted only for the non-short-circuit codes. */
8123 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8124 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8125 && (code
== TRUTH_AND_EXPR
8126 || code
== TRUTH_OR_EXPR
));
8128 if (operand_equal_p (a00
, a10
, 0))
8129 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8130 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8131 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8132 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8133 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8134 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8135 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8136 fold_build2_loc (loc
, code
, type
, a00
, a11
));
8138 /* This case is tricky because we must either have commutative
8139 operators or else A10 must not have side-effects. */
8141 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8142 && operand_equal_p (a01
, a11
, 0))
8143 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8144 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8148 /* See if we can build a range comparison. */
8149 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
/* Merge a short-circuit op whose LHS is the opposite kind of
   short-circuit op (AND-IF over OR-IF or vice versa). */
8152 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8153 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8155 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8157 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8160 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8161 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8163 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8165 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
8168 /* Check for the possibility of merging component references. If our
8169 lhs is another similar operation, try to merge its rhs with our
8170 rhs. Then try to merge our lhs and rhs. */
8171 if (TREE_CODE (arg0
) == code
8172 && 0 != (tem
= fold_truth_andor_1 (loc
, code
, type
,
8173 TREE_OPERAND (arg0
, 1), arg1
)))
8174 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8176 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
/* Target prefers non-short-circuit logical ops: rewrite AND-IF/OR-IF
   chains into plain AND/OR where trap/side-effect safe. */
8179 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8180 && (code
== TRUTH_AND_EXPR
8181 || code
== TRUTH_ANDIF_EXPR
8182 || code
== TRUTH_OR_EXPR
8183 || code
== TRUTH_ORIF_EXPR
))
8185 enum tree_code ncode
, icode
;
8187 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
8188 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
8189 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
8191 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8192 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8193 We don't want to pack more than two leafs to a non-IF AND/OR
8195 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8196 equal to IF-CODE, then we don't want to add right-hand operand.
8197 If the inner right-hand side of left-hand operand has
8198 side-effects, or isn't simple, then we can't add to it,
8199 as otherwise we might destroy if-sequence. */
8200 if (TREE_CODE (arg0
) == icode
8201 && simple_operand_p_2 (arg1
)
8202 /* Needed for sequence points to handle trappings, and
8204 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
8206 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
8208 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
8211 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8212 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8213 else if (TREE_CODE (arg1
) == icode
8214 && simple_operand_p_2 (arg0
)
8215 /* Needed for sequence points to handle trappings, and
8217 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
8219 tem
= fold_build2_loc (loc
, ncode
, type
,
8220 arg0
, TREE_OPERAND (arg1
, 0));
8221 return fold_build2_loc (loc
, icode
, type
, tem
,
8222 TREE_OPERAND (arg1
, 1));
8224 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8226 For sequence point consistency, we need to check for trapping,
8227 and side-effects. */
8228 else if (code
== icode
&& simple_operand_p_2 (arg0
)
8229 && simple_operand_p_2 (arg1
))
8230 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
8236 /* Fold a binary expression of code CODE and type TYPE with operands
8237 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8238 Return the folded expression if folding is successful. Otherwise,
8239 return NULL_TREE. */
/* NOTE(review): line-mangled extraction — the return type, the else
   branch after the MAX_EXPR test (original lines 8250-8252) and the
   final return are missing from this view.  Tokens kept verbatim.
   Visible behavior: compute the complementary code (MIN<->MAX), then
   fold the four idempotence patterns MIN(MAX(a,b),b), MIN(MAX(b,a),b),
   MIN(a,MAX(a,b)) and MIN(a,MAX(b,a)) — and by symmetry their MAX
   counterparts — via omit_one_operand_loc, which preserves any side
   effects of the discarded operand. */
8242 fold_minmax (location_t loc
, enum tree_code code
, tree type
, tree op0
, tree op1
)
8244 enum tree_code compl_code
;
8246 if (code
== MIN_EXPR
)
8247 compl_code
= MAX_EXPR
;
8248 else if (code
== MAX_EXPR
)
8249 compl_code
= MIN_EXPR
;
8253 /* MIN (MAX (a, b), b) == b. */
8254 if (TREE_CODE (op0
) == compl_code
8255 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
8256 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 0));
8258 /* MIN (MAX (b, a), b) == b. */
8259 if (TREE_CODE (op0
) == compl_code
8260 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
8261 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
8262 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 1));
8264 /* MIN (a, MAX (a, b)) == a. */
8265 if (TREE_CODE (op1
) == compl_code
8266 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
8267 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
8268 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 1));
8270 /* MIN (a, MAX (b, a)) == a. */
8271 if (TREE_CODE (op1
) == compl_code
8272 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
8273 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
8274 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 0));
8279 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8280 by changing CODE to reduce the magnitude of constants involved in
8281 ARG0 of the comparison.
8282 Returns a canonicalized comparison tree if a simplification was
8283 possible, otherwise returns NULL_TREE.
8284 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8285 valid if signed overflow is undefined. */
/* NOTE(review): line-mangled extraction — return type, braces, several
   early "return NULL_TREE" lines and the bodies that update CODE in the
   code/sign dispatch below are missing from this view.  Tokens kept
   verbatim.  Visible behavior: match "A +- CST cmp arg1" (only when
   signed overflow is undefined for the type) or a sole "CST cmp arg1",
   pick the constant and its sign, adjust the comparison code so the
   constant's magnitude can shrink by one, flag *strict_overflow_p, and
   rebuild the comparison with the reduced constant — refusing when the
   constant already sits at the type's min/max value. */
8288 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8289 tree arg0
, tree arg1
,
8290 bool *strict_overflow_p
)
8292 enum tree_code code0
= TREE_CODE (arg0
);
8293 tree t
, cst0
= NULL_TREE
;
8297 /* Match A +- CST code arg1 and CST code arg1. We can change the
8298 first form only if overflow is undefined. */
8299 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8300 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
8301 /* In principle pointers also have undefined overflow behavior,
8302 but that causes problems elsewhere. */
8303 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8304 && (code0
== MINUS_EXPR
8305 || code0
== PLUS_EXPR
)
8306 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8307 || code0
== INTEGER_CST
))
8310 /* Identify the constant in arg0 and its sign. */
8311 if (code0
== INTEGER_CST
)
8314 cst0
= TREE_OPERAND (arg0
, 1);
8315 sgn0
= tree_int_cst_sgn (cst0
);
8317 /* Overflowed constants and zero will cause problems. */
8318 if (integer_zerop (cst0
)
8319 || TREE_OVERFLOW (cst0
))
8322 /* See if we can reduce the magnitude of the constant in
8323 arg0 by changing the comparison code. */
8324 if (code0
== INTEGER_CST
)
8326 /* CST <= arg1 -> CST-1 < arg1. */
8327 if (code
== LE_EXPR
&& sgn0
== 1)
8329 /* -CST < arg1 -> -CST-1 <= arg1. */
8330 else if (code
== LT_EXPR
&& sgn0
== -1)
8332 /* CST > arg1 -> CST-1 >= arg1. */
8333 else if (code
== GT_EXPR
&& sgn0
== 1)
8335 /* -CST >= arg1 -> -CST-1 > arg1. */
8336 else if (code
== GE_EXPR
&& sgn0
== -1)
8340 /* arg1 code' CST' might be more canonical. */
8345 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8347 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8349 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8350 else if (code
== GT_EXPR
8351 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8353 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8354 else if (code
== LE_EXPR
8355 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8357 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8358 else if (code
== GE_EXPR
8359 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8363 *strict_overflow_p
= true;
8366 /* Now build the constant reduced in magnitude. But not if that
8367 would produce one outside of its types range. */
8368 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8370 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8371 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8373 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8374 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8375 /* We cannot swap the comparison here as that would cause us to
8376 endlessly recurse. */
8379 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8380 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8381 if (code0
!= INTEGER_CST
)
8382 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8383 t
= fold_convert (TREE_TYPE (arg1
), t
);
8385 /* If swapping might yield to a more canonical form, do so. */
8387 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, arg1
, t
);
8389 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8392 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8393 overflow further. Try to decrease the magnitude of constants involved
8394 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8395 and put sole constants at the second argument position.
8396 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
/* NOTE(review): line-mangled extraction — return type, the "if (t)"
   guard before the first warning, and the trailing returns are missing
   from this view.  Tokens kept verbatim.  Visible behavior: try
   maybe_canonicalize_comparison_1 on (arg0, arg1); if that set
   strict_overflow_p, emit the magnitude-reduction overflow warning;
   otherwise retry with the swapped comparison code on (arg1, arg0). */
8399 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8400 tree arg0
, tree arg1
)
8403 bool strict_overflow_p
;
8404 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8405 "when reducing constant in comparison");
8407 /* Try canonicalization by simplifying arg0. */
8408 strict_overflow_p
= false;
8409 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8410 &strict_overflow_p
);
8413 if (strict_overflow_p
)
8414 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8418 /* Try canonicalization by simplifying arg1 using the swapped
8420 code
= swap_tree_comparison (code
);
8421 strict_overflow_p
= false;
8422 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8423 &strict_overflow_p
);
8424 if (t
&& strict_overflow_p
)
8425 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8429 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8430 space. This is used to avoid issuing overflow warnings for
8431 expressions like &p->x which can not wrap. */
/* NOTE(review): line-mangled extraction — return type, several early
   "return true/false" lines and the declarations of wi_offset/overflow
   are missing from this view.  Tokens kept verbatim.  Visible behavior:
   bail out for non-pointer BASE; build a wide_int offset (zero when
   OFFSET is absent, reject non-constant/overflowed offsets), add the
   byte-converted BITPOS with unsigned overflow detection, require the
   total to fit an unsigned HOST_WIDE_INT, then compare it against the
   pointed-to size — tightened to the object's own size when BASE is an
   ADDR_EXPR of a smaller object. */
8434 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8436 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8443 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
8444 if (offset
== NULL_TREE
)
8445 wi_offset
= wi::zero (precision
);
8446 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8452 wide_int units
= wi::shwi (bitpos
/ BITS_PER_UNIT
, precision
);
8453 wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
8457 if (!wi::fits_uhwi_p (total
))
8460 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8464 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8466 if (TREE_CODE (base
) == ADDR_EXPR
)
8468 HOST_WIDE_INT base_size
;
8470 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8471 if (base_size
> 0 && size
< base_size
)
8475 return total
.to_uhwi () > (unsigned HOST_WIDE_INT
) size
;
8478 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8479 kind INTEGER_CST. This makes sure to properly sign-extend the
8482 static HOST_WIDE_INT
/* NOTE(review): line-mangled extraction — the tail of the leading
   comment (original lines 8480-8481), braces and the fall-through
   "return w" are missing from this view.  Tokens kept verbatim.
   Visible behavior: take the low INTEGER_CST element and sign-extend
   it from the type's precision when that precision is narrower than
   HOST_BITS_PER_WIDE_INT. */
8483 size_low_cst (const_tree t
)
8485 HOST_WIDE_INT w
= TREE_INT_CST_ELT (t
, 0);
8486 int prec
= TYPE_PRECISION (TREE_TYPE (t
));
8487 if (prec
< HOST_BITS_PER_WIDE_INT
)
8488 return sext_hwi (w
, prec
);
8492 /* Subroutine of fold_binary. This routine performs all of the
8493 transformations that are common to the equality/inequality
8494 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8495 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8496 fold_binary should call fold_binary. Fold a comparison with
8497 tree code CODE and type TYPE with operands OP0 and OP1. Return
8498 the folded comparison or NULL_TREE. */
/* NOTE(review): line-mangled extraction — this is the largest and most
   heavily damaged block: braces, many if-headers, switch labels (e.g.
   the bitpos comparison ladder and the 3-bit-mask switch), returns and
   local declarations are missing from this view.  Tokens kept verbatim
   throughout; only comments are added.  Visible transformation groups:
   (1) X +- C1 CMP C2 -> X CMP C2 -+ C1, with INT_MIN/INT_MAX overflow
   canonicalization; (2) pointer comparisons decomposed into base
   object + constant/variable offset comparisons; (3) X +- C1 CMP
   Y +- C2 constant re-balancing for undefined-overflow signed types;
   (4) canonicalization via maybe_canonicalize_comparison; (5) widening
   and signedness-change comparison narrowing; (6) MIN/MAX-vs-constant;
   (7) two-value comparison-expression folding via eval_subst; (8)
   X/C1 op C2 range-test folding via fold_div_compare. */
8501 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8504 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
8505 tree arg0
, arg1
, tem
;
8510 STRIP_SIGN_NOPS (arg0
);
8511 STRIP_SIGN_NOPS (arg1
);
8513 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8514 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8516 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8517 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
8518 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8519 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8520 && TREE_CODE (arg1
) == INTEGER_CST
8521 && !TREE_OVERFLOW (arg1
))
8523 const enum tree_code
8524 reverse_op
= TREE_CODE (arg0
) == PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
;
8525 tree const1
= TREE_OPERAND (arg0
, 1);
8526 tree const2
= fold_convert_loc (loc
, TREE_TYPE (const1
), arg1
);
8527 tree variable
= TREE_OPERAND (arg0
, 0);
8528 tree new_const
= int_const_binop (reverse_op
, const2
, const1
);
8530 /* If the constant operation overflowed this can be
8531 simplified as a comparison against INT_MAX/INT_MIN. */
8532 if (TREE_OVERFLOW (new_const
)
8533 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
8535 int const1_sgn
= tree_int_cst_sgn (const1
);
8536 enum tree_code code2
= code
;
8538 /* Get the sign of the constant on the lhs if the
8539 operation were VARIABLE + CONST1. */
8540 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8541 const1_sgn
= -const1_sgn
;
8543 /* The sign of the constant determines if we overflowed
8544 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8545 Canonicalize to the INT_MIN overflow by swapping the comparison
8547 if (const1_sgn
== -1)
8548 code2
= swap_tree_comparison (code
);
8550 /* We now can look at the canonicalized case
8551 VARIABLE + 1 CODE2 INT_MIN
8552 and decide on the result. */
8559 omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8565 omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8574 fold_overflow_warning ("assuming signed overflow does not occur "
8575 "when changing X +- C1 cmp C2 to "
8577 WARN_STRICT_OVERFLOW_COMPARISON
);
8578 return fold_build2_loc (loc
, code
, type
, variable
, new_const
);
8582 /* For comparisons of pointers we can decompose it to a compile time
8583 comparison of the base objects and the offsets into the object.
8584 This requires at least one operand being an ADDR_EXPR or a
8585 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8586 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8587 && (TREE_CODE (arg0
) == ADDR_EXPR
8588 || TREE_CODE (arg1
) == ADDR_EXPR
8589 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8590 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8592 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8593 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8595 int volatilep
, unsignedp
;
8596 bool indirect_base0
= false, indirect_base1
= false;
8598 /* Get base and offset for the access. Strip ADDR_EXPR for
8599 get_inner_reference, but put it back by stripping INDIRECT_REF
8600 off the base object if possible. indirect_baseN will be true
8601 if baseN is not an address but refers to the object itself. */
8603 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8605 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8606 &bitsize
, &bitpos0
, &offset0
, &mode
,
8607 &unsignedp
, &volatilep
, false);
8608 if (TREE_CODE (base0
) == INDIRECT_REF
)
8609 base0
= TREE_OPERAND (base0
, 0);
8611 indirect_base0
= true;
8613 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8615 base0
= TREE_OPERAND (arg0
, 0);
8616 STRIP_SIGN_NOPS (base0
);
8617 if (TREE_CODE (base0
) == ADDR_EXPR
)
8619 base0
= TREE_OPERAND (base0
, 0);
8620 indirect_base0
= true;
8622 offset0
= TREE_OPERAND (arg0
, 1);
8623 if (tree_fits_shwi_p (offset0
))
8625 HOST_WIDE_INT off
= size_low_cst (offset0
);
8626 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8628 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8630 bitpos0
= off
* BITS_PER_UNIT
;
8631 offset0
= NULL_TREE
;
8637 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8639 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8640 &bitsize
, &bitpos1
, &offset1
, &mode
,
8641 &unsignedp
, &volatilep
, false);
8642 if (TREE_CODE (base1
) == INDIRECT_REF
)
8643 base1
= TREE_OPERAND (base1
, 0);
8645 indirect_base1
= true;
8647 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8649 base1
= TREE_OPERAND (arg1
, 0);
8650 STRIP_SIGN_NOPS (base1
);
8651 if (TREE_CODE (base1
) == ADDR_EXPR
)
8653 base1
= TREE_OPERAND (base1
, 0);
8654 indirect_base1
= true;
8656 offset1
= TREE_OPERAND (arg1
, 1);
8657 if (tree_fits_shwi_p (offset1
))
8659 HOST_WIDE_INT off
= size_low_cst (offset1
);
8660 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8662 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8664 bitpos1
= off
* BITS_PER_UNIT
;
8665 offset1
= NULL_TREE
;
8670 /* A local variable can never be pointed to by
8671 the default SSA name of an incoming parameter. */
8672 if ((TREE_CODE (arg0
) == ADDR_EXPR
8674 && TREE_CODE (base0
) == VAR_DECL
8675 && auto_var_in_fn_p (base0
, current_function_decl
)
8677 && TREE_CODE (base1
) == SSA_NAME
8678 && SSA_NAME_IS_DEFAULT_DEF (base1
)
8679 && TREE_CODE (SSA_NAME_VAR (base1
)) == PARM_DECL
)
8680 || (TREE_CODE (arg1
) == ADDR_EXPR
8682 && TREE_CODE (base1
) == VAR_DECL
8683 && auto_var_in_fn_p (base1
, current_function_decl
)
8685 && TREE_CODE (base0
) == SSA_NAME
8686 && SSA_NAME_IS_DEFAULT_DEF (base0
)
8687 && TREE_CODE (SSA_NAME_VAR (base0
)) == PARM_DECL
))
8689 if (code
== NE_EXPR
)
8690 return constant_boolean_node (1, type
);
8691 else if (code
== EQ_EXPR
)
8692 return constant_boolean_node (0, type
);
8694 /* If we have equivalent bases we might be able to simplify. */
8695 else if (indirect_base0
== indirect_base1
8696 && operand_equal_p (base0
, base1
, 0))
8698 /* We can fold this expression to a constant if the non-constant
8699 offset parts are equal. */
8700 if ((offset0
== offset1
8701 || (offset0
&& offset1
8702 && operand_equal_p (offset0
, offset1
, 0)))
8705 || (indirect_base0
&& DECL_P (base0
))
8706 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8710 && bitpos0
!= bitpos1
8711 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8712 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8713 fold_overflow_warning (("assuming pointer wraparound does not "
8714 "occur when comparing P +- C1 with "
8716 WARN_STRICT_OVERFLOW_CONDITIONAL
);
/* NOTE(review): the switch-on-CODE labels for the six comparisons
   below are missing from this view; only the return arms survive. */
8721 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8723 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8725 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8727 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8729 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8731 return constant_boolean_node (bitpos0
> bitpos1
, type
);
8735 /* We can simplify the comparison to a comparison of the variable
8736 offset parts if the constant offset parts are equal.
8737 Be careful to use signed sizetype here because otherwise we
8738 mess with array offsets in the wrong way. This is possible
8739 because pointer arithmetic is restricted to retain within an
8740 object and overflow on pointer differences is undefined as of
8741 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8742 else if (bitpos0
== bitpos1
8744 || (indirect_base0
&& DECL_P (base0
))
8745 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8747 /* By converting to signed sizetype we cover middle-end pointer
8748 arithmetic which operates on unsigned pointer types of size
8749 type size and ARRAY_REF offsets which are properly sign or
8750 zero extended from their type in case it is narrower than
8752 if (offset0
== NULL_TREE
)
8753 offset0
= build_int_cst (ssizetype
, 0);
8755 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
8756 if (offset1
== NULL_TREE
)
8757 offset1
= build_int_cst (ssizetype
, 0);
8759 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
8762 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8763 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8764 fold_overflow_warning (("assuming pointer wraparound does not "
8765 "occur when comparing P +- C1 with "
8767 WARN_STRICT_OVERFLOW_COMPARISON
);
8769 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
8772 /* For non-equal bases we can simplify if they are addresses
8773 declarations with different addresses. */
8774 else if (indirect_base0
&& indirect_base1
8775 /* We know that !operand_equal_p (base0, base1, 0)
8776 because the if condition was false. But make
8777 sure two decls are not the same. */
8779 && TREE_CODE (arg0
) == ADDR_EXPR
8780 && TREE_CODE (arg1
) == ADDR_EXPR
8783 /* Watch for aliases. */
8784 && (!decl_in_symtab_p (base0
)
8785 || !decl_in_symtab_p (base1
)
8786 || !symtab_node::get_create (base0
)->equal_address_to
8787 (symtab_node::get_create (base1
))))
8789 if (code
== EQ_EXPR
)
8790 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
8792 else if (code
== NE_EXPR
)
8793 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
8796 /* For equal offsets we can simplify to a comparison of the
8798 else if (bitpos0
== bitpos1
8800 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
8802 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
8803 && ((offset0
== offset1
)
8804 || (offset0
&& offset1
8805 && operand_equal_p (offset0
, offset1
, 0))))
8808 base0
= build_fold_addr_expr_loc (loc
, base0
);
8810 base1
= build_fold_addr_expr_loc (loc
, base1
);
8811 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
8815 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8816 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8817 the resulting offset is smaller in absolute value than the
8818 original one and has the same sign. */
8819 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8820 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8821 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8822 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8823 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8824 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
8825 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8826 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
8828 tree const1
= TREE_OPERAND (arg0
, 1);
8829 tree const2
= TREE_OPERAND (arg1
, 1);
8830 tree variable1
= TREE_OPERAND (arg0
, 0);
8831 tree variable2
= TREE_OPERAND (arg1
, 0);
8833 const char * const warnmsg
= G_("assuming signed overflow does not "
8834 "occur when combining constants around "
8837 /* Put the constant on the side where it doesn't overflow and is
8838 of lower absolute value and of same sign than before. */
8839 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8840 ? MINUS_EXPR
: PLUS_EXPR
,
8842 if (!TREE_OVERFLOW (cst
)
8843 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
8844 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
8846 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8847 return fold_build2_loc (loc
, code
, type
,
8849 fold_build2_loc (loc
, TREE_CODE (arg1
),
8854 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8855 ? MINUS_EXPR
: PLUS_EXPR
,
8857 if (!TREE_OVERFLOW (cst
)
8858 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
8859 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
8861 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8862 return fold_build2_loc (loc
, code
, type
,
8863 fold_build2_loc (loc
, TREE_CODE (arg0
),
8870 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
8874 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
8875 && CONVERT_EXPR_P (arg0
))
8877 /* If we are widening one operand of an integer comparison,
8878 see if the other operand is similarly being widened. Perhaps we
8879 can do the comparison in the narrower type. */
8880 tem
= fold_widened_comparison (loc
, code
, type
, arg0
, arg1
);
8884 /* Or if we are changing signedness. */
8885 tem
= fold_sign_changed_comparison (loc
, code
, type
, arg0
, arg1
);
8890 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8891 constant, we can simplify it. */
8892 if (TREE_CODE (arg1
) == INTEGER_CST
8893 && (TREE_CODE (arg0
) == MIN_EXPR
8894 || TREE_CODE (arg0
) == MAX_EXPR
)
8895 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8897 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
8902 /* If we are comparing an expression that just has comparisons
8903 of two integer values, arithmetic expressions of those comparisons,
8904 and constants, we can simplify it. There are only three cases
8905 to check: the two values can either be equal, the first can be
8906 greater, or the second can be greater. Fold the expression for
8907 those three values. Since each value must be 0 or 1, we have
8908 eight possibilities, each of which corresponds to the constant 0
8909 or 1 or one of the six possible comparisons.
8911 This handles common cases like (a > b) == 0 but also handles
8912 expressions like ((x > y) - (y > x)) > 0, which supposedly
8913 occur in macroized code. */
8915 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
8917 tree cval1
= 0, cval2
= 0;
8920 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
8921 /* Don't handle degenerate cases here; they should already
8922 have been handled anyway. */
8923 && cval1
!= 0 && cval2
!= 0
8924 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
8925 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
8926 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
8927 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
8928 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
8929 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
8930 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
8932 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
8933 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
8935 /* We can't just pass T to eval_subst in case cval1 or cval2
8936 was the same as ARG1. */
8939 = fold_build2_loc (loc
, code
, type
,
8940 eval_subst (loc
, arg0
, cval1
, maxval
,
8944 = fold_build2_loc (loc
, code
, type
,
8945 eval_subst (loc
, arg0
, cval1
, maxval
,
8949 = fold_build2_loc (loc
, code
, type
,
8950 eval_subst (loc
, arg0
, cval1
, minval
,
8954 /* All three of these results should be 0 or 1. Confirm they are.
8955 Then use those values to select the proper code to use. */
8957 if (TREE_CODE (high_result
) == INTEGER_CST
8958 && TREE_CODE (equal_result
) == INTEGER_CST
8959 && TREE_CODE (low_result
) == INTEGER_CST
)
8961 /* Make a 3-bit mask with the high-order bit being the
8962 value for `>', the next for '=', and the low for '<'. */
8963 switch ((integer_onep (high_result
) * 4)
8964 + (integer_onep (equal_result
) * 2)
8965 + integer_onep (low_result
))
/* NOTE(review): the case labels 0..7 of this switch are missing from
   this view; only the all-false and all-true arms survive below. */
8969 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
8990 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
8995 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
8996 SET_EXPR_LOCATION (tem
, loc
);
8999 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
9004 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9005 into a single range test. */
9006 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
9007 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
9008 && TREE_CODE (arg1
) == INTEGER_CST
9009 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9010 && !integer_zerop (TREE_OPERAND (arg0
, 1))
9011 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
9012 && !TREE_OVERFLOW (arg1
))
9014 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
9015 if (tem
!= NULL_TREE
)
9023 /* Subroutine of fold_binary. Optimize complex multiplications of the
9024 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9025 argument EXPR represents the expression "z" of type TYPE. */
/* NOTE(review): line-mangled extraction — return type, braces and the
   final else-branch header are missing from this view.  Tokens kept
   verbatim.  Visible behavior: extract real/imaginary parts directly
   from a COMPLEX_EXPR or COMPLEX_CST, otherwise wrap EXPR in
   save_expr and build REALPART_EXPR/IMAGPART_EXPR; then return
   COMPLEX_EXPR (rpart*rpart + ipart*ipart, 0), using save_expr so each
   part is evaluated only once. */
9028 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
9030 tree itype
= TREE_TYPE (type
);
9031 tree rpart
, ipart
, tem
;
9033 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
9035 rpart
= TREE_OPERAND (expr
, 0);
9036 ipart
= TREE_OPERAND (expr
, 1);
9038 else if (TREE_CODE (expr
) == COMPLEX_CST
)
9040 rpart
= TREE_REALPART (expr
);
9041 ipart
= TREE_IMAGPART (expr
);
9045 expr
= save_expr (expr
);
9046 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
9047 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
9050 rpart
= save_expr (rpart
);
9051 ipart
= save_expr (ipart
);
9052 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
9053 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
9054 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
9055 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
9056 build_zero_cst (itype
));
9060 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9061 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9064 vec_cst_ctor_to_array (tree arg
, tree
*elts
)
9066 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)), i
;
9068 if (TREE_CODE (arg
) == VECTOR_CST
)
9070 for (i
= 0; i
< VECTOR_CST_NELTS (arg
); ++i
)
9071 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
9073 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
9075 constructor_elt
*elt
;
9077 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg
), i
, elt
)
9078 if (i
>= nelts
|| TREE_CODE (TREE_TYPE (elt
->value
)) == VECTOR_TYPE
)
9081 elts
[i
] = elt
->value
;
9085 for (; i
< nelts
; i
++)
9087 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
9091 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9092 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9093 NULL_TREE otherwise. */
9096 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const unsigned char *sel
)
9098 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
9100 bool need_ctor
= false;
9102 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
9103 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
);
9104 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
9105 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
9108 elts
= XALLOCAVEC (tree
, nelts
* 3);
9109 if (!vec_cst_ctor_to_array (arg0
, elts
)
9110 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
))
9113 for (i
= 0; i
< nelts
; i
++)
9115 if (!CONSTANT_CLASS_P (elts
[sel
[i
]]))
9117 elts
[i
+ 2 * nelts
] = unshare_expr (elts
[sel
[i
]]);
9122 vec
<constructor_elt
, va_gc
> *v
;
9123 vec_alloc (v
, nelts
);
9124 for (i
= 0; i
< nelts
; i
++)
9125 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, elts
[2 * nelts
+ i
]);
9126 return build_constructor (type
, v
);
9129 return build_vector (type
, &elts
[2 * nelts
]);
9132 /* Try to fold a pointer difference of type TYPE two address expressions of
9133 array references AREF0 and AREF1 using location LOC. Return a
9134 simplified expression for the difference or NULL_TREE. */
9137 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
9138 tree aref0
, tree aref1
)
9140 tree base0
= TREE_OPERAND (aref0
, 0);
9141 tree base1
= TREE_OPERAND (aref1
, 0);
9142 tree base_offset
= build_int_cst (type
, 0);
9144 /* If the bases are array references as well, recurse. If the bases
9145 are pointer indirections compute the difference of the pointers.
9146 If the bases are equal, we are set. */
9147 if ((TREE_CODE (base0
) == ARRAY_REF
9148 && TREE_CODE (base1
) == ARRAY_REF
9150 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
)))
9151 || (INDIRECT_REF_P (base0
)
9152 && INDIRECT_REF_P (base1
)
9153 && (base_offset
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
9154 TREE_OPERAND (base0
, 0),
9155 TREE_OPERAND (base1
, 0))))
9156 || operand_equal_p (base0
, base1
, 0))
9158 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
9159 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
9160 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
9161 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
9162 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9164 fold_build2_loc (loc
, MULT_EXPR
, type
,
9170 /* If the real or vector real constant CST of type TYPE has an exact
9171 inverse, return it, else return NULL. */
9174 exact_inverse (tree type
, tree cst
)
9177 tree unit_type
, *elts
;
9179 unsigned vec_nelts
, i
;
9181 switch (TREE_CODE (cst
))
9184 r
= TREE_REAL_CST (cst
);
9186 if (exact_real_inverse (TYPE_MODE (type
), &r
))
9187 return build_real (type
, r
);
9192 vec_nelts
= VECTOR_CST_NELTS (cst
);
9193 elts
= XALLOCAVEC (tree
, vec_nelts
);
9194 unit_type
= TREE_TYPE (type
);
9195 mode
= TYPE_MODE (unit_type
);
9197 for (i
= 0; i
< vec_nelts
; i
++)
9199 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
9200 if (!exact_real_inverse (mode
, &r
))
9202 elts
[i
] = build_real (unit_type
, r
);
9205 return build_vector (type
, elts
);
9212 /* Mask out the tz least significant bits of X of type TYPE where
9213 tz is the number of trailing zeroes in Y. */
9215 mask_with_tz (tree type
, const wide_int
&x
, const wide_int
&y
)
9217 int tz
= wi::ctz (y
);
9219 return wi::mask (tz
, true, TYPE_PRECISION (type
)) & x
;
9223 /* Return true when T is an address and is known to be nonzero.
9224 For floating point we further ensure that T is not denormal.
9225 Similar logic is present in nonzero_address in rtlanal.h.
9227 If the return value is based on the assumption that signed overflow
9228 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9229 change *STRICT_OVERFLOW_P. */
9232 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
9234 tree type
= TREE_TYPE (t
);
9235 enum tree_code code
;
9237 /* Doing something useful for floating point would need more work. */
9238 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
9241 code
= TREE_CODE (t
);
9242 switch (TREE_CODE_CLASS (code
))
9245 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9248 case tcc_comparison
:
9249 return tree_binary_nonzero_warnv_p (code
, type
,
9250 TREE_OPERAND (t
, 0),
9251 TREE_OPERAND (t
, 1),
9254 case tcc_declaration
:
9256 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
9264 case TRUTH_NOT_EXPR
:
9265 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9268 case TRUTH_AND_EXPR
:
9270 case TRUTH_XOR_EXPR
:
9271 return tree_binary_nonzero_warnv_p (code
, type
,
9272 TREE_OPERAND (t
, 0),
9273 TREE_OPERAND (t
, 1),
9281 case WITH_SIZE_EXPR
:
9283 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
9288 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
9292 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
9297 tree fndecl
= get_callee_fndecl (t
);
9298 if (!fndecl
) return false;
9299 if (flag_delete_null_pointer_checks
&& !flag_check_new
9300 && DECL_IS_OPERATOR_NEW (fndecl
)
9301 && !TREE_NOTHROW (fndecl
))
9303 if (flag_delete_null_pointer_checks
9304 && lookup_attribute ("returns_nonnull",
9305 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
9307 return alloca_call_p (t
);
9316 /* Return true when T is an address and is known to be nonzero.
9317 Handle warnings about undefined signed overflow. */
9320 tree_expr_nonzero_p (tree t
)
9322 bool ret
, strict_overflow_p
;
9324 strict_overflow_p
= false;
9325 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
9326 if (strict_overflow_p
)
9327 fold_overflow_warning (("assuming signed overflow does not occur when "
9328 "determining that expression is always "
9330 WARN_STRICT_OVERFLOW_MISC
);
9334 /* Fold a binary expression of code CODE and type TYPE with operands
9335 OP0 and OP1. LOC is the location of the resulting expression.
9336 Return the folded expression if folding is successful. Otherwise,
9337 return NULL_TREE. */
9340 fold_binary_loc (location_t loc
,
9341 enum tree_code code
, tree type
, tree op0
, tree op1
)
9343 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9344 tree arg0
, arg1
, tem
;
9345 tree t1
= NULL_TREE
;
9346 bool strict_overflow_p
;
9349 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9350 && TREE_CODE_LENGTH (code
) == 2
9352 && op1
!= NULL_TREE
);
9357 /* Strip any conversions that don't change the mode. This is
9358 safe for every expression, except for a comparison expression
9359 because its signedness is derived from its operands. So, in
9360 the latter case, only strip conversions that don't change the
9361 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9364 Note that this is done as an internal manipulation within the
9365 constant folder, in order to find the simplest representation
9366 of the arguments so that their form can be studied. In any
9367 cases, the appropriate type conversions should be put back in
9368 the tree that will get out of the constant folder. */
9370 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9372 STRIP_SIGN_NOPS (arg0
);
9373 STRIP_SIGN_NOPS (arg1
);
9381 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9382 constant but we can't do arithmetic on them. */
9383 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
9385 tem
= const_binop (code
, type
, arg0
, arg1
);
9386 if (tem
!= NULL_TREE
)
9388 if (TREE_TYPE (tem
) != type
)
9389 tem
= fold_convert_loc (loc
, type
, tem
);
9394 /* If this is a commutative operation, and ARG0 is a constant, move it
9395 to ARG1 to reduce the number of tests below. */
9396 if (commutative_tree_code (code
)
9397 && tree_swap_operands_p (arg0
, arg1
, true))
9398 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9400 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9401 to ARG1 to reduce the number of tests below. */
9402 if (kind
== tcc_comparison
9403 && tree_swap_operands_p (arg0
, arg1
, true))
9404 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9406 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9410 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9412 First check for cases where an arithmetic operation is applied to a
9413 compound, conditional, or comparison operation. Push the arithmetic
9414 operation inside the compound or conditional to see if any folding
9415 can then be done. Convert comparison to conditional for this purpose.
9416 The also optimizes non-constant cases that used to be done in
9419 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9420 one of the operands is a comparison and the other is a comparison, a
9421 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9422 code below would make the expression more complex. Change it to a
9423 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9424 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9426 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9427 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9428 && TREE_CODE (type
) != VECTOR_TYPE
9429 && ((truth_value_p (TREE_CODE (arg0
))
9430 && (truth_value_p (TREE_CODE (arg1
))
9431 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9432 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9433 || (truth_value_p (TREE_CODE (arg1
))
9434 && (truth_value_p (TREE_CODE (arg0
))
9435 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9436 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9438 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9439 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9442 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9443 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9445 if (code
== EQ_EXPR
)
9446 tem
= invert_truthvalue_loc (loc
, tem
);
9448 return fold_convert_loc (loc
, type
, tem
);
9451 if (TREE_CODE_CLASS (code
) == tcc_binary
9452 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9454 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9456 tem
= fold_build2_loc (loc
, code
, type
,
9457 fold_convert_loc (loc
, TREE_TYPE (op0
),
9458 TREE_OPERAND (arg0
, 1)), op1
);
9459 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9462 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9463 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9465 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9466 fold_convert_loc (loc
, TREE_TYPE (op1
),
9467 TREE_OPERAND (arg1
, 1)));
9468 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9472 if (TREE_CODE (arg0
) == COND_EXPR
9473 || TREE_CODE (arg0
) == VEC_COND_EXPR
9474 || COMPARISON_CLASS_P (arg0
))
9476 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9478 /*cond_first_p=*/1);
9479 if (tem
!= NULL_TREE
)
9483 if (TREE_CODE (arg1
) == COND_EXPR
9484 || TREE_CODE (arg1
) == VEC_COND_EXPR
9485 || COMPARISON_CLASS_P (arg1
))
9487 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9489 /*cond_first_p=*/0);
9490 if (tem
!= NULL_TREE
)
9498 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9499 if (TREE_CODE (arg0
) == ADDR_EXPR
9500 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9502 tree iref
= TREE_OPERAND (arg0
, 0);
9503 return fold_build2 (MEM_REF
, type
,
9504 TREE_OPERAND (iref
, 0),
9505 int_const_binop (PLUS_EXPR
, arg1
,
9506 TREE_OPERAND (iref
, 1)));
9509 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9510 if (TREE_CODE (arg0
) == ADDR_EXPR
9511 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9514 HOST_WIDE_INT coffset
;
9515 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9519 return fold_build2 (MEM_REF
, type
,
9520 build_fold_addr_expr (base
),
9521 int_const_binop (PLUS_EXPR
, arg1
,
9522 size_int (coffset
)));
9527 case POINTER_PLUS_EXPR
:
9528 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9529 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9530 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9531 return fold_convert_loc (loc
, type
,
9532 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9533 fold_convert_loc (loc
, sizetype
,
9535 fold_convert_loc (loc
, sizetype
,
9541 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
9543 /* X + (X / CST) * -CST is X % CST. */
9544 if (TREE_CODE (arg1
) == MULT_EXPR
9545 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
9546 && operand_equal_p (arg0
,
9547 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
9549 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
9550 tree cst1
= TREE_OPERAND (arg1
, 1);
9551 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
9553 if (sum
&& integer_zerop (sum
))
9554 return fold_convert_loc (loc
, type
,
9555 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
9556 TREE_TYPE (arg0
), arg0
,
9561 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9562 one. Make sure the type is not saturating and has the signedness of
9563 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9564 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9565 if ((TREE_CODE (arg0
) == MULT_EXPR
9566 || TREE_CODE (arg1
) == MULT_EXPR
)
9567 && !TYPE_SATURATING (type
)
9568 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9569 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9570 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9572 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9577 if (! FLOAT_TYPE_P (type
))
9579 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9580 with a constant, and the two constants have no bits in common,
9581 we should treat this as a BIT_IOR_EXPR since this may produce more
9583 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9584 && TREE_CODE (arg1
) == BIT_AND_EXPR
9585 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9586 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9587 && wi::bit_and (TREE_OPERAND (arg0
, 1),
9588 TREE_OPERAND (arg1
, 1)) == 0)
9590 code
= BIT_IOR_EXPR
;
9594 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9595 (plus (plus (mult) (mult)) (foo)) so that we can
9596 take advantage of the factoring cases below. */
9597 if (ANY_INTEGRAL_TYPE_P (type
)
9598 && TYPE_OVERFLOW_WRAPS (type
)
9599 && (((TREE_CODE (arg0
) == PLUS_EXPR
9600 || TREE_CODE (arg0
) == MINUS_EXPR
)
9601 && TREE_CODE (arg1
) == MULT_EXPR
)
9602 || ((TREE_CODE (arg1
) == PLUS_EXPR
9603 || TREE_CODE (arg1
) == MINUS_EXPR
)
9604 && TREE_CODE (arg0
) == MULT_EXPR
)))
9606 tree parg0
, parg1
, parg
, marg
;
9607 enum tree_code pcode
;
9609 if (TREE_CODE (arg1
) == MULT_EXPR
)
9610 parg
= arg0
, marg
= arg1
;
9612 parg
= arg1
, marg
= arg0
;
9613 pcode
= TREE_CODE (parg
);
9614 parg0
= TREE_OPERAND (parg
, 0);
9615 parg1
= TREE_OPERAND (parg
, 1);
9619 if (TREE_CODE (parg0
) == MULT_EXPR
9620 && TREE_CODE (parg1
) != MULT_EXPR
)
9621 return fold_build2_loc (loc
, pcode
, type
,
9622 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9623 fold_convert_loc (loc
, type
,
9625 fold_convert_loc (loc
, type
,
9627 fold_convert_loc (loc
, type
, parg1
));
9628 if (TREE_CODE (parg0
) != MULT_EXPR
9629 && TREE_CODE (parg1
) == MULT_EXPR
)
9631 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9632 fold_convert_loc (loc
, type
, parg0
),
9633 fold_build2_loc (loc
, pcode
, type
,
9634 fold_convert_loc (loc
, type
, marg
),
9635 fold_convert_loc (loc
, type
,
9641 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9642 to __complex__ ( x, y ). This is not the same for SNaNs or
9643 if signed zeros are involved. */
9644 if (!HONOR_SNANS (element_mode (arg0
))
9645 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9646 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9648 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9649 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9650 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9651 bool arg0rz
= false, arg0iz
= false;
9652 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9653 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9655 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9656 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9657 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9659 tree rp
= arg1r
? arg1r
9660 : build1 (REALPART_EXPR
, rtype
, arg1
);
9661 tree ip
= arg0i
? arg0i
9662 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9663 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9665 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9667 tree rp
= arg0r
? arg0r
9668 : build1 (REALPART_EXPR
, rtype
, arg0
);
9669 tree ip
= arg1i
? arg1i
9670 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
9671 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9676 if (flag_unsafe_math_optimizations
9677 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9678 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9679 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
9682 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9683 We associate floats only if the user has specified
9684 -fassociative-math. */
9685 if (flag_associative_math
9686 && TREE_CODE (arg1
) == PLUS_EXPR
9687 && TREE_CODE (arg0
) != MULT_EXPR
)
9689 tree tree10
= TREE_OPERAND (arg1
, 0);
9690 tree tree11
= TREE_OPERAND (arg1
, 1);
9691 if (TREE_CODE (tree11
) == MULT_EXPR
9692 && TREE_CODE (tree10
) == MULT_EXPR
)
9695 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
9696 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
9699 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9700 We associate floats only if the user has specified
9701 -fassociative-math. */
9702 if (flag_associative_math
9703 && TREE_CODE (arg0
) == PLUS_EXPR
9704 && TREE_CODE (arg1
) != MULT_EXPR
)
9706 tree tree00
= TREE_OPERAND (arg0
, 0);
9707 tree tree01
= TREE_OPERAND (arg0
, 1);
9708 if (TREE_CODE (tree01
) == MULT_EXPR
9709 && TREE_CODE (tree00
) == MULT_EXPR
)
9712 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
9713 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
9719 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9720 is a rotate of A by C1 bits. */
9721 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9722 is a rotate of A by B bits. */
9724 enum tree_code code0
, code1
;
9726 code0
= TREE_CODE (arg0
);
9727 code1
= TREE_CODE (arg1
);
9728 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
9729 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
9730 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9731 TREE_OPERAND (arg1
, 0), 0)
9732 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9733 TYPE_UNSIGNED (rtype
))
9734 /* Only create rotates in complete modes. Other cases are not
9735 expanded properly. */
9736 && (element_precision (rtype
)
9737 == element_precision (TYPE_MODE (rtype
))))
9739 tree tree01
, tree11
;
9740 enum tree_code code01
, code11
;
9742 tree01
= TREE_OPERAND (arg0
, 1);
9743 tree11
= TREE_OPERAND (arg1
, 1);
9744 STRIP_NOPS (tree01
);
9745 STRIP_NOPS (tree11
);
9746 code01
= TREE_CODE (tree01
);
9747 code11
= TREE_CODE (tree11
);
9748 if (code01
== INTEGER_CST
9749 && code11
== INTEGER_CST
9750 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
9751 == element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
9753 tem
= build2_loc (loc
, LROTATE_EXPR
,
9754 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9755 TREE_OPERAND (arg0
, 0),
9756 code0
== LSHIFT_EXPR
9757 ? TREE_OPERAND (arg0
, 1)
9758 : TREE_OPERAND (arg1
, 1));
9759 return fold_convert_loc (loc
, type
, tem
);
9761 else if (code11
== MINUS_EXPR
)
9763 tree tree110
, tree111
;
9764 tree110
= TREE_OPERAND (tree11
, 0);
9765 tree111
= TREE_OPERAND (tree11
, 1);
9766 STRIP_NOPS (tree110
);
9767 STRIP_NOPS (tree111
);
9768 if (TREE_CODE (tree110
) == INTEGER_CST
9769 && 0 == compare_tree_int (tree110
,
9771 (TREE_TYPE (TREE_OPERAND
9773 && operand_equal_p (tree01
, tree111
, 0))
9775 fold_convert_loc (loc
, type
,
9776 build2 ((code0
== LSHIFT_EXPR
9779 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9780 TREE_OPERAND (arg0
, 0),
9781 TREE_OPERAND (arg0
, 1)));
9783 else if (code01
== MINUS_EXPR
)
9785 tree tree010
, tree011
;
9786 tree010
= TREE_OPERAND (tree01
, 0);
9787 tree011
= TREE_OPERAND (tree01
, 1);
9788 STRIP_NOPS (tree010
);
9789 STRIP_NOPS (tree011
);
9790 if (TREE_CODE (tree010
) == INTEGER_CST
9791 && 0 == compare_tree_int (tree010
,
9793 (TREE_TYPE (TREE_OPERAND
9795 && operand_equal_p (tree11
, tree011
, 0))
9796 return fold_convert_loc
9798 build2 ((code0
!= LSHIFT_EXPR
9801 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9802 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1)));
9808 /* In most languages, can't associate operations on floats through
9809 parentheses. Rather than remember where the parentheses were, we
9810 don't associate floats at all, unless the user has specified
9812 And, we need to make sure type is not saturating. */
9814 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
9815 && !TYPE_SATURATING (type
))
9817 tree var0
, con0
, lit0
, minus_lit0
;
9818 tree var1
, con1
, lit1
, minus_lit1
;
9822 /* Split both trees into variables, constants, and literals. Then
9823 associate each group together, the constants with literals,
9824 then the result with variables. This increases the chances of
9825 literals being recombined later and of generating relocatable
9826 expressions for the sum of a constant and literal. */
9827 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
9828 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
9829 code
== MINUS_EXPR
);
9831 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9832 if (code
== MINUS_EXPR
)
9835 /* With undefined overflow prefer doing association in a type
9836 which wraps on overflow, if that is one of the operand types. */
9837 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
9838 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
9840 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9841 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
9842 atype
= TREE_TYPE (arg0
);
9843 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9844 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
9845 atype
= TREE_TYPE (arg1
);
9846 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
9849 /* With undefined overflow we can only associate constants with one
9850 variable, and constants whose association doesn't overflow. */
9851 if ((POINTER_TYPE_P (atype
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
9852 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
9859 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
9860 tmp0
= TREE_OPERAND (tmp0
, 0);
9861 if (CONVERT_EXPR_P (tmp0
)
9862 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9863 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9864 <= TYPE_PRECISION (atype
)))
9865 tmp0
= TREE_OPERAND (tmp0
, 0);
9866 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
9867 tmp1
= TREE_OPERAND (tmp1
, 0);
9868 if (CONVERT_EXPR_P (tmp1
)
9869 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9870 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9871 <= TYPE_PRECISION (atype
)))
9872 tmp1
= TREE_OPERAND (tmp1
, 0);
9873 /* The only case we can still associate with two variables
9874 is if they are the same, modulo negation and bit-pattern
9875 preserving conversions. */
9876 if (!operand_equal_p (tmp0
, tmp1
, 0))
9881 /* Only do something if we found more than two objects. Otherwise,
9882 nothing has changed and we risk infinite recursion. */
9884 && (2 < ((var0
!= 0) + (var1
!= 0)
9885 + (con0
!= 0) + (con1
!= 0)
9886 + (lit0
!= 0) + (lit1
!= 0)
9887 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
9889 bool any_overflows
= false;
9890 if (lit0
) any_overflows
|= TREE_OVERFLOW (lit0
);
9891 if (lit1
) any_overflows
|= TREE_OVERFLOW (lit1
);
9892 if (minus_lit0
) any_overflows
|= TREE_OVERFLOW (minus_lit0
);
9893 if (minus_lit1
) any_overflows
|= TREE_OVERFLOW (minus_lit1
);
9894 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
9895 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
9896 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
9897 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
9900 /* Preserve the MINUS_EXPR if the negative part of the literal is
9901 greater than the positive part. Otherwise, the multiplicative
9902 folding code (i.e extract_muldiv) may be fooled in case
9903 unsigned constants are subtracted, like in the following
9904 example: ((X*2 + 4) - 8U)/2. */
9905 if (minus_lit0
&& lit0
)
9907 if (TREE_CODE (lit0
) == INTEGER_CST
9908 && TREE_CODE (minus_lit0
) == INTEGER_CST
9909 && tree_int_cst_lt (lit0
, minus_lit0
))
9911 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
9917 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
9923 /* Don't introduce overflows through reassociation. */
9925 && ((lit0
&& TREE_OVERFLOW_P (lit0
))
9926 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
))))
9933 fold_convert_loc (loc
, type
,
9934 associate_trees (loc
, var0
, minus_lit0
,
9935 MINUS_EXPR
, atype
));
9938 con0
= associate_trees (loc
, con0
, minus_lit0
,
9941 fold_convert_loc (loc
, type
,
9942 associate_trees (loc
, var0
, con0
,
9947 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
9949 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
9957 /* Pointer simplifications for subtraction, simple reassociations. */
9958 if (POINTER_TYPE_P (TREE_TYPE (arg1
)) && POINTER_TYPE_P (TREE_TYPE (arg0
)))
9960 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9961 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
9962 && TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
9964 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
9965 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
9966 tree arg10
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
9967 tree arg11
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
9968 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9969 fold_build2_loc (loc
, MINUS_EXPR
, type
,
9971 fold_build2_loc (loc
, MINUS_EXPR
, type
,
9974 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9975 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
9977 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
9978 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
9979 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
, arg00
,
9980 fold_convert_loc (loc
, type
, arg1
));
9982 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tmp
, arg01
);
9984 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
9986 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
9988 tree arg10
= fold_convert_loc (loc
, type
,
9989 TREE_OPERAND (arg1
, 0));
9990 tree arg11
= fold_convert_loc (loc
, type
,
9991 TREE_OPERAND (arg1
, 1));
9992 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
9993 fold_convert_loc (loc
, type
, arg0
),
9996 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tmp
, arg11
);
9999 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10000 if (TREE_CODE (arg0
) == NEGATE_EXPR
10001 && negate_expr_p (arg1
)
10002 && reorder_operands_p (arg0
, arg1
))
10003 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10004 fold_convert_loc (loc
, type
,
10005 negate_expr (arg1
)),
10006 fold_convert_loc (loc
, type
,
10007 TREE_OPERAND (arg0
, 0)));
10009 if (! FLOAT_TYPE_P (type
))
10011 /* Fold A - (A & B) into ~B & A. */
10012 if (!TREE_SIDE_EFFECTS (arg0
)
10013 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10015 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
10017 tree arg10
= fold_convert_loc (loc
, type
,
10018 TREE_OPERAND (arg1
, 0));
10019 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10020 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10022 fold_convert_loc (loc
, type
, arg0
));
10024 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10026 tree arg11
= fold_convert_loc (loc
,
10027 type
, TREE_OPERAND (arg1
, 1));
10028 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10029 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10031 fold_convert_loc (loc
, type
, arg0
));
10035 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10036 any power of 2 minus 1. */
10037 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10038 && TREE_CODE (arg1
) == BIT_AND_EXPR
10039 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10040 TREE_OPERAND (arg1
, 0), 0))
10042 tree mask0
= TREE_OPERAND (arg0
, 1);
10043 tree mask1
= TREE_OPERAND (arg1
, 1);
10044 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
10046 if (operand_equal_p (tem
, mask1
, 0))
10048 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
10049 TREE_OPERAND (arg0
, 0), mask1
);
10050 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
10055 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10056 __complex__ ( x, -y ). This is not the same for SNaNs or if
10057 signed zeros are involved. */
10058 if (!HONOR_SNANS (element_mode (arg0
))
10059 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10060 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10062 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10063 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10064 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10065 bool arg0rz
= false, arg0iz
= false;
10066 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10067 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10069 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10070 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10071 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10073 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10075 : build1 (REALPART_EXPR
, rtype
, arg1
));
10076 tree ip
= arg0i
? arg0i
10077 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10078 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10080 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10082 tree rp
= arg0r
? arg0r
10083 : build1 (REALPART_EXPR
, rtype
, arg0
);
10084 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10086 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10087 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10092 /* A - B -> A + (-B) if B is easily negatable. */
10093 if (negate_expr_p (arg1
)
10094 && !TYPE_OVERFLOW_SANITIZED (type
)
10095 && ((FLOAT_TYPE_P (type
)
10096 /* Avoid this transformation if B is a positive REAL_CST. */
10097 && (TREE_CODE (arg1
) != REAL_CST
10098 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
10099 || INTEGRAL_TYPE_P (type
)))
10100 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10101 fold_convert_loc (loc
, type
, arg0
),
10102 fold_convert_loc (loc
, type
,
10103 negate_expr (arg1
)));
10105 /* Fold &a[i] - &a[j] to i-j. */
10106 if (TREE_CODE (arg0
) == ADDR_EXPR
10107 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10108 && TREE_CODE (arg1
) == ADDR_EXPR
10109 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10111 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
10112 TREE_OPERAND (arg0
, 0),
10113 TREE_OPERAND (arg1
, 0));
10118 if (FLOAT_TYPE_P (type
)
10119 && flag_unsafe_math_optimizations
10120 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10121 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10122 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10125 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10126 one. Make sure the type is not saturating and has the signedness of
10127 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10128 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10129 if ((TREE_CODE (arg0
) == MULT_EXPR
10130 || TREE_CODE (arg1
) == MULT_EXPR
)
10131 && !TYPE_SATURATING (type
)
10132 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10133 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10134 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10136 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10144 /* (-A) * (-B) -> A * B */
10145 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10146 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10147 fold_convert_loc (loc
, type
,
10148 TREE_OPERAND (arg0
, 0)),
10149 fold_convert_loc (loc
, type
,
10150 negate_expr (arg1
)));
10151 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10152 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10153 fold_convert_loc (loc
, type
,
10154 negate_expr (arg0
)),
10155 fold_convert_loc (loc
, type
,
10156 TREE_OPERAND (arg1
, 0)));
10158 if (! FLOAT_TYPE_P (type
))
10160 /* Transform x * -C into -x * C if x is easily negatable. */
10161 if (TREE_CODE (arg1
) == INTEGER_CST
10162 && tree_int_cst_sgn (arg1
) == -1
10163 && negate_expr_p (arg0
)
10164 && (tem
= negate_expr (arg1
)) != arg1
10165 && !TREE_OVERFLOW (tem
))
10166 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10167 fold_convert_loc (loc
, type
,
10168 negate_expr (arg0
)),
10171 /* (a * (1 << b)) is (a << b) */
10172 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10173 && integer_onep (TREE_OPERAND (arg1
, 0)))
10174 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op0
,
10175 TREE_OPERAND (arg1
, 1));
10176 if (TREE_CODE (arg0
) == LSHIFT_EXPR
10177 && integer_onep (TREE_OPERAND (arg0
, 0)))
10178 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op1
,
10179 TREE_OPERAND (arg0
, 1));
10181 /* (A + A) * C -> A * 2 * C */
10182 if (TREE_CODE (arg0
) == PLUS_EXPR
10183 && TREE_CODE (arg1
) == INTEGER_CST
10184 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10185 TREE_OPERAND (arg0
, 1), 0))
10186 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10187 omit_one_operand_loc (loc
, type
,
10188 TREE_OPERAND (arg0
, 0),
10189 TREE_OPERAND (arg0
, 1)),
10190 fold_build2_loc (loc
, MULT_EXPR
, type
,
10191 build_int_cst (type
, 2) , arg1
));
10193 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10194 sign-changing only. */
10195 if (TREE_CODE (arg1
) == INTEGER_CST
10196 && TREE_CODE (arg0
) == EXACT_DIV_EXPR
10197 && operand_equal_p (arg1
, TREE_OPERAND (arg0
, 1), 0))
10198 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10200 strict_overflow_p
= false;
10201 if (TREE_CODE (arg1
) == INTEGER_CST
10202 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10203 &strict_overflow_p
)))
10205 if (strict_overflow_p
)
10206 fold_overflow_warning (("assuming signed overflow does not "
10207 "occur when simplifying "
10209 WARN_STRICT_OVERFLOW_MISC
);
10210 return fold_convert_loc (loc
, type
, tem
);
10213 /* Optimize z * conj(z) for integer complex numbers. */
10214 if (TREE_CODE (arg0
) == CONJ_EXPR
10215 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10216 return fold_mult_zconjz (loc
, type
, arg1
);
10217 if (TREE_CODE (arg1
) == CONJ_EXPR
10218 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10219 return fold_mult_zconjz (loc
, type
, arg0
);
10223 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10224 the result for floating point types due to rounding so it is applied
10225 only if -fassociative-math was specify. */
10226 if (flag_associative_math
10227 && TREE_CODE (arg0
) == RDIV_EXPR
10228 && TREE_CODE (arg1
) == REAL_CST
10229 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
10231 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
10234 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10235 TREE_OPERAND (arg0
, 1));
10238 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10239 if (operand_equal_p (arg0
, arg1
, 0))
10241 tree tem
= fold_strip_sign_ops (arg0
);
10242 if (tem
!= NULL_TREE
)
10244 tem
= fold_convert_loc (loc
, type
, tem
);
10245 return fold_build2_loc (loc
, MULT_EXPR
, type
, tem
, tem
);
10249 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10250 This is not the same for NaNs or if signed zeros are
10252 if (!HONOR_NANS (arg0
)
10253 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10254 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10255 && TREE_CODE (arg1
) == COMPLEX_CST
10256 && real_zerop (TREE_REALPART (arg1
)))
10258 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10259 if (real_onep (TREE_IMAGPART (arg1
)))
10261 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10262 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
10264 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
10265 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
10267 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10268 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
10269 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
10273 /* Optimize z * conj(z) for floating point complex numbers.
10274 Guarded by flag_unsafe_math_optimizations as non-finite
10275 imaginary components don't produce scalar results. */
10276 if (flag_unsafe_math_optimizations
10277 && TREE_CODE (arg0
) == CONJ_EXPR
10278 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10279 return fold_mult_zconjz (loc
, type
, arg1
);
10280 if (flag_unsafe_math_optimizations
10281 && TREE_CODE (arg1
) == CONJ_EXPR
10282 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10283 return fold_mult_zconjz (loc
, type
, arg0
);
10285 if (flag_unsafe_math_optimizations
)
10287 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10288 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10290 /* Optimizations of root(...)*root(...). */
10291 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
10294 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10295 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10297 /* Optimize sqrt(x)*sqrt(x) as x. */
10298 if (BUILTIN_SQRT_P (fcode0
)
10299 && operand_equal_p (arg00
, arg10
, 0)
10300 && ! HONOR_SNANS (element_mode (type
)))
10303 /* Optimize root(x)*root(y) as root(x*y). */
10304 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10305 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg00
, arg10
);
10306 return build_call_expr_loc (loc
, rootfn
, 1, arg
);
10309 /* Optimize expN(x)*expN(y) as expN(x+y). */
10310 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
10312 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10313 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10314 CALL_EXPR_ARG (arg0
, 0),
10315 CALL_EXPR_ARG (arg1
, 0));
10316 return build_call_expr_loc (loc
, expfn
, 1, arg
);
10319 /* Optimizations of pow(...)*pow(...). */
10320 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
10321 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
10322 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
10324 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10325 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10326 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10327 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10329 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10330 if (operand_equal_p (arg01
, arg11
, 0))
10332 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10333 tree arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
10335 return build_call_expr_loc (loc
, powfn
, 2, arg
, arg01
);
10338 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10339 if (operand_equal_p (arg00
, arg10
, 0))
10341 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10342 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10344 return build_call_expr_loc (loc
, powfn
, 2, arg00
, arg
);
10348 /* Optimize tan(x)*cos(x) as sin(x). */
10349 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
10350 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
10351 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
10352 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
10353 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
10354 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
10355 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10356 CALL_EXPR_ARG (arg1
, 0), 0))
10358 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
10360 if (sinfn
!= NULL_TREE
)
10361 return build_call_expr_loc (loc
, sinfn
, 1,
10362 CALL_EXPR_ARG (arg0
, 0));
10365 /* Optimize x*pow(x,c) as pow(x,c+1). */
10366 if (fcode1
== BUILT_IN_POW
10367 || fcode1
== BUILT_IN_POWF
10368 || fcode1
== BUILT_IN_POWL
)
10370 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10371 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10372 if (TREE_CODE (arg11
) == REAL_CST
10373 && !TREE_OVERFLOW (arg11
)
10374 && operand_equal_p (arg0
, arg10
, 0))
10376 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10380 c
= TREE_REAL_CST (arg11
);
10381 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10382 arg
= build_real (type
, c
);
10383 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10387 /* Optimize pow(x,c)*x as pow(x,c+1). */
10388 if (fcode0
== BUILT_IN_POW
10389 || fcode0
== BUILT_IN_POWF
10390 || fcode0
== BUILT_IN_POWL
)
10392 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10393 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10394 if (TREE_CODE (arg01
) == REAL_CST
10395 && !TREE_OVERFLOW (arg01
)
10396 && operand_equal_p (arg1
, arg00
, 0))
10398 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10402 c
= TREE_REAL_CST (arg01
);
10403 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10404 arg
= build_real (type
, c
);
10405 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
10409 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10410 if (!in_gimple_form
10412 && operand_equal_p (arg0
, arg1
, 0))
10414 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
10418 tree arg
= build_real (type
, dconst2
);
10419 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10428 /* Canonicalize (X & C1) | C2. */
10429 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10430 && TREE_CODE (arg1
) == INTEGER_CST
10431 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10433 int width
= TYPE_PRECISION (type
), w
;
10434 wide_int c1
= TREE_OPERAND (arg0
, 1);
10435 wide_int c2
= arg1
;
10437 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10438 if ((c1
& c2
) == c1
)
10439 return omit_one_operand_loc (loc
, type
, arg1
,
10440 TREE_OPERAND (arg0
, 0));
10442 wide_int msk
= wi::mask (width
, false,
10443 TYPE_PRECISION (TREE_TYPE (arg1
)));
10445 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10446 if (msk
.and_not (c1
| c2
) == 0)
10447 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
10448 TREE_OPERAND (arg0
, 0), arg1
);
10450 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10451 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10452 mode which allows further optimizations. */
10455 wide_int c3
= c1
.and_not (c2
);
10456 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
10458 wide_int mask
= wi::mask (w
, false,
10459 TYPE_PRECISION (type
));
10460 if (((c1
| c2
) & mask
) == mask
&& c1
.and_not (mask
) == 0)
10468 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
10469 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10470 TREE_OPERAND (arg0
, 0),
10471 wide_int_to_tree (type
,
10476 /* (X & ~Y) | (~X & Y) is X ^ Y */
10477 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10478 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10480 tree a0
, a1
, l0
, l1
, n0
, n1
;
10482 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10483 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10485 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10486 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10488 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
10489 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
10491 if ((operand_equal_p (n0
, a0
, 0)
10492 && operand_equal_p (n1
, a1
, 0))
10493 || (operand_equal_p (n0
, a1
, 0)
10494 && operand_equal_p (n1
, a0
, 0)))
10495 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
10498 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
10499 if (t1
!= NULL_TREE
)
10502 /* See if this can be simplified into a rotate first. If that
10503 is unsuccessful continue in the association code. */
10507 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10508 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10509 && INTEGRAL_TYPE_P (type
)
10510 && integer_onep (TREE_OPERAND (arg0
, 1))
10511 && integer_onep (arg1
))
10512 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
10513 build_zero_cst (TREE_TYPE (arg0
)));
10515 /* See if this can be simplified into a rotate first. If that
10516 is unsuccessful continue in the association code. */
10520 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
10521 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
10522 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10523 || (TREE_CODE (arg0
) == EQ_EXPR
10524 && integer_zerop (TREE_OPERAND (arg0
, 1))))
10525 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10526 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
10528 /* X & ~X , X & (X == 0), and X & !X are always zero. */
10529 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
10530 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10531 || (TREE_CODE (arg1
) == EQ_EXPR
10532 && integer_zerop (TREE_OPERAND (arg1
, 1))))
10533 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10534 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
10536 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10537 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10538 && INTEGRAL_TYPE_P (type
)
10539 && integer_onep (TREE_OPERAND (arg0
, 1))
10540 && integer_onep (arg1
))
10543 tem
= TREE_OPERAND (arg0
, 0);
10544 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
10545 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
10547 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
10548 build_zero_cst (TREE_TYPE (tem
)));
10550 /* Fold ~X & 1 as (X & 1) == 0. */
10551 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10552 && INTEGRAL_TYPE_P (type
)
10553 && integer_onep (arg1
))
10556 tem
= TREE_OPERAND (arg0
, 0);
10557 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
10558 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
10560 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
10561 build_zero_cst (TREE_TYPE (tem
)));
10563 /* Fold !X & 1 as X == 0. */
10564 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10565 && integer_onep (arg1
))
10567 tem
= TREE_OPERAND (arg0
, 0);
10568 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
10569 build_zero_cst (TREE_TYPE (tem
)));
10572 /* Fold (X ^ Y) & Y as ~X & Y. */
10573 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10574 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
10576 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10577 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10578 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
10579 fold_convert_loc (loc
, type
, arg1
));
10581 /* Fold (X ^ Y) & X as ~Y & X. */
10582 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10583 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
10584 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
10586 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10587 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10588 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
10589 fold_convert_loc (loc
, type
, arg1
));
10591 /* Fold X & (X ^ Y) as X & ~Y. */
10592 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
10593 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10595 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10596 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10597 fold_convert_loc (loc
, type
, arg0
),
10598 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
10600 /* Fold X & (Y ^ X) as ~Y & X. */
10601 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
10602 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
10603 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
10605 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10606 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10607 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
10608 fold_convert_loc (loc
, type
, arg0
));
10611 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10612 multiple of 1 << CST. */
10613 if (TREE_CODE (arg1
) == INTEGER_CST
)
10615 wide_int cst1
= arg1
;
10616 wide_int ncst1
= -cst1
;
10617 if ((cst1
& ncst1
) == ncst1
10618 && multiple_of_p (type
, arg0
,
10619 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
10620 return fold_convert_loc (loc
, type
, arg0
);
10623 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10625 if (TREE_CODE (arg1
) == INTEGER_CST
10626 && TREE_CODE (arg0
) == MULT_EXPR
10627 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10629 wide_int warg1
= arg1
;
10630 wide_int masked
= mask_with_tz (type
, warg1
, TREE_OPERAND (arg0
, 1));
10633 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
10635 else if (masked
!= warg1
)
10637 /* Avoid the transform if arg1 is a mask of some
10638 mode which allows further optimizations. */
10639 int pop
= wi::popcount (warg1
);
10640 if (!(pop
>= BITS_PER_UNIT
10641 && exact_log2 (pop
) != -1
10642 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
10643 return fold_build2_loc (loc
, code
, type
, op0
,
10644 wide_int_to_tree (type
, masked
));
10648 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10649 ((A & N) + B) & M -> (A + B) & M
10650 Similarly if (N & M) == 0,
10651 ((A | N) + B) & M -> (A + B) & M
10652 and for - instead of + (or unary - instead of +)
10653 and/or ^ instead of |.
10654 If B is constant and (B & M) == 0, fold into A & M. */
10655 if (TREE_CODE (arg1
) == INTEGER_CST
)
10657 wide_int cst1
= arg1
;
10658 if ((~cst1
!= 0) && (cst1
& (cst1
+ 1)) == 0
10659 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10660 && (TREE_CODE (arg0
) == PLUS_EXPR
10661 || TREE_CODE (arg0
) == MINUS_EXPR
10662 || TREE_CODE (arg0
) == NEGATE_EXPR
)
10663 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
10664 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
10670 /* Now we know that arg0 is (C + D) or (C - D) or
10671 -C and arg1 (M) is == (1LL << cst) - 1.
10672 Store C into PMOP[0] and D into PMOP[1]. */
10673 pmop
[0] = TREE_OPERAND (arg0
, 0);
10675 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
10677 pmop
[1] = TREE_OPERAND (arg0
, 1);
10681 if ((wi::max_value (TREE_TYPE (arg0
)) & cst1
) != cst1
)
10684 for (; which
>= 0; which
--)
10685 switch (TREE_CODE (pmop
[which
]))
10690 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
10693 cst0
= TREE_OPERAND (pmop
[which
], 1);
10695 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
10700 else if (cst0
!= 0)
10702 /* If C or D is of the form (A & N) where
10703 (N & M) == M, or of the form (A | N) or
10704 (A ^ N) where (N & M) == 0, replace it with A. */
10705 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
10708 /* If C or D is a N where (N & M) == 0, it can be
10709 omitted (assumed 0). */
10710 if ((TREE_CODE (arg0
) == PLUS_EXPR
10711 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
10712 && (cst1
& pmop
[which
]) == 0)
10713 pmop
[which
] = NULL
;
10719 /* Only build anything new if we optimized one or both arguments
10721 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
10722 || (TREE_CODE (arg0
) != NEGATE_EXPR
10723 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
10725 tree utype
= TREE_TYPE (arg0
);
10726 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10728 /* Perform the operations in a type that has defined
10729 overflow behavior. */
10730 utype
= unsigned_type_for (TREE_TYPE (arg0
));
10731 if (pmop
[0] != NULL
)
10732 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
10733 if (pmop
[1] != NULL
)
10734 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
10737 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
10738 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
10739 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
10741 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
10742 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
10744 else if (pmop
[0] != NULL
)
10746 else if (pmop
[1] != NULL
)
10749 return build_int_cst (type
, 0);
10751 else if (pmop
[0] == NULL
)
10752 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
10754 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
10756 /* TEM is now the new binary +, - or unary - replacement. */
10757 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
10758 fold_convert_loc (loc
, utype
, arg1
));
10759 return fold_convert_loc (loc
, type
, tem
);
10764 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
10765 if (t1
!= NULL_TREE
)
10767 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10768 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
10769 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
10771 prec
= element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
10773 wide_int mask
= wide_int::from (arg1
, prec
, UNSIGNED
);
10776 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10782 /* Don't touch a floating-point divide by zero unless the mode
10783 of the constant can represent infinity. */
10784 if (TREE_CODE (arg1
) == REAL_CST
10785 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
10786 && real_zerop (arg1
))
10789 /* (-A) / (-B) -> A / B */
10790 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10791 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
10792 TREE_OPERAND (arg0
, 0),
10793 negate_expr (arg1
));
10794 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10795 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
10796 negate_expr (arg0
),
10797 TREE_OPERAND (arg1
, 0));
10799 /* Convert A/B/C to A/(B*C). */
10800 if (flag_reciprocal_math
10801 && TREE_CODE (arg0
) == RDIV_EXPR
)
10802 return fold_build2_loc (loc
, RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
10803 fold_build2_loc (loc
, MULT_EXPR
, type
,
10804 TREE_OPERAND (arg0
, 1), arg1
));
10806 /* Convert A/(B/C) to (A/B)*C. */
10807 if (flag_reciprocal_math
10808 && TREE_CODE (arg1
) == RDIV_EXPR
)
10809 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10810 fold_build2_loc (loc
, RDIV_EXPR
, type
, arg0
,
10811 TREE_OPERAND (arg1
, 0)),
10812 TREE_OPERAND (arg1
, 1));
10814 /* Convert C1/(X*C2) into (C1/C2)/X. */
10815 if (flag_reciprocal_math
10816 && TREE_CODE (arg1
) == MULT_EXPR
10817 && TREE_CODE (arg0
) == REAL_CST
10818 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
10820 tree tem
= const_binop (RDIV_EXPR
, arg0
,
10821 TREE_OPERAND (arg1
, 1));
10823 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10824 TREE_OPERAND (arg1
, 0));
10827 if (flag_unsafe_math_optimizations
)
10829 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10830 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10832 /* Optimize sin(x)/cos(x) as tan(x). */
10833 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
10834 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
10835 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
10836 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10837 CALL_EXPR_ARG (arg1
, 0), 0))
10839 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
10841 if (tanfn
!= NULL_TREE
)
10842 return build_call_expr_loc (loc
, tanfn
, 1, CALL_EXPR_ARG (arg0
, 0));
10845 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10846 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
10847 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
10848 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
10849 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10850 CALL_EXPR_ARG (arg1
, 0), 0))
10852 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
10854 if (tanfn
!= NULL_TREE
)
10856 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
10857 CALL_EXPR_ARG (arg0
, 0));
10858 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
10859 build_real (type
, dconst1
), tmp
);
10863 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10864 NaNs or Infinities. */
10865 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
10866 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
10867 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
10869 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10870 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
10872 if (! HONOR_NANS (arg00
)
10873 && ! HONOR_INFINITIES (element_mode (arg00
))
10874 && operand_equal_p (arg00
, arg01
, 0))
10876 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
10878 if (cosfn
!= NULL_TREE
)
10879 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
10883 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10884 NaNs or Infinities. */
10885 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
10886 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
10887 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
10889 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10890 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
10892 if (! HONOR_NANS (arg00
)
10893 && ! HONOR_INFINITIES (element_mode (arg00
))
10894 && operand_equal_p (arg00
, arg01
, 0))
10896 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
10898 if (cosfn
!= NULL_TREE
)
10900 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
10901 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
10902 build_real (type
, dconst1
),
10908 /* Optimize pow(x,c)/x as pow(x,c-1). */
10909 if (fcode0
== BUILT_IN_POW
10910 || fcode0
== BUILT_IN_POWF
10911 || fcode0
== BUILT_IN_POWL
)
10913 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10914 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10915 if (TREE_CODE (arg01
) == REAL_CST
10916 && !TREE_OVERFLOW (arg01
)
10917 && operand_equal_p (arg1
, arg00
, 0))
10919 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10923 c
= TREE_REAL_CST (arg01
);
10924 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
10925 arg
= build_real (type
, c
);
10926 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
10930 /* Optimize a/root(b/c) into a*root(c/b). */
10931 if (BUILTIN_ROOT_P (fcode1
))
10933 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
10935 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
10937 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10938 tree b
= TREE_OPERAND (rootarg
, 0);
10939 tree c
= TREE_OPERAND (rootarg
, 1);
10941 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
10943 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
10944 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
10948 /* Optimize x/expN(y) into x*expN(-y). */
10949 if (BUILTIN_EXPONENT_P (fcode1
))
10951 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10952 tree arg
= negate_expr (CALL_EXPR_ARG (arg1
, 0));
10953 arg1
= build_call_expr_loc (loc
,
10955 fold_convert_loc (loc
, type
, arg
));
10956 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
10959 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10960 if (fcode1
== BUILT_IN_POW
10961 || fcode1
== BUILT_IN_POWF
10962 || fcode1
== BUILT_IN_POWL
)
10964 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10965 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10966 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10967 tree neg11
= fold_convert_loc (loc
, type
,
10968 negate_expr (arg11
));
10969 arg1
= build_call_expr_loc (loc
, powfn
, 2, arg10
, neg11
);
10970 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
10975 case TRUNC_DIV_EXPR
:
10976 /* Optimize (X & (-A)) / A where A is a power of 2,
10978 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10979 && !TYPE_UNSIGNED (type
) && TREE_CODE (arg1
) == INTEGER_CST
10980 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) > 0)
10982 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (arg1
),
10983 arg1
, TREE_OPERAND (arg0
, 1));
10984 if (sum
&& integer_zerop (sum
)) {
10985 tree pow2
= build_int_cst (integer_type_node
,
10986 wi::exact_log2 (arg1
));
10987 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
10988 TREE_OPERAND (arg0
, 0), pow2
);
10994 case FLOOR_DIV_EXPR
:
10995 /* Simplify A / (B << N) where A and B are positive and B is
10996 a power of 2, to A >> (N + log2(B)). */
10997 strict_overflow_p
= false;
10998 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10999 && (TYPE_UNSIGNED (type
)
11000 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11002 tree sval
= TREE_OPERAND (arg1
, 0);
11003 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
11005 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
11006 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
11007 wi::exact_log2 (sval
));
11009 if (strict_overflow_p
)
11010 fold_overflow_warning (("assuming signed overflow does not "
11011 "occur when simplifying A / (B << N)"),
11012 WARN_STRICT_OVERFLOW_MISC
);
11014 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
11016 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11017 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
11023 case ROUND_DIV_EXPR
:
11024 case CEIL_DIV_EXPR
:
11025 case EXACT_DIV_EXPR
:
11026 if (integer_zerop (arg1
))
11029 /* Convert -A / -B to A / B when the type is signed and overflow is
11031 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11032 && TREE_CODE (arg0
) == NEGATE_EXPR
11033 && negate_expr_p (arg1
))
11035 if (INTEGRAL_TYPE_P (type
))
11036 fold_overflow_warning (("assuming signed overflow does not occur "
11037 "when distributing negation across "
11039 WARN_STRICT_OVERFLOW_MISC
);
11040 return fold_build2_loc (loc
, code
, type
,
11041 fold_convert_loc (loc
, type
,
11042 TREE_OPERAND (arg0
, 0)),
11043 fold_convert_loc (loc
, type
,
11044 negate_expr (arg1
)));
11046 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11047 && TREE_CODE (arg1
) == NEGATE_EXPR
11048 && negate_expr_p (arg0
))
11050 if (INTEGRAL_TYPE_P (type
))
11051 fold_overflow_warning (("assuming signed overflow does not occur "
11052 "when distributing negation across "
11054 WARN_STRICT_OVERFLOW_MISC
);
11055 return fold_build2_loc (loc
, code
, type
,
11056 fold_convert_loc (loc
, type
,
11057 negate_expr (arg0
)),
11058 fold_convert_loc (loc
, type
,
11059 TREE_OPERAND (arg1
, 0)));
11062 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11063 operation, EXACT_DIV_EXPR.
11065 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11066 At one time others generated faster code, it's not clear if they do
11067 after the last round to changes to the DIV code in expmed.c. */
11068 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
11069 && multiple_of_p (type
, arg0
, arg1
))
11070 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
, arg0
, arg1
);
11072 strict_overflow_p
= false;
11073 if (TREE_CODE (arg1
) == INTEGER_CST
11074 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11075 &strict_overflow_p
)))
11077 if (strict_overflow_p
)
11078 fold_overflow_warning (("assuming signed overflow does not occur "
11079 "when simplifying division"),
11080 WARN_STRICT_OVERFLOW_MISC
);
11081 return fold_convert_loc (loc
, type
, tem
);
11086 case CEIL_MOD_EXPR
:
11087 case FLOOR_MOD_EXPR
:
11088 case ROUND_MOD_EXPR
:
11089 case TRUNC_MOD_EXPR
:
11090 strict_overflow_p
= false;
11091 if (TREE_CODE (arg1
) == INTEGER_CST
11092 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11093 &strict_overflow_p
)))
11095 if (strict_overflow_p
)
11096 fold_overflow_warning (("assuming signed overflow does not occur "
11097 "when simplifying modulus"),
11098 WARN_STRICT_OVERFLOW_MISC
);
11099 return fold_convert_loc (loc
, type
, tem
);
11108 /* Since negative shift count is not well-defined,
11109 don't try to compute it in the compiler. */
11110 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
11113 prec
= element_precision (type
);
11115 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11116 if (TREE_CODE (op0
) == code
&& tree_fits_uhwi_p (arg1
)
11117 && tree_to_uhwi (arg1
) < prec
11118 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11119 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
11121 unsigned int low
= (tree_to_uhwi (TREE_OPERAND (arg0
, 1))
11122 + tree_to_uhwi (arg1
));
11124 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11125 being well defined. */
11128 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
11130 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
11131 return omit_one_operand_loc (loc
, type
, build_zero_cst (type
),
11132 TREE_OPERAND (arg0
, 0));
11137 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
11138 build_int_cst (TREE_TYPE (arg1
), low
));
11141 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11142 into x & ((unsigned)-1 >> c) for unsigned types. */
11143 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
11144 || (TYPE_UNSIGNED (type
)
11145 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
11146 && tree_fits_uhwi_p (arg1
)
11147 && tree_to_uhwi (arg1
) < prec
11148 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11149 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
11151 HOST_WIDE_INT low0
= tree_to_uhwi (TREE_OPERAND (arg0
, 1));
11152 HOST_WIDE_INT low1
= tree_to_uhwi (arg1
);
11158 arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11160 lshift
= build_minus_one_cst (type
);
11161 lshift
= const_binop (code
, lshift
, arg1
);
11163 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, arg00
, lshift
);
11167 /* If we have a rotate of a bit operation with the rotate count and
11168 the second operand of the bit operation both constant,
11169 permute the two operations. */
11170 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11171 && (TREE_CODE (arg0
) == BIT_AND_EXPR
11172 || TREE_CODE (arg0
) == BIT_IOR_EXPR
11173 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
11174 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11175 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
11176 fold_build2_loc (loc
, code
, type
,
11177 TREE_OPERAND (arg0
, 0), arg1
),
11178 fold_build2_loc (loc
, code
, type
,
11179 TREE_OPERAND (arg0
, 1), arg1
));
11181 /* Two consecutive rotates adding up to the some integer
11182 multiple of the precision of the type can be ignored. */
11183 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11184 && TREE_CODE (arg0
) == RROTATE_EXPR
11185 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11186 && wi::umod_trunc (wi::add (arg1
, TREE_OPERAND (arg0
, 1)),
11188 return TREE_OPERAND (arg0
, 0);
11193 tem
= fold_minmax (loc
, MIN_EXPR
, type
, arg0
, arg1
);
11199 tem
= fold_minmax (loc
, MAX_EXPR
, type
, arg0
, arg1
);
11204 case TRUTH_ANDIF_EXPR
:
11205 /* Note that the operands of this must be ints
11206 and their values must be 0 or 1.
11207 ("true" is a fixed value perhaps depending on the language.) */
11208 /* If first arg is constant zero, return it. */
11209 if (integer_zerop (arg0
))
11210 return fold_convert_loc (loc
, type
, arg0
);
11211 case TRUTH_AND_EXPR
:
11212 /* If either arg is constant true, drop it. */
11213 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11214 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
11215 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
11216 /* Preserve sequence points. */
11217 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
11218 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11219 /* If second arg is constant zero, result is zero, but first arg
11220 must be evaluated. */
11221 if (integer_zerop (arg1
))
11222 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11223 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11224 case will be handled here. */
11225 if (integer_zerop (arg0
))
11226 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11228 /* !X && X is always false. */
11229 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11230 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11231 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11232 /* X && !X is always false. */
11233 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11234 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11235 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11237 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11238 means A >= Y && A != MAX, but in this case we know that
11241 if (!TREE_SIDE_EFFECTS (arg0
)
11242 && !TREE_SIDE_EFFECTS (arg1
))
11244 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
11245 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
11246 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
11248 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
11249 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
11250 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
11253 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
11259 case TRUTH_ORIF_EXPR
:
11260 /* Note that the operands of this must be ints
11261 and their values must be 0 or true.
11262 ("true" is a fixed value perhaps depending on the language.) */
11263 /* If first arg is constant true, return it. */
11264 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11265 return fold_convert_loc (loc
, type
, arg0
);
11266 case TRUTH_OR_EXPR
:
11267 /* If either arg is constant zero, drop it. */
11268 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
11269 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
11270 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
11271 /* Preserve sequence points. */
11272 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
11273 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11274 /* If second arg is constant true, result is true, but we must
11275 evaluate first arg. */
11276 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
11277 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11278 /* Likewise for first arg, but note this only occurs here for
11280 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11281 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11283 /* !X || X is always true. */
11284 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11285 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11286 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
11287 /* X || !X is always true. */
11288 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11289 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11290 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
11292 /* (X && !Y) || (!X && Y) is X ^ Y */
11293 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
11294 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
11296 tree a0
, a1
, l0
, l1
, n0
, n1
;
11298 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11299 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11301 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11302 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11304 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
11305 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
11307 if ((operand_equal_p (n0
, a0
, 0)
11308 && operand_equal_p (n1
, a1
, 0))
11309 || (operand_equal_p (n0
, a1
, 0)
11310 && operand_equal_p (n1
, a0
, 0)))
11311 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
11314 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
11320 case TRUTH_XOR_EXPR
:
11321 /* If the second arg is constant zero, drop it. */
11322 if (integer_zerop (arg1
))
11323 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11324 /* If the second arg is constant true, this is a logical inversion. */
11325 if (integer_onep (arg1
))
11327 tem
= invert_truthvalue_loc (loc
, arg0
);
11328 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
11330 /* Identical arguments cancel to zero. */
11331 if (operand_equal_p (arg0
, arg1
, 0))
11332 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11334 /* !X ^ X is always true. */
11335 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11336 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11337 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
11339 /* X ^ !X is always true. */
11340 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11341 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11342 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
11351 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
11352 if (tem
!= NULL_TREE
)
11355 /* bool_var != 0 becomes bool_var. */
11356 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
11357 && code
== NE_EXPR
)
11358 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11360 /* bool_var == 1 becomes bool_var. */
11361 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
11362 && code
== EQ_EXPR
)
11363 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11365 /* bool_var != 1 becomes !bool_var. */
11366 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
11367 && code
== NE_EXPR
)
11368 return fold_convert_loc (loc
, type
,
11369 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
11370 TREE_TYPE (arg0
), arg0
));
11372 /* bool_var == 0 becomes !bool_var. */
11373 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
11374 && code
== EQ_EXPR
)
11375 return fold_convert_loc (loc
, type
,
11376 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
11377 TREE_TYPE (arg0
), arg0
));
11379 /* !exp != 0 becomes !exp */
11380 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
11381 && code
== NE_EXPR
)
11382 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11384 /* If this is an equality comparison of the address of two non-weak,
11385 unaliased symbols neither of which are extern (since we do not
11386 have access to attributes for externs), then we know the result. */
11387 if (TREE_CODE (arg0
) == ADDR_EXPR
11388 && DECL_P (TREE_OPERAND (arg0
, 0))
11389 && TREE_CODE (arg1
) == ADDR_EXPR
11390 && DECL_P (TREE_OPERAND (arg1
, 0)))
11394 if (decl_in_symtab_p (TREE_OPERAND (arg0
, 0))
11395 && decl_in_symtab_p (TREE_OPERAND (arg1
, 0)))
11396 equal
= symtab_node::get_create (TREE_OPERAND (arg0
, 0))
11397 ->equal_address_to (symtab_node::get_create
11398 (TREE_OPERAND (arg1
, 0)));
11400 equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
11402 return constant_boolean_node (equal
11403 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
11407 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11408 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11409 && TREE_CODE (arg1
) == INTEGER_CST
11410 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11411 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
11412 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
11413 fold_convert_loc (loc
,
11416 TREE_OPERAND (arg0
, 1)));
11418 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
11419 if ((TREE_CODE (arg0
) == PLUS_EXPR
11420 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
11421 || TREE_CODE (arg0
) == MINUS_EXPR
)
11422 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
11425 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11426 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
11428 tree val
= TREE_OPERAND (arg0
, 1);
11429 return omit_two_operands_loc (loc
, type
,
11430 fold_build2_loc (loc
, code
, type
,
11432 build_int_cst (TREE_TYPE (val
),
11434 TREE_OPERAND (arg0
, 0), arg1
);
11437 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
11438 if (TREE_CODE (arg0
) == MINUS_EXPR
11439 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
11440 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
11443 && wi::extract_uhwi (TREE_OPERAND (arg0
, 0), 0, 1) == 1)
11445 return omit_two_operands_loc (loc
, type
,
11447 ? boolean_true_node
: boolean_false_node
,
11448 TREE_OPERAND (arg0
, 1), arg1
);
11451 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11452 if (TREE_CODE (arg0
) == ABS_EXPR
11453 && (integer_zerop (arg1
) || real_zerop (arg1
)))
11454 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
11456 /* If this is an EQ or NE comparison with zero and ARG0 is
11457 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11458 two operations, but the latter can be done in one less insn
11459 on machines that have only two-operand insns or on which a
11460 constant cannot be the first operand. */
11461 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11462 && integer_zerop (arg1
))
11464 tree arg00
= TREE_OPERAND (arg0
, 0);
11465 tree arg01
= TREE_OPERAND (arg0
, 1);
11466 if (TREE_CODE (arg00
) == LSHIFT_EXPR
11467 && integer_onep (TREE_OPERAND (arg00
, 0)))
11469 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
11470 arg01
, TREE_OPERAND (arg00
, 1));
11471 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
11472 build_int_cst (TREE_TYPE (arg0
), 1));
11473 return fold_build2_loc (loc
, code
, type
,
11474 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
11477 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
11478 && integer_onep (TREE_OPERAND (arg01
, 0)))
11480 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
11481 arg00
, TREE_OPERAND (arg01
, 1));
11482 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
11483 build_int_cst (TREE_TYPE (arg0
), 1));
11484 return fold_build2_loc (loc
, code
, type
,
11485 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
11490 /* If this is an NE or EQ comparison of zero against the result of a
11491 signed MOD operation whose second operand is a power of 2, make
11492 the MOD operation unsigned since it is simpler and equivalent. */
11493 if (integer_zerop (arg1
)
11494 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
11495 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
11496 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
11497 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
11498 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
11499 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
11501 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
11502 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
11503 fold_convert_loc (loc
, newtype
,
11504 TREE_OPERAND (arg0
, 0)),
11505 fold_convert_loc (loc
, newtype
,
11506 TREE_OPERAND (arg0
, 1)));
11508 return fold_build2_loc (loc
, code
, type
, newmod
,
11509 fold_convert_loc (loc
, newtype
, arg1
));
11512 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11513 C1 is a valid shift constant, and C2 is a power of two, i.e.
11515 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11516 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
11517 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
11519 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11520 && integer_zerop (arg1
))
11522 tree itype
= TREE_TYPE (arg0
);
11523 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
11524 prec
= TYPE_PRECISION (itype
);
11526 /* Check for a valid shift count. */
11527 if (wi::ltu_p (arg001
, prec
))
11529 tree arg01
= TREE_OPERAND (arg0
, 1);
11530 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
11531 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
11532 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11533 can be rewritten as (X & (C2 << C1)) != 0. */
11534 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
11536 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
11537 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
11538 return fold_build2_loc (loc
, code
, type
, tem
,
11539 fold_convert_loc (loc
, itype
, arg1
));
11541 /* Otherwise, for signed (arithmetic) shifts,
11542 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11543 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11544 else if (!TYPE_UNSIGNED (itype
))
11545 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
11546 arg000
, build_int_cst (itype
, 0));
11547 /* Otherwise, of unsigned (logical) shifts,
11548 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11549 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11551 return omit_one_operand_loc (loc
, type
,
11552 code
== EQ_EXPR
? integer_one_node
11553 : integer_zero_node
,
11558 /* If we have (A & C) == C where C is a power of 2, convert this into
11559 (A & C) != 0. Similarly for NE_EXPR. */
11560 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11561 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11562 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11563 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11564 arg0
, fold_convert_loc (loc
, TREE_TYPE (arg0
),
11565 integer_zero_node
));
11567 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11568 bit, then fold the expression into A < 0 or A >= 0. */
11569 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
, type
);
11573 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11574 Similarly for NE_EXPR. */
11575 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11576 && TREE_CODE (arg1
) == INTEGER_CST
11577 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11579 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
11580 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
11581 TREE_OPERAND (arg0
, 1));
11583 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
11584 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
11586 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
11587 if (integer_nonzerop (dandnotc
))
11588 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
11591 /* If this is a comparison of a field, we may be able to simplify it. */
11592 if ((TREE_CODE (arg0
) == COMPONENT_REF
11593 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
11594 /* Handle the constant case even without -O
11595 to make sure the warnings are given. */
11596 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
11598 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
11603 /* Optimize comparisons of strlen vs zero to a compare of the
11604 first character of the string vs zero. To wit,
11605 strlen(ptr) == 0 => *ptr == 0
11606 strlen(ptr) != 0 => *ptr != 0
11607 Other cases should reduce to one of these two (or a constant)
11608 due to the return value of strlen being unsigned. */
11609 if (TREE_CODE (arg0
) == CALL_EXPR
11610 && integer_zerop (arg1
))
11612 tree fndecl
= get_callee_fndecl (arg0
);
11615 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
11616 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
11617 && call_expr_nargs (arg0
) == 1
11618 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
11620 tree iref
= build_fold_indirect_ref_loc (loc
,
11621 CALL_EXPR_ARG (arg0
, 0));
11622 return fold_build2_loc (loc
, code
, type
, iref
,
11623 build_int_cst (TREE_TYPE (iref
), 0));
11627 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11628 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11629 if (TREE_CODE (arg0
) == RSHIFT_EXPR
11630 && integer_zerop (arg1
)
11631 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11633 tree arg00
= TREE_OPERAND (arg0
, 0);
11634 tree arg01
= TREE_OPERAND (arg0
, 1);
11635 tree itype
= TREE_TYPE (arg00
);
11636 if (wi::eq_p (arg01
, element_precision (itype
) - 1))
11638 if (TYPE_UNSIGNED (itype
))
11640 itype
= signed_type_for (itype
);
11641 arg00
= fold_convert_loc (loc
, itype
, arg00
);
11643 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
11644 type
, arg00
, build_zero_cst (itype
));
11648 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11649 (X & C) == 0 when C is a single bit. */
11650 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11651 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
11652 && integer_zerop (arg1
)
11653 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
11655 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
11656 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
11657 TREE_OPERAND (arg0
, 1));
11658 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
11660 fold_convert_loc (loc
, TREE_TYPE (arg0
),
11664 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11665 constant C is a power of two, i.e. a single bit. */
11666 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11667 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
11668 && integer_zerop (arg1
)
11669 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11670 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11671 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
11673 tree arg00
= TREE_OPERAND (arg0
, 0);
11674 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11675 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
11678 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11679 when is C is a power of two, i.e. a single bit. */
11680 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11681 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
11682 && integer_zerop (arg1
)
11683 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11684 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11685 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
11687 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
11688 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
11689 arg000
, TREE_OPERAND (arg0
, 1));
11690 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11691 tem
, build_int_cst (TREE_TYPE (tem
), 0));
11694 if (integer_zerop (arg1
)
11695 && tree_expr_nonzero_p (arg0
))
11697 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
11698 return omit_one_operand_loc (loc
, type
, res
, arg0
);
11701 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11702 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11703 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
11705 tree arg00
= TREE_OPERAND (arg0
, 0);
11706 tree arg01
= TREE_OPERAND (arg0
, 1);
11707 tree arg10
= TREE_OPERAND (arg1
, 0);
11708 tree arg11
= TREE_OPERAND (arg1
, 1);
11709 tree itype
= TREE_TYPE (arg0
);
11711 if (operand_equal_p (arg01
, arg11
, 0))
11712 return fold_build2_loc (loc
, code
, type
,
11713 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
11714 fold_build2_loc (loc
,
11715 BIT_XOR_EXPR
, itype
,
11718 build_zero_cst (itype
));
11720 if (operand_equal_p (arg01
, arg10
, 0))
11721 return fold_build2_loc (loc
, code
, type
,
11722 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
11723 fold_build2_loc (loc
,
11724 BIT_XOR_EXPR
, itype
,
11727 build_zero_cst (itype
));
11729 if (operand_equal_p (arg00
, arg11
, 0))
11730 return fold_build2_loc (loc
, code
, type
,
11731 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
11732 fold_build2_loc (loc
,
11733 BIT_XOR_EXPR
, itype
,
11736 build_zero_cst (itype
));
11738 if (operand_equal_p (arg00
, arg10
, 0))
11739 return fold_build2_loc (loc
, code
, type
,
11740 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
11741 fold_build2_loc (loc
,
11742 BIT_XOR_EXPR
, itype
,
11745 build_zero_cst (itype
));
11748 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11749 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
11751 tree arg00
= TREE_OPERAND (arg0
, 0);
11752 tree arg01
= TREE_OPERAND (arg0
, 1);
11753 tree arg10
= TREE_OPERAND (arg1
, 0);
11754 tree arg11
= TREE_OPERAND (arg1
, 1);
11755 tree itype
= TREE_TYPE (arg0
);
11757 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11758 operand_equal_p guarantees no side-effects so we don't need
11759 to use omit_one_operand on Z. */
11760 if (operand_equal_p (arg01
, arg11
, 0))
11761 return fold_build2_loc (loc
, code
, type
, arg00
,
11762 fold_convert_loc (loc
, TREE_TYPE (arg00
),
11764 if (operand_equal_p (arg01
, arg10
, 0))
11765 return fold_build2_loc (loc
, code
, type
, arg00
,
11766 fold_convert_loc (loc
, TREE_TYPE (arg00
),
11768 if (operand_equal_p (arg00
, arg11
, 0))
11769 return fold_build2_loc (loc
, code
, type
, arg01
,
11770 fold_convert_loc (loc
, TREE_TYPE (arg01
),
11772 if (operand_equal_p (arg00
, arg10
, 0))
11773 return fold_build2_loc (loc
, code
, type
, arg01
,
11774 fold_convert_loc (loc
, TREE_TYPE (arg01
),
11777 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11778 if (TREE_CODE (arg01
) == INTEGER_CST
11779 && TREE_CODE (arg11
) == INTEGER_CST
)
11781 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
11782 fold_convert_loc (loc
, itype
, arg11
));
11783 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
11784 return fold_build2_loc (loc
, code
, type
, tem
,
11785 fold_convert_loc (loc
, itype
, arg10
));
11789 /* Attempt to simplify equality/inequality comparisons of complex
11790 values. Only lower the comparison if the result is known or
11791 can be simplified to a single scalar comparison. */
11792 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
11793 || TREE_CODE (arg0
) == COMPLEX_CST
)
11794 && (TREE_CODE (arg1
) == COMPLEX_EXPR
11795 || TREE_CODE (arg1
) == COMPLEX_CST
))
11797 tree real0
, imag0
, real1
, imag1
;
11800 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
11802 real0
= TREE_OPERAND (arg0
, 0);
11803 imag0
= TREE_OPERAND (arg0
, 1);
11807 real0
= TREE_REALPART (arg0
);
11808 imag0
= TREE_IMAGPART (arg0
);
11811 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
11813 real1
= TREE_OPERAND (arg1
, 0);
11814 imag1
= TREE_OPERAND (arg1
, 1);
11818 real1
= TREE_REALPART (arg1
);
11819 imag1
= TREE_IMAGPART (arg1
);
11822 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
11823 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
11825 if (integer_zerop (rcond
))
11827 if (code
== EQ_EXPR
)
11828 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
11830 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
11834 if (code
== NE_EXPR
)
11835 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
11837 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
11841 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
11842 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
11844 if (integer_zerop (icond
))
11846 if (code
== EQ_EXPR
)
11847 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
11849 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
11853 if (code
== NE_EXPR
)
11854 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
11856 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
11867 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
11868 if (tem
!= NULL_TREE
)
11871 /* Transform comparisons of the form X +- C CMP X. */
11872 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
11873 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11874 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
11875 && !HONOR_SNANS (arg0
))
11876 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11877 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
11879 tree arg01
= TREE_OPERAND (arg0
, 1);
11880 enum tree_code code0
= TREE_CODE (arg0
);
11883 if (TREE_CODE (arg01
) == REAL_CST
)
11884 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
11886 is_positive
= tree_int_cst_sgn (arg01
);
11888 /* (X - c) > X becomes false. */
11889 if (code
== GT_EXPR
11890 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
11891 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
11893 if (TREE_CODE (arg01
) == INTEGER_CST
11894 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11895 fold_overflow_warning (("assuming signed overflow does not "
11896 "occur when assuming that (X - c) > X "
11897 "is always false"),
11898 WARN_STRICT_OVERFLOW_ALL
);
11899 return constant_boolean_node (0, type
);
11902 /* Likewise (X + c) < X becomes false. */
11903 if (code
== LT_EXPR
11904 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11905 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11907 if (TREE_CODE (arg01
) == INTEGER_CST
11908 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11909 fold_overflow_warning (("assuming signed overflow does not "
11910 "occur when assuming that "
11911 "(X + c) < X is always false"),
11912 WARN_STRICT_OVERFLOW_ALL
);
11913 return constant_boolean_node (0, type
);
11916 /* Convert (X - c) <= X to true. */
11917 if (!HONOR_NANS (arg1
)
11919 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
11920 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
11922 if (TREE_CODE (arg01
) == INTEGER_CST
11923 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11924 fold_overflow_warning (("assuming signed overflow does not "
11925 "occur when assuming that "
11926 "(X - c) <= X is always true"),
11927 WARN_STRICT_OVERFLOW_ALL
);
11928 return constant_boolean_node (1, type
);
11931 /* Convert (X + c) >= X to true. */
11932 if (!HONOR_NANS (arg1
)
11934 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11935 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11937 if (TREE_CODE (arg01
) == INTEGER_CST
11938 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11939 fold_overflow_warning (("assuming signed overflow does not "
11940 "occur when assuming that "
11941 "(X + c) >= X is always true"),
11942 WARN_STRICT_OVERFLOW_ALL
);
11943 return constant_boolean_node (1, type
);
11946 if (TREE_CODE (arg01
) == INTEGER_CST
)
11948 /* Convert X + c > X and X - c < X to true for integers. */
11949 if (code
== GT_EXPR
11950 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
11951 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
11953 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11954 fold_overflow_warning (("assuming signed overflow does "
11955 "not occur when assuming that "
11956 "(X + c) > X is always true"),
11957 WARN_STRICT_OVERFLOW_ALL
);
11958 return constant_boolean_node (1, type
);
11961 if (code
== LT_EXPR
11962 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
11963 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
11965 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11966 fold_overflow_warning (("assuming signed overflow does "
11967 "not occur when assuming that "
11968 "(X - c) < X is always true"),
11969 WARN_STRICT_OVERFLOW_ALL
);
11970 return constant_boolean_node (1, type
);
11973 /* Convert X + c <= X and X - c >= X to false for integers. */
11974 if (code
== LE_EXPR
11975 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
11976 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
11978 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11979 fold_overflow_warning (("assuming signed overflow does "
11980 "not occur when assuming that "
11981 "(X + c) <= X is always false"),
11982 WARN_STRICT_OVERFLOW_ALL
);
11983 return constant_boolean_node (0, type
);
11986 if (code
== GE_EXPR
11987 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
11988 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
11990 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11991 fold_overflow_warning (("assuming signed overflow does "
11992 "not occur when assuming that "
11993 "(X - c) >= X is always false"),
11994 WARN_STRICT_OVERFLOW_ALL
);
11995 return constant_boolean_node (0, type
);
12000 /* Comparisons with the highest or lowest possible integer of
12001 the specified precision will have known values. */
12003 tree arg1_type
= TREE_TYPE (arg1
);
12004 unsigned int prec
= TYPE_PRECISION (arg1_type
);
12006 if (TREE_CODE (arg1
) == INTEGER_CST
12007 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
12009 wide_int max
= wi::max_value (arg1_type
);
12010 wide_int signed_max
= wi::max_value (prec
, SIGNED
);
12011 wide_int min
= wi::min_value (arg1_type
);
12013 if (wi::eq_p (arg1
, max
))
12017 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12020 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
12023 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12026 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
12028 /* The GE_EXPR and LT_EXPR cases above are not normally
12029 reached because of previous transformations. */
12034 else if (wi::eq_p (arg1
, max
- 1))
12038 arg1
= const_binop (PLUS_EXPR
, arg1
,
12039 build_int_cst (TREE_TYPE (arg1
), 1));
12040 return fold_build2_loc (loc
, EQ_EXPR
, type
,
12041 fold_convert_loc (loc
,
12042 TREE_TYPE (arg1
), arg0
),
12045 arg1
= const_binop (PLUS_EXPR
, arg1
,
12046 build_int_cst (TREE_TYPE (arg1
), 1));
12047 return fold_build2_loc (loc
, NE_EXPR
, type
,
12048 fold_convert_loc (loc
, TREE_TYPE (arg1
),
12054 else if (wi::eq_p (arg1
, min
))
12058 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12061 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
12064 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12067 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
12072 else if (wi::eq_p (arg1
, min
+ 1))
12076 arg1
= const_binop (MINUS_EXPR
, arg1
,
12077 build_int_cst (TREE_TYPE (arg1
), 1));
12078 return fold_build2_loc (loc
, NE_EXPR
, type
,
12079 fold_convert_loc (loc
,
12080 TREE_TYPE (arg1
), arg0
),
12083 arg1
= const_binop (MINUS_EXPR
, arg1
,
12084 build_int_cst (TREE_TYPE (arg1
), 1));
12085 return fold_build2_loc (loc
, EQ_EXPR
, type
,
12086 fold_convert_loc (loc
, TREE_TYPE (arg1
),
12093 else if (wi::eq_p (arg1
, signed_max
)
12094 && TYPE_UNSIGNED (arg1_type
)
12095 /* We will flip the signedness of the comparison operator
12096 associated with the mode of arg1, so the sign bit is
12097 specified by this mode. Check that arg1 is the signed
12098 max associated with this sign bit. */
12099 && prec
== GET_MODE_PRECISION (TYPE_MODE (arg1_type
))
12100 /* signed_type does not work on pointer types. */
12101 && INTEGRAL_TYPE_P (arg1_type
))
12103 /* The following case also applies to X < signed_max+1
12104 and X >= signed_max+1 because previous transformations. */
12105 if (code
== LE_EXPR
|| code
== GT_EXPR
)
12107 tree st
= signed_type_for (arg1_type
);
12108 return fold_build2_loc (loc
,
12109 code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
12110 type
, fold_convert_loc (loc
, st
, arg0
),
12111 build_int_cst (st
, 0));
12117 /* If we are comparing an ABS_EXPR with a constant, we can
12118 convert all the cases into explicit comparisons, but they may
12119 well not be faster than doing the ABS and one comparison.
12120 But ABS (X) <= C is a range comparison, which becomes a subtraction
12121 and a comparison, and is probably faster. */
12122 if (code
== LE_EXPR
12123 && TREE_CODE (arg1
) == INTEGER_CST
12124 && TREE_CODE (arg0
) == ABS_EXPR
12125 && ! TREE_SIDE_EFFECTS (arg0
)
12126 && (0 != (tem
= negate_expr (arg1
)))
12127 && TREE_CODE (tem
) == INTEGER_CST
12128 && !TREE_OVERFLOW (tem
))
12129 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
12130 build2 (GE_EXPR
, type
,
12131 TREE_OPERAND (arg0
, 0), tem
),
12132 build2 (LE_EXPR
, type
,
12133 TREE_OPERAND (arg0
, 0), arg1
));
12135 /* Convert ABS_EXPR<x> >= 0 to true. */
12136 strict_overflow_p
= false;
12137 if (code
== GE_EXPR
12138 && (integer_zerop (arg1
)
12139 || (! HONOR_NANS (arg0
)
12140 && real_zerop (arg1
)))
12141 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
12143 if (strict_overflow_p
)
12144 fold_overflow_warning (("assuming signed overflow does not occur "
12145 "when simplifying comparison of "
12146 "absolute value and zero"),
12147 WARN_STRICT_OVERFLOW_CONDITIONAL
);
12148 return omit_one_operand_loc (loc
, type
,
12149 constant_boolean_node (true, type
),
12153 /* Convert ABS_EXPR<x> < 0 to false. */
12154 strict_overflow_p
= false;
12155 if (code
== LT_EXPR
12156 && (integer_zerop (arg1
) || real_zerop (arg1
))
12157 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
12159 if (strict_overflow_p
)
12160 fold_overflow_warning (("assuming signed overflow does not occur "
12161 "when simplifying comparison of "
12162 "absolute value and zero"),
12163 WARN_STRICT_OVERFLOW_CONDITIONAL
);
12164 return omit_one_operand_loc (loc
, type
,
12165 constant_boolean_node (false, type
),
12169 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12170 and similarly for >= into !=. */
12171 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
12172 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
12173 && TREE_CODE (arg1
) == LSHIFT_EXPR
12174 && integer_onep (TREE_OPERAND (arg1
, 0)))
12175 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
12176 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
12177 TREE_OPERAND (arg1
, 1)),
12178 build_zero_cst (TREE_TYPE (arg0
)));
12180 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12181 otherwise Y might be >= # of bits in X's type and thus e.g.
12182 (unsigned char) (1 << Y) for Y 15 might be 0.
12183 If the cast is widening, then 1 << Y should have unsigned type,
12184 otherwise if Y is number of bits in the signed shift type minus 1,
12185 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12186 31 might be 0xffffffff80000000. */
12187 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
12188 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
12189 && CONVERT_EXPR_P (arg1
)
12190 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
12191 && (element_precision (TREE_TYPE (arg1
))
12192 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
12193 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
12194 || (element_precision (TREE_TYPE (arg1
))
12195 == element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
12196 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
12198 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
12199 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
12200 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
12201 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
12202 build_zero_cst (TREE_TYPE (arg0
)));
12207 case UNORDERED_EXPR
:
12215 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
12217 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
12218 if (t1
!= NULL_TREE
)
12222 /* If the first operand is NaN, the result is constant. */
12223 if (TREE_CODE (arg0
) == REAL_CST
12224 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
12225 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
12227 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
12228 ? integer_zero_node
12229 : integer_one_node
;
12230 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
12233 /* If the second operand is NaN, the result is constant. */
12234 if (TREE_CODE (arg1
) == REAL_CST
12235 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
12236 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
12238 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
12239 ? integer_zero_node
12240 : integer_one_node
;
12241 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
12244 /* Simplify unordered comparison of something with itself. */
12245 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
12246 && operand_equal_p (arg0
, arg1
, 0))
12247 return constant_boolean_node (1, type
);
12249 if (code
== LTGT_EXPR
12250 && !flag_trapping_math
12251 && operand_equal_p (arg0
, arg1
, 0))
12252 return constant_boolean_node (0, type
);
12254 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12256 tree targ0
= strip_float_extensions (arg0
);
12257 tree targ1
= strip_float_extensions (arg1
);
12258 tree newtype
= TREE_TYPE (targ0
);
12260 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
12261 newtype
= TREE_TYPE (targ1
);
12263 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
12264 return fold_build2_loc (loc
, code
, type
,
12265 fold_convert_loc (loc
, newtype
, targ0
),
12266 fold_convert_loc (loc
, newtype
, targ1
));
12271 case COMPOUND_EXPR
:
12272 /* When pedantic, a compound expression can be neither an lvalue
12273 nor an integer constant expression. */
12274 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
12276 /* Don't let (0, 0) be null pointer constant. */
12277 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
12278 : fold_convert_loc (loc
, type
, arg1
);
12279 return pedantic_non_lvalue_loc (loc
, tem
);
12282 /* An ASSERT_EXPR should never be passed to fold_binary. */
12283 gcc_unreachable ();
12287 } /* switch (code) */
12290 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12291 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
12295 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
12297 switch (TREE_CODE (*tp
))
12303 *walk_subtrees
= 0;
12305 /* ... fall through ... */
12312 /* Return whether the sub-tree ST contains a label which is accessible from
12313 outside the sub-tree. */
12316 contains_label_p (tree st
)
12319 (walk_tree_without_duplicates (&st
, contains_label_1
, NULL
) != NULL_TREE
);
12322 /* Fold a ternary expression of code CODE and type TYPE with operands
12323 OP0, OP1, and OP2. Return the folded expression if folding is
12324 successful. Otherwise, return NULL_TREE. */
12327 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
12328 tree op0
, tree op1
, tree op2
)
12331 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
12332 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
12334 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
12335 && TREE_CODE_LENGTH (code
) == 3);
12337 /* If this is a commutative operation, and OP0 is a constant, move it
12338 to OP1 to reduce the number of tests below. */
12339 if (commutative_ternary_tree_code (code
)
12340 && tree_swap_operands_p (op0
, op1
, true))
12341 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
12343 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
12347 /* Strip any conversions that don't change the mode. This is safe
12348 for every expression, except for a comparison expression because
12349 its signedness is derived from its operands. So, in the latter
12350 case, only strip conversions that don't change the signedness.
12352 Note that this is done as an internal manipulation within the
12353 constant folder, in order to find the simplest representation of
12354 the arguments so that their form can be studied. In any cases,
12355 the appropriate type conversions should be put back in the tree
12356 that will get out of the constant folder. */
12377 case COMPONENT_REF
:
12378 if (TREE_CODE (arg0
) == CONSTRUCTOR
12379 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
12381 unsigned HOST_WIDE_INT idx
;
12383 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
12390 case VEC_COND_EXPR
:
12391 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12392 so all simple results must be passed through pedantic_non_lvalue. */
12393 if (TREE_CODE (arg0
) == INTEGER_CST
)
12395 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
12396 tem
= integer_zerop (arg0
) ? op2
: op1
;
12397 /* Only optimize constant conditions when the selected branch
12398 has the same type as the COND_EXPR. This avoids optimizing
12399 away "c ? x : throw", where the throw has a void type.
12400 Avoid throwing away that operand which contains label. */
12401 if ((!TREE_SIDE_EFFECTS (unused_op
)
12402 || !contains_label_p (unused_op
))
12403 && (! VOID_TYPE_P (TREE_TYPE (tem
))
12404 || VOID_TYPE_P (type
)))
12405 return pedantic_non_lvalue_loc (loc
, tem
);
12408 else if (TREE_CODE (arg0
) == VECTOR_CST
)
12410 if ((TREE_CODE (arg1
) == VECTOR_CST
12411 || TREE_CODE (arg1
) == CONSTRUCTOR
)
12412 && (TREE_CODE (arg2
) == VECTOR_CST
12413 || TREE_CODE (arg2
) == CONSTRUCTOR
))
12415 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
12416 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
12417 gcc_assert (nelts
== VECTOR_CST_NELTS (arg0
));
12418 for (i
= 0; i
< nelts
; i
++)
12420 tree val
= VECTOR_CST_ELT (arg0
, i
);
12421 if (integer_all_onesp (val
))
12423 else if (integer_zerop (val
))
12424 sel
[i
] = nelts
+ i
;
12425 else /* Currently unreachable. */
12428 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
12429 if (t
!= NULL_TREE
)
12434 /* If we have A op B ? A : C, we may be able to convert this to a
12435 simpler expression, depending on the operation and the values
12436 of B and C. Signed zeros prevent all of these transformations,
12437 for reasons given above each one.
12439 Also try swapping the arguments and inverting the conditional. */
12440 if (COMPARISON_CLASS_P (arg0
)
12441 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
12442 arg1
, TREE_OPERAND (arg0
, 1))
12443 && !HONOR_SIGNED_ZEROS (element_mode (arg1
)))
12445 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
12450 if (COMPARISON_CLASS_P (arg0
)
12451 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
12453 TREE_OPERAND (arg0
, 1))
12454 && !HONOR_SIGNED_ZEROS (element_mode (op2
)))
12456 location_t loc0
= expr_location_or (arg0
, loc
);
12457 tem
= fold_invert_truthvalue (loc0
, arg0
);
12458 if (tem
&& COMPARISON_CLASS_P (tem
))
12460 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
12466 /* If the second operand is simpler than the third, swap them
12467 since that produces better jump optimization results. */
12468 if (truth_value_p (TREE_CODE (arg0
))
12469 && tree_swap_operands_p (op1
, op2
, false))
12471 location_t loc0
= expr_location_or (arg0
, loc
);
12472 /* See if this can be inverted. If it can't, possibly because
12473 it was a floating-point inequality comparison, don't do
12475 tem
= fold_invert_truthvalue (loc0
, arg0
);
12477 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
12480 /* Convert A ? 1 : 0 to simply A. */
12481 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
12482 : (integer_onep (op1
)
12483 && !VECTOR_TYPE_P (type
)))
12484 && integer_zerop (op2
)
12485 /* If we try to convert OP0 to our type, the
12486 call to fold will try to move the conversion inside
12487 a COND, which will recurse. In that case, the COND_EXPR
12488 is probably the best choice, so leave it alone. */
12489 && type
== TREE_TYPE (arg0
))
12490 return pedantic_non_lvalue_loc (loc
, arg0
);
12492 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12493 over COND_EXPR in cases such as floating point comparisons. */
12494 if (integer_zerop (op1
)
12495 && (code
== VEC_COND_EXPR
? integer_all_onesp (op2
)
12496 : (integer_onep (op2
)
12497 && !VECTOR_TYPE_P (type
)))
12498 && truth_value_p (TREE_CODE (arg0
)))
12499 return pedantic_non_lvalue_loc (loc
,
12500 fold_convert_loc (loc
, type
,
12501 invert_truthvalue_loc (loc
,
12504 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12505 if (TREE_CODE (arg0
) == LT_EXPR
12506 && integer_zerop (TREE_OPERAND (arg0
, 1))
12507 && integer_zerop (op2
)
12508 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
12510 /* sign_bit_p looks through both zero and sign extensions,
12511 but for this optimization only sign extensions are
12513 tree tem2
= TREE_OPERAND (arg0
, 0);
12514 while (tem
!= tem2
)
12516 if (TREE_CODE (tem2
) != NOP_EXPR
12517 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
12522 tem2
= TREE_OPERAND (tem2
, 0);
12524 /* sign_bit_p only checks ARG1 bits within A's precision.
12525 If <sign bit of A> has wider type than A, bits outside
12526 of A's precision in <sign bit of A> need to be checked.
12527 If they are all 0, this optimization needs to be done
12528 in unsigned A's type, if they are all 1 in signed A's type,
12529 otherwise this can't be done. */
12531 && TYPE_PRECISION (TREE_TYPE (tem
))
12532 < TYPE_PRECISION (TREE_TYPE (arg1
))
12533 && TYPE_PRECISION (TREE_TYPE (tem
))
12534 < TYPE_PRECISION (type
))
12536 int inner_width
, outer_width
;
12539 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
12540 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
12541 if (outer_width
> TYPE_PRECISION (type
))
12542 outer_width
= TYPE_PRECISION (type
);
12544 wide_int mask
= wi::shifted_mask
12545 (inner_width
, outer_width
- inner_width
, false,
12546 TYPE_PRECISION (TREE_TYPE (arg1
)));
12548 wide_int common
= mask
& arg1
;
12549 if (common
== mask
)
12551 tem_type
= signed_type_for (TREE_TYPE (tem
));
12552 tem
= fold_convert_loc (loc
, tem_type
, tem
);
12554 else if (common
== 0)
12556 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
12557 tem
= fold_convert_loc (loc
, tem_type
, tem
);
12565 fold_convert_loc (loc
, type
,
12566 fold_build2_loc (loc
, BIT_AND_EXPR
,
12567 TREE_TYPE (tem
), tem
,
12568 fold_convert_loc (loc
,
12573 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12574 already handled above. */
12575 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12576 && integer_onep (TREE_OPERAND (arg0
, 1))
12577 && integer_zerop (op2
)
12578 && integer_pow2p (arg1
))
12580 tree tem
= TREE_OPERAND (arg0
, 0);
12582 if (TREE_CODE (tem
) == RSHIFT_EXPR
12583 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
12584 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
12585 tree_to_uhwi (TREE_OPERAND (tem
, 1)))
12586 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
12587 TREE_OPERAND (tem
, 0), arg1
);
12590 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12591 is probably obsolete because the first operand should be a
12592 truth value (that's why we have the two cases above), but let's
12593 leave it in until we can confirm this for all front-ends. */
12594 if (integer_zerop (op2
)
12595 && TREE_CODE (arg0
) == NE_EXPR
12596 && integer_zerop (TREE_OPERAND (arg0
, 1))
12597 && integer_pow2p (arg1
)
12598 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12599 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12600 arg1
, OEP_ONLY_CONST
))
12601 return pedantic_non_lvalue_loc (loc
,
12602 fold_convert_loc (loc
, type
,
12603 TREE_OPERAND (arg0
, 0)));
12605 /* Disable the transformations below for vectors, since
12606 fold_binary_op_with_conditional_arg may undo them immediately,
12607 yielding an infinite loop. */
12608 if (code
== VEC_COND_EXPR
)
12611 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12612 if (integer_zerop (op2
)
12613 && truth_value_p (TREE_CODE (arg0
))
12614 && truth_value_p (TREE_CODE (arg1
))
12615 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12616 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
12617 : TRUTH_ANDIF_EXPR
,
12618 type
, fold_convert_loc (loc
, type
, arg0
), arg1
);
12620 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12621 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
12622 && truth_value_p (TREE_CODE (arg0
))
12623 && truth_value_p (TREE_CODE (arg1
))
12624 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12626 location_t loc0
= expr_location_or (arg0
, loc
);
12627 /* Only perform transformation if ARG0 is easily inverted. */
12628 tem
= fold_invert_truthvalue (loc0
, arg0
);
12630 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
12633 type
, fold_convert_loc (loc
, type
, tem
),
12637 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12638 if (integer_zerop (arg1
)
12639 && truth_value_p (TREE_CODE (arg0
))
12640 && truth_value_p (TREE_CODE (op2
))
12641 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12643 location_t loc0
= expr_location_or (arg0
, loc
);
12644 /* Only perform transformation if ARG0 is easily inverted. */
12645 tem
= fold_invert_truthvalue (loc0
, arg0
);
12647 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
12648 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
12649 type
, fold_convert_loc (loc
, type
, tem
),
12653 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12654 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
12655 && truth_value_p (TREE_CODE (arg0
))
12656 && truth_value_p (TREE_CODE (op2
))
12657 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12658 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
12659 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
12660 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
12665 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12666 of fold_ternary on them. */
12667 gcc_unreachable ();
12669 case BIT_FIELD_REF
:
12670 if ((TREE_CODE (arg0
) == VECTOR_CST
12671 || (TREE_CODE (arg0
) == CONSTRUCTOR
12672 && TREE_CODE (TREE_TYPE (arg0
)) == VECTOR_TYPE
))
12673 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
12674 || (TREE_CODE (type
) == VECTOR_TYPE
12675 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
12677 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
12678 unsigned HOST_WIDE_INT width
= tree_to_uhwi (TYPE_SIZE (eltype
));
12679 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
12680 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
12683 && (idx
% width
) == 0
12684 && (n
% width
) == 0
12685 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
12690 if (TREE_CODE (arg0
) == VECTOR_CST
)
12693 return VECTOR_CST_ELT (arg0
, idx
);
12695 tree
*vals
= XALLOCAVEC (tree
, n
);
12696 for (unsigned i
= 0; i
< n
; ++i
)
12697 vals
[i
] = VECTOR_CST_ELT (arg0
, idx
+ i
);
12698 return build_vector (type
, vals
);
12701 /* Constructor elements can be subvectors. */
12702 unsigned HOST_WIDE_INT k
= 1;
12703 if (CONSTRUCTOR_NELTS (arg0
) != 0)
12705 tree cons_elem
= TREE_TYPE (CONSTRUCTOR_ELT (arg0
, 0)->value
);
12706 if (TREE_CODE (cons_elem
) == VECTOR_TYPE
)
12707 k
= TYPE_VECTOR_SUBPARTS (cons_elem
);
12710 /* We keep an exact subset of the constructor elements. */
12711 if ((idx
% k
) == 0 && (n
% k
) == 0)
12713 if (CONSTRUCTOR_NELTS (arg0
) == 0)
12714 return build_constructor (type
, NULL
);
12719 if (idx
< CONSTRUCTOR_NELTS (arg0
))
12720 return CONSTRUCTOR_ELT (arg0
, idx
)->value
;
12721 return build_zero_cst (type
);
12724 vec
<constructor_elt
, va_gc
> *vals
;
12725 vec_alloc (vals
, n
);
12726 for (unsigned i
= 0;
12727 i
< n
&& idx
+ i
< CONSTRUCTOR_NELTS (arg0
);
12729 CONSTRUCTOR_APPEND_ELT (vals
, NULL_TREE
,
12731 (arg0
, idx
+ i
)->value
);
12732 return build_constructor (type
, vals
);
12734 /* The bitfield references a single constructor element. */
12735 else if (idx
+ n
<= (idx
/ k
+ 1) * k
)
12737 if (CONSTRUCTOR_NELTS (arg0
) <= idx
/ k
)
12738 return build_zero_cst (type
);
12740 return CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
;
12742 return fold_build3_loc (loc
, code
, type
,
12743 CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
, op1
,
12744 build_int_cst (TREE_TYPE (op2
), (idx
% k
) * width
));
12749 /* A bit-field-ref that referenced the full argument can be stripped. */
12750 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
12751 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_to_uhwi (arg1
)
12752 && integer_zerop (op2
))
12753 return fold_convert_loc (loc
, type
, arg0
);
12755 /* On constants we can use native encode/interpret to constant
12756 fold (nearly) all BIT_FIELD_REFs. */
12757 if (CONSTANT_CLASS_P (arg0
)
12758 && can_native_interpret_type_p (type
)
12759 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)))
12760 /* This limitation should not be necessary, we just need to
12761 round this up to mode size. */
12762 && tree_to_uhwi (op1
) % BITS_PER_UNIT
== 0
12763 /* Need bit-shifting of the buffer to relax the following. */
12764 && tree_to_uhwi (op2
) % BITS_PER_UNIT
== 0)
12766 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
12767 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
12768 unsigned HOST_WIDE_INT clen
;
12769 clen
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)));
12770 /* ??? We cannot tell native_encode_expr to start at
12771 some random byte only. So limit us to a reasonable amount
12775 unsigned char *b
= XALLOCAVEC (unsigned char, clen
);
12776 unsigned HOST_WIDE_INT len
= native_encode_expr (arg0
, b
, clen
);
12778 && len
* BITS_PER_UNIT
>= bitpos
+ bitsize
)
12780 tree v
= native_interpret_expr (type
,
12781 b
+ bitpos
/ BITS_PER_UNIT
,
12782 bitsize
/ BITS_PER_UNIT
);
12792 /* For integers we can decompose the FMA if possible. */
12793 if (TREE_CODE (arg0
) == INTEGER_CST
12794 && TREE_CODE (arg1
) == INTEGER_CST
)
12795 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
12796 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
12797 if (integer_zerop (arg2
))
12798 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
12800 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
12802 case VEC_PERM_EXPR
:
12803 if (TREE_CODE (arg2
) == VECTOR_CST
)
12805 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
, mask
, mask2
;
12806 unsigned char *sel
= XALLOCAVEC (unsigned char, 2 * nelts
);
12807 unsigned char *sel2
= sel
+ nelts
;
12808 bool need_mask_canon
= false;
12809 bool need_mask_canon2
= false;
12810 bool all_in_vec0
= true;
12811 bool all_in_vec1
= true;
12812 bool maybe_identity
= true;
12813 bool single_arg
= (op0
== op1
);
12814 bool changed
= false;
12816 mask2
= 2 * nelts
- 1;
12817 mask
= single_arg
? (nelts
- 1) : mask2
;
12818 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
12819 for (i
= 0; i
< nelts
; i
++)
12821 tree val
= VECTOR_CST_ELT (arg2
, i
);
12822 if (TREE_CODE (val
) != INTEGER_CST
)
12825 /* Make sure that the perm value is in an acceptable
12828 need_mask_canon
|= wi::gtu_p (t
, mask
);
12829 need_mask_canon2
|= wi::gtu_p (t
, mask2
);
12830 sel
[i
] = t
.to_uhwi () & mask
;
12831 sel2
[i
] = t
.to_uhwi () & mask2
;
12833 if (sel
[i
] < nelts
)
12834 all_in_vec1
= false;
12836 all_in_vec0
= false;
12838 if ((sel
[i
] & (nelts
-1)) != i
)
12839 maybe_identity
= false;
12842 if (maybe_identity
)
12852 else if (all_in_vec1
)
12855 for (i
= 0; i
< nelts
; i
++)
12857 need_mask_canon
= true;
12860 if ((TREE_CODE (op0
) == VECTOR_CST
12861 || TREE_CODE (op0
) == CONSTRUCTOR
)
12862 && (TREE_CODE (op1
) == VECTOR_CST
12863 || TREE_CODE (op1
) == CONSTRUCTOR
))
12865 tree t
= fold_vec_perm (type
, op0
, op1
, sel
);
12866 if (t
!= NULL_TREE
)
12870 if (op0
== op1
&& !single_arg
)
12873 /* Some targets are deficient and fail to expand a single
12874 argument permutation while still allowing an equivalent
12875 2-argument version. */
12876 if (need_mask_canon
&& arg2
== op2
12877 && !can_vec_perm_p (TYPE_MODE (type
), false, sel
)
12878 && can_vec_perm_p (TYPE_MODE (type
), false, sel2
))
12880 need_mask_canon
= need_mask_canon2
;
12884 if (need_mask_canon
&& arg2
== op2
)
12886 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
12887 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
12888 for (i
= 0; i
< nelts
; i
++)
12889 tsel
[i
] = build_int_cst (eltype
, sel
[i
]);
12890 op2
= build_vector (TREE_TYPE (arg2
), tsel
);
12895 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
12901 } /* switch (code) */
12904 /* Perform constant folding and related simplification of EXPR.
12905 The related simplifications include x*1 => x, x*0 => 0, etc.,
12906 and application of the associative law.
12907 NOP_EXPR conversions may be removed freely (as long as we
12908 are careful not to change the type of the overall expression).
12909 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12910 but we can constant-fold them if they have constant operands. */
12912 #ifdef ENABLE_FOLD_CHECKING
12913 # define fold(x) fold_1 (x)
12914 static tree
fold_1 (tree
);
12920 const tree t
= expr
;
12921 enum tree_code code
= TREE_CODE (t
);
12922 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
12924 location_t loc
= EXPR_LOCATION (expr
);
12926 /* Return right away if a constant. */
12927 if (kind
== tcc_constant
)
12930 /* CALL_EXPR-like objects with variable numbers of operands are
12931 treated specially. */
12932 if (kind
== tcc_vl_exp
)
12934 if (code
== CALL_EXPR
)
12936 tem
= fold_call_expr (loc
, expr
, false);
12937 return tem
? tem
: expr
;
12942 if (IS_EXPR_CODE_CLASS (kind
))
12944 tree type
= TREE_TYPE (t
);
12945 tree op0
, op1
, op2
;
12947 switch (TREE_CODE_LENGTH (code
))
12950 op0
= TREE_OPERAND (t
, 0);
12951 tem
= fold_unary_loc (loc
, code
, type
, op0
);
12952 return tem
? tem
: expr
;
12954 op0
= TREE_OPERAND (t
, 0);
12955 op1
= TREE_OPERAND (t
, 1);
12956 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
12957 return tem
? tem
: expr
;
12959 op0
= TREE_OPERAND (t
, 0);
12960 op1
= TREE_OPERAND (t
, 1);
12961 op2
= TREE_OPERAND (t
, 2);
12962 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
12963 return tem
? tem
: expr
;
12973 tree op0
= TREE_OPERAND (t
, 0);
12974 tree op1
= TREE_OPERAND (t
, 1);
12976 if (TREE_CODE (op1
) == INTEGER_CST
12977 && TREE_CODE (op0
) == CONSTRUCTOR
12978 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
12980 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (op0
);
12981 unsigned HOST_WIDE_INT end
= vec_safe_length (elts
);
12982 unsigned HOST_WIDE_INT begin
= 0;
12984 /* Find a matching index by means of a binary search. */
12985 while (begin
!= end
)
12987 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
12988 tree index
= (*elts
)[middle
].index
;
12990 if (TREE_CODE (index
) == INTEGER_CST
12991 && tree_int_cst_lt (index
, op1
))
12992 begin
= middle
+ 1;
12993 else if (TREE_CODE (index
) == INTEGER_CST
12994 && tree_int_cst_lt (op1
, index
))
12996 else if (TREE_CODE (index
) == RANGE_EXPR
12997 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
12998 begin
= middle
+ 1;
12999 else if (TREE_CODE (index
) == RANGE_EXPR
13000 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
13003 return (*elts
)[middle
].value
;
13010 /* Return a VECTOR_CST if possible. */
13013 tree type
= TREE_TYPE (t
);
13014 if (TREE_CODE (type
) != VECTOR_TYPE
)
13017 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
13018 unsigned HOST_WIDE_INT idx
, pos
= 0;
13021 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), idx
, value
)
13023 if (!CONSTANT_CLASS_P (value
))
13025 if (TREE_CODE (value
) == VECTOR_CST
)
13027 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
13028 vec
[pos
++] = VECTOR_CST_ELT (value
, i
);
13031 vec
[pos
++] = value
;
13033 for (; pos
< TYPE_VECTOR_SUBPARTS (type
); ++pos
)
13034 vec
[pos
] = build_zero_cst (TREE_TYPE (type
));
13036 return build_vector (type
, vec
);
13040 return fold (DECL_INITIAL (t
));
13044 } /* switch (code) */
13047 #ifdef ENABLE_FOLD_CHECKING
13050 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
13051 hash_table
<nofree_ptr_hash
<const tree_node
> > *);
13052 static void fold_check_failed (const_tree
, const_tree
);
13053 void print_fold_checksum (const_tree
);
13055 /* When --enable-checking=fold, compute a digest of expr before
13056 and after actual fold call to see if fold did not accidentally
13057 change original expr. */
13063 struct md5_ctx ctx
;
13064 unsigned char checksum_before
[16], checksum_after
[16];
13065 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13067 md5_init_ctx (&ctx
);
13068 fold_checksum_tree (expr
, &ctx
, &ht
);
13069 md5_finish_ctx (&ctx
, checksum_before
);
13072 ret
= fold_1 (expr
);
13074 md5_init_ctx (&ctx
);
13075 fold_checksum_tree (expr
, &ctx
, &ht
);
13076 md5_finish_ctx (&ctx
, checksum_after
);
13078 if (memcmp (checksum_before
, checksum_after
, 16))
13079 fold_check_failed (expr
, ret
);
13085 print_fold_checksum (const_tree expr
)
13087 struct md5_ctx ctx
;
13088 unsigned char checksum
[16], cnt
;
13089 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13091 md5_init_ctx (&ctx
);
13092 fold_checksum_tree (expr
, &ctx
, &ht
);
13093 md5_finish_ctx (&ctx
, checksum
);
13094 for (cnt
= 0; cnt
< 16; ++cnt
)
13095 fprintf (stderr
, "%02x", checksum
[cnt
]);
13096 putc ('\n', stderr
);
13100 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
13102 internal_error ("fold check: original tree changed by fold");
13106 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
13107 hash_table
<nofree_ptr_hash
<const tree_node
> > *ht
)
13109 const tree_node
**slot
;
13110 enum tree_code code
;
13111 union tree_node buf
;
13117 slot
= ht
->find_slot (expr
, INSERT
);
13121 code
= TREE_CODE (expr
);
13122 if (TREE_CODE_CLASS (code
) == tcc_declaration
13123 && HAS_DECL_ASSEMBLER_NAME_P (expr
))
13125 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13126 memcpy ((char *) &buf
, expr
, tree_size (expr
));
13127 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
13128 buf
.decl_with_vis
.symtab_node
= NULL
;
13129 expr
= (tree
) &buf
;
13131 else if (TREE_CODE_CLASS (code
) == tcc_type
13132 && (TYPE_POINTER_TO (expr
)
13133 || TYPE_REFERENCE_TO (expr
)
13134 || TYPE_CACHED_VALUES_P (expr
)
13135 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
13136 || TYPE_NEXT_VARIANT (expr
)))
13138 /* Allow these fields to be modified. */
13140 memcpy ((char *) &buf
, expr
, tree_size (expr
));
13141 expr
= tmp
= (tree
) &buf
;
13142 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
13143 TYPE_POINTER_TO (tmp
) = NULL
;
13144 TYPE_REFERENCE_TO (tmp
) = NULL
;
13145 TYPE_NEXT_VARIANT (tmp
) = NULL
;
13146 if (TYPE_CACHED_VALUES_P (tmp
))
13148 TYPE_CACHED_VALUES_P (tmp
) = 0;
13149 TYPE_CACHED_VALUES (tmp
) = NULL
;
13152 md5_process_bytes (expr
, tree_size (expr
), ctx
);
13153 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
13154 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
13155 if (TREE_CODE_CLASS (code
) != tcc_type
13156 && TREE_CODE_CLASS (code
) != tcc_declaration
13157 && code
!= TREE_LIST
13158 && code
!= SSA_NAME
13159 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
13160 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
13161 switch (TREE_CODE_CLASS (code
))
13167 md5_process_bytes (TREE_STRING_POINTER (expr
),
13168 TREE_STRING_LENGTH (expr
), ctx
);
13171 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
13172 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
13175 for (i
= 0; i
< (int) VECTOR_CST_NELTS (expr
); ++i
)
13176 fold_checksum_tree (VECTOR_CST_ELT (expr
, i
), ctx
, ht
);
13182 case tcc_exceptional
:
13186 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
13187 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
13188 expr
= TREE_CHAIN (expr
);
13189 goto recursive_label
;
13192 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
13193 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
13199 case tcc_expression
:
13200 case tcc_reference
:
13201 case tcc_comparison
:
13204 case tcc_statement
:
13206 len
= TREE_OPERAND_LENGTH (expr
);
13207 for (i
= 0; i
< len
; ++i
)
13208 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
13210 case tcc_declaration
:
13211 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
13212 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
13213 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
13215 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
13216 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
13217 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
13218 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
13219 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
13222 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
13224 if (TREE_CODE (expr
) == FUNCTION_DECL
)
13226 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
13227 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
13229 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
13233 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
13234 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
13235 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
13236 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
13237 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
13238 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
13239 if (INTEGRAL_TYPE_P (expr
)
13240 || SCALAR_FLOAT_TYPE_P (expr
))
13242 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
13243 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
13245 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
13246 if (TREE_CODE (expr
) == RECORD_TYPE
13247 || TREE_CODE (expr
) == UNION_TYPE
13248 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
13249 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
13250 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
13257 /* Helper function for outputting the checksum of a tree T. When
13258 debugging with gdb, you can "define mynext" to be "next" followed
13259 by "call debug_fold_checksum (op0)", then just trace down till the
13262 DEBUG_FUNCTION
void
13263 debug_fold_checksum (const_tree t
)
13266 unsigned char checksum
[16];
13267 struct md5_ctx ctx
;
13268 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13270 md5_init_ctx (&ctx
);
13271 fold_checksum_tree (t
, &ctx
, &ht
);
13272 md5_finish_ctx (&ctx
, checksum
);
13275 for (i
= 0; i
< 16; i
++)
13276 fprintf (stderr
, "%d ", checksum
[i
]);
13278 fprintf (stderr
, "\n");
13283 /* Fold a unary tree expression with code CODE of type TYPE with an
13284 operand OP0. LOC is the location of the resulting expression.
13285 Return a folded expression if successful. Otherwise, return a tree
13286 expression with code CODE of type TYPE with an operand OP0. */
13289 fold_build1_stat_loc (location_t loc
,
13290 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
13293 #ifdef ENABLE_FOLD_CHECKING
13294 unsigned char checksum_before
[16], checksum_after
[16];
13295 struct md5_ctx ctx
;
13296 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13298 md5_init_ctx (&ctx
);
13299 fold_checksum_tree (op0
, &ctx
, &ht
);
13300 md5_finish_ctx (&ctx
, checksum_before
);
13304 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13306 tem
= build1_stat_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
13308 #ifdef ENABLE_FOLD_CHECKING
13309 md5_init_ctx (&ctx
);
13310 fold_checksum_tree (op0
, &ctx
, &ht
);
13311 md5_finish_ctx (&ctx
, checksum_after
);
13313 if (memcmp (checksum_before
, checksum_after
, 16))
13314 fold_check_failed (op0
, tem
);
13319 /* Fold a binary tree expression with code CODE of type TYPE with
13320 operands OP0 and OP1. LOC is the location of the resulting
13321 expression. Return a folded expression if successful. Otherwise,
13322 return a tree expression with code CODE of type TYPE with operands
13326 fold_build2_stat_loc (location_t loc
,
13327 enum tree_code code
, tree type
, tree op0
, tree op1
13331 #ifdef ENABLE_FOLD_CHECKING
13332 unsigned char checksum_before_op0
[16],
13333 checksum_before_op1
[16],
13334 checksum_after_op0
[16],
13335 checksum_after_op1
[16];
13336 struct md5_ctx ctx
;
13337 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13339 md5_init_ctx (&ctx
);
13340 fold_checksum_tree (op0
, &ctx
, &ht
);
13341 md5_finish_ctx (&ctx
, checksum_before_op0
);
13344 md5_init_ctx (&ctx
);
13345 fold_checksum_tree (op1
, &ctx
, &ht
);
13346 md5_finish_ctx (&ctx
, checksum_before_op1
);
13350 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13352 tem
= build2_stat_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
13354 #ifdef ENABLE_FOLD_CHECKING
13355 md5_init_ctx (&ctx
);
13356 fold_checksum_tree (op0
, &ctx
, &ht
);
13357 md5_finish_ctx (&ctx
, checksum_after_op0
);
13360 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
13361 fold_check_failed (op0
, tem
);
13363 md5_init_ctx (&ctx
);
13364 fold_checksum_tree (op1
, &ctx
, &ht
);
13365 md5_finish_ctx (&ctx
, checksum_after_op1
);
13367 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
13368 fold_check_failed (op1
, tem
);
13373 /* Fold a ternary tree expression with code CODE of type TYPE with
13374 operands OP0, OP1, and OP2. Return a folded expression if
13375 successful. Otherwise, return a tree expression with code CODE of
13376 type TYPE with operands OP0, OP1, and OP2. */
13379 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
13380 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
13383 #ifdef ENABLE_FOLD_CHECKING
13384 unsigned char checksum_before_op0
[16],
13385 checksum_before_op1
[16],
13386 checksum_before_op2
[16],
13387 checksum_after_op0
[16],
13388 checksum_after_op1
[16],
13389 checksum_after_op2
[16];
13390 struct md5_ctx ctx
;
13391 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13393 md5_init_ctx (&ctx
);
13394 fold_checksum_tree (op0
, &ctx
, &ht
);
13395 md5_finish_ctx (&ctx
, checksum_before_op0
);
13398 md5_init_ctx (&ctx
);
13399 fold_checksum_tree (op1
, &ctx
, &ht
);
13400 md5_finish_ctx (&ctx
, checksum_before_op1
);
13403 md5_init_ctx (&ctx
);
13404 fold_checksum_tree (op2
, &ctx
, &ht
);
13405 md5_finish_ctx (&ctx
, checksum_before_op2
);
13409 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
13410 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13412 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
13414 #ifdef ENABLE_FOLD_CHECKING
13415 md5_init_ctx (&ctx
);
13416 fold_checksum_tree (op0
, &ctx
, &ht
);
13417 md5_finish_ctx (&ctx
, checksum_after_op0
);
13420 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
13421 fold_check_failed (op0
, tem
);
13423 md5_init_ctx (&ctx
);
13424 fold_checksum_tree (op1
, &ctx
, &ht
);
13425 md5_finish_ctx (&ctx
, checksum_after_op1
);
13428 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
13429 fold_check_failed (op1
, tem
);
13431 md5_init_ctx (&ctx
);
13432 fold_checksum_tree (op2
, &ctx
, &ht
);
13433 md5_finish_ctx (&ctx
, checksum_after_op2
);
13435 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
13436 fold_check_failed (op2
, tem
);
13441 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13442 arguments in ARGARRAY, and a null static chain.
13443 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13444 of type TYPE from the given operands as constructed by build_call_array. */
13447 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
13448 int nargs
, tree
*argarray
)
13451 #ifdef ENABLE_FOLD_CHECKING
13452 unsigned char checksum_before_fn
[16],
13453 checksum_before_arglist
[16],
13454 checksum_after_fn
[16],
13455 checksum_after_arglist
[16];
13456 struct md5_ctx ctx
;
13457 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13460 md5_init_ctx (&ctx
);
13461 fold_checksum_tree (fn
, &ctx
, &ht
);
13462 md5_finish_ctx (&ctx
, checksum_before_fn
);
13465 md5_init_ctx (&ctx
);
13466 for (i
= 0; i
< nargs
; i
++)
13467 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
13468 md5_finish_ctx (&ctx
, checksum_before_arglist
);
13472 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
13474 tem
= build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
13476 #ifdef ENABLE_FOLD_CHECKING
13477 md5_init_ctx (&ctx
);
13478 fold_checksum_tree (fn
, &ctx
, &ht
);
13479 md5_finish_ctx (&ctx
, checksum_after_fn
);
13482 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
13483 fold_check_failed (fn
, tem
);
13485 md5_init_ctx (&ctx
);
13486 for (i
= 0; i
< nargs
; i
++)
13487 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
13488 md5_finish_ctx (&ctx
, checksum_after_arglist
);
13490 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
13491 fold_check_failed (NULL_TREE
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

/* Save and clear every flag that would make fold preserve run-time
   behavior (signaling NaNs, FP trapping/rounding, -ftrapv), and enter
   initializer-folding mode.  Each cleared flag has a matching save so
   END_FOLD_INIT can restore it.  The visible text saved flag_trapv but
   never zeroed it, leaving trapping arithmetic active while folding
   initializers; the reset is restored here.  */
#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

/* Restore every flag saved by START_FOLD_INIT.  */
#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
13520 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
13521 tree type
, tree op
)
13526 result
= fold_build1_loc (loc
, code
, type
, op
);
13533 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
13534 tree type
, tree op0
, tree op1
)
13539 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
13546 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
13547 int nargs
, tree
*argarray
)
13552 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
13558 #undef START_FOLD_INIT
13559 #undef END_FOLD_INIT
13561 /* Determine if first argument is a multiple of second argument. Return 0 if
13562 it is not, or we cannot easily determined it to be.
13564 An example of the sort of thing we care about (at this point; this routine
13565 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13566 fold cases do now) is discovering that
13568 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13574 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13576 This code also handles discovering that
13578 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13580 is a multiple of 8 so we don't have to worry about dealing with a
13581 possible remainder.
13583 Note that we *look* inside a SAVE_EXPR only to determine how it was
13584 calculated; it is not safe for fold to do much of anything else with the
13585 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13586 at run time. For example, the latter example above *cannot* be implemented
13587 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13588 evaluation time of the original SAVE_EXPR is not necessarily the same at
13589 the time the new expression is evaluated. The only optimization of this
13590 sort that would be valid is changing
13592 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13596 SAVE_EXPR (I) * SAVE_EXPR (J)
13598 (where the same SAVE_EXPR (J) is used in the original and the
13599 transformed version). */
13602 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
13604 if (operand_equal_p (top
, bottom
, 0))
13607 if (TREE_CODE (type
) != INTEGER_TYPE
)
13610 switch (TREE_CODE (top
))
13613 /* Bitwise and provides a power of two multiple. If the mask is
13614 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13615 if (!integer_pow2p (bottom
))
13620 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
13621 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
13625 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
13626 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
13629 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
13633 op1
= TREE_OPERAND (top
, 1);
13634 /* const_binop may not detect overflow correctly,
13635 so check for it explicitly here. */
13636 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
13637 && 0 != (t1
= fold_convert (type
,
13638 const_binop (LSHIFT_EXPR
,
13641 && !TREE_OVERFLOW (t1
))
13642 return multiple_of_p (type
, t1
, bottom
);
13647 /* Can't handle conversions from non-integral or wider integral type. */
13648 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
13649 || (TYPE_PRECISION (type
)
13650 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
13653 /* .. fall through ... */
13656 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
13659 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
13660 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
13663 if (TREE_CODE (bottom
) != INTEGER_CST
13664 || integer_zerop (bottom
)
13665 || (TYPE_UNSIGNED (type
)
13666 && (tree_int_cst_sgn (top
) < 0
13667 || tree_int_cst_sgn (bottom
) < 0)))
13669 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
13677 /* Return true if CODE or TYPE is known to be non-negative. */
13680 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
13682 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
13683 && truth_value_p (code
))
13684 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13685 have a signed:1 type (where the value is -1 and 0). */
13690 /* Return true if (CODE OP0) is known to be non-negative. If the return
13691 value is based on the assumption that signed overflow is undefined,
13692 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13693 *STRICT_OVERFLOW_P. */
13696 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
13697 bool *strict_overflow_p
)
13699 if (TYPE_UNSIGNED (type
))
13705 /* We can't return 1 if flag_wrapv is set because
13706 ABS_EXPR<INT_MIN> = INT_MIN. */
13707 if (!ANY_INTEGRAL_TYPE_P (type
))
13709 if (TYPE_OVERFLOW_UNDEFINED (type
))
13711 *strict_overflow_p
= true;
13716 case NON_LVALUE_EXPR
:
13718 case FIX_TRUNC_EXPR
:
13719 return tree_expr_nonnegative_warnv_p (op0
,
13720 strict_overflow_p
);
13724 tree inner_type
= TREE_TYPE (op0
);
13725 tree outer_type
= type
;
13727 if (TREE_CODE (outer_type
) == REAL_TYPE
)
13729 if (TREE_CODE (inner_type
) == REAL_TYPE
)
13730 return tree_expr_nonnegative_warnv_p (op0
,
13731 strict_overflow_p
);
13732 if (INTEGRAL_TYPE_P (inner_type
))
13734 if (TYPE_UNSIGNED (inner_type
))
13736 return tree_expr_nonnegative_warnv_p (op0
,
13737 strict_overflow_p
);
13740 else if (INTEGRAL_TYPE_P (outer_type
))
13742 if (TREE_CODE (inner_type
) == REAL_TYPE
)
13743 return tree_expr_nonnegative_warnv_p (op0
,
13744 strict_overflow_p
);
13745 if (INTEGRAL_TYPE_P (inner_type
))
13746 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
13747 && TYPE_UNSIGNED (inner_type
);
13753 return tree_simple_nonnegative_warnv_p (code
, type
);
13756 /* We don't know sign of `t', so be conservative and return false. */
13760 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13761 value is based on the assumption that signed overflow is undefined,
13762 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13763 *STRICT_OVERFLOW_P. */
13766 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
13767 tree op1
, bool *strict_overflow_p
)
13769 if (TYPE_UNSIGNED (type
))
13774 case POINTER_PLUS_EXPR
:
13776 if (FLOAT_TYPE_P (type
))
13777 return (tree_expr_nonnegative_warnv_p (op0
,
13779 && tree_expr_nonnegative_warnv_p (op1
,
13780 strict_overflow_p
));
13782 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13783 both unsigned and at least 2 bits shorter than the result. */
13784 if (TREE_CODE (type
) == INTEGER_TYPE
13785 && TREE_CODE (op0
) == NOP_EXPR
13786 && TREE_CODE (op1
) == NOP_EXPR
)
13788 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
13789 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
13790 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
13791 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
13793 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
13794 TYPE_PRECISION (inner2
)) + 1;
13795 return prec
< TYPE_PRECISION (type
);
13801 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
13803 /* x * x is always non-negative for floating point x
13804 or without overflow. */
13805 if (operand_equal_p (op0
, op1
, 0)
13806 || (tree_expr_nonnegative_warnv_p (op0
, strict_overflow_p
)
13807 && tree_expr_nonnegative_warnv_p (op1
, strict_overflow_p
)))
13809 if (ANY_INTEGRAL_TYPE_P (type
)
13810 && TYPE_OVERFLOW_UNDEFINED (type
))
13811 *strict_overflow_p
= true;
13816 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13817 both unsigned and their total bits is shorter than the result. */
13818 if (TREE_CODE (type
) == INTEGER_TYPE
13819 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
13820 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
13822 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
13823 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
13825 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
13826 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
13829 bool unsigned0
= TYPE_UNSIGNED (inner0
);
13830 bool unsigned1
= TYPE_UNSIGNED (inner1
);
13832 if (TREE_CODE (op0
) == INTEGER_CST
)
13833 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
13835 if (TREE_CODE (op1
) == INTEGER_CST
)
13836 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
13838 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
13839 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
13841 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
13842 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
13843 : TYPE_PRECISION (inner0
);
13845 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
13846 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
13847 : TYPE_PRECISION (inner1
);
13849 return precision0
+ precision1
< TYPE_PRECISION (type
);
13856 return (tree_expr_nonnegative_warnv_p (op0
,
13858 || tree_expr_nonnegative_warnv_p (op1
,
13859 strict_overflow_p
));
13865 case TRUNC_DIV_EXPR
:
13866 case CEIL_DIV_EXPR
:
13867 case FLOOR_DIV_EXPR
:
13868 case ROUND_DIV_EXPR
:
13869 return (tree_expr_nonnegative_warnv_p (op0
,
13871 && tree_expr_nonnegative_warnv_p (op1
,
13872 strict_overflow_p
));
13874 case TRUNC_MOD_EXPR
:
13875 case CEIL_MOD_EXPR
:
13876 case FLOOR_MOD_EXPR
:
13877 case ROUND_MOD_EXPR
:
13878 return tree_expr_nonnegative_warnv_p (op0
,
13879 strict_overflow_p
);
13881 return tree_simple_nonnegative_warnv_p (code
, type
);
13884 /* We don't know sign of `t', so be conservative and return false. */
13888 /* Return true if T is known to be non-negative. If the return
13889 value is based on the assumption that signed overflow is undefined,
13890 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13891 *STRICT_OVERFLOW_P. */
13894 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
13896 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
13899 switch (TREE_CODE (t
))
13902 return tree_int_cst_sgn (t
) >= 0;
13905 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
13908 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
13911 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
13913 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 2),
13914 strict_overflow_p
));
13916 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
13919 /* We don't know sign of `t', so be conservative and return false. */
13923 /* Return true if T is known to be non-negative. If the return
13924 value is based on the assumption that signed overflow is undefined,
13925 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13926 *STRICT_OVERFLOW_P. */
13929 tree_call_nonnegative_warnv_p (tree type
, tree fndecl
,
13930 tree arg0
, tree arg1
, bool *strict_overflow_p
)
13932 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
13933 switch (DECL_FUNCTION_CODE (fndecl
))
13935 CASE_FLT_FN (BUILT_IN_ACOS
):
13936 CASE_FLT_FN (BUILT_IN_ACOSH
):
13937 CASE_FLT_FN (BUILT_IN_CABS
):
13938 CASE_FLT_FN (BUILT_IN_COSH
):
13939 CASE_FLT_FN (BUILT_IN_ERFC
):
13940 CASE_FLT_FN (BUILT_IN_EXP
):
13941 CASE_FLT_FN (BUILT_IN_EXP10
):
13942 CASE_FLT_FN (BUILT_IN_EXP2
):
13943 CASE_FLT_FN (BUILT_IN_FABS
):
13944 CASE_FLT_FN (BUILT_IN_FDIM
):
13945 CASE_FLT_FN (BUILT_IN_HYPOT
):
13946 CASE_FLT_FN (BUILT_IN_POW10
):
13947 CASE_INT_FN (BUILT_IN_FFS
):
13948 CASE_INT_FN (BUILT_IN_PARITY
):
13949 CASE_INT_FN (BUILT_IN_POPCOUNT
):
13950 CASE_INT_FN (BUILT_IN_CLZ
):
13951 CASE_INT_FN (BUILT_IN_CLRSB
):
13952 case BUILT_IN_BSWAP32
:
13953 case BUILT_IN_BSWAP64
:
13957 CASE_FLT_FN (BUILT_IN_SQRT
):
13958 /* sqrt(-0.0) is -0.0. */
13959 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
13961 return tree_expr_nonnegative_warnv_p (arg0
,
13962 strict_overflow_p
);
13964 CASE_FLT_FN (BUILT_IN_ASINH
):
13965 CASE_FLT_FN (BUILT_IN_ATAN
):
13966 CASE_FLT_FN (BUILT_IN_ATANH
):
13967 CASE_FLT_FN (BUILT_IN_CBRT
):
13968 CASE_FLT_FN (BUILT_IN_CEIL
):
13969 CASE_FLT_FN (BUILT_IN_ERF
):
13970 CASE_FLT_FN (BUILT_IN_EXPM1
):
13971 CASE_FLT_FN (BUILT_IN_FLOOR
):
13972 CASE_FLT_FN (BUILT_IN_FMOD
):
13973 CASE_FLT_FN (BUILT_IN_FREXP
):
13974 CASE_FLT_FN (BUILT_IN_ICEIL
):
13975 CASE_FLT_FN (BUILT_IN_IFLOOR
):
13976 CASE_FLT_FN (BUILT_IN_IRINT
):
13977 CASE_FLT_FN (BUILT_IN_IROUND
):
13978 CASE_FLT_FN (BUILT_IN_LCEIL
):
13979 CASE_FLT_FN (BUILT_IN_LDEXP
):
13980 CASE_FLT_FN (BUILT_IN_LFLOOR
):
13981 CASE_FLT_FN (BUILT_IN_LLCEIL
):
13982 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
13983 CASE_FLT_FN (BUILT_IN_LLRINT
):
13984 CASE_FLT_FN (BUILT_IN_LLROUND
):
13985 CASE_FLT_FN (BUILT_IN_LRINT
):
13986 CASE_FLT_FN (BUILT_IN_LROUND
):
13987 CASE_FLT_FN (BUILT_IN_MODF
):
13988 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
13989 CASE_FLT_FN (BUILT_IN_RINT
):
13990 CASE_FLT_FN (BUILT_IN_ROUND
):
13991 CASE_FLT_FN (BUILT_IN_SCALB
):
13992 CASE_FLT_FN (BUILT_IN_SCALBLN
):
13993 CASE_FLT_FN (BUILT_IN_SCALBN
):
13994 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
13995 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
13996 CASE_FLT_FN (BUILT_IN_SINH
):
13997 CASE_FLT_FN (BUILT_IN_TANH
):
13998 CASE_FLT_FN (BUILT_IN_TRUNC
):
13999 /* True if the 1st argument is nonnegative. */
14000 return tree_expr_nonnegative_warnv_p (arg0
,
14001 strict_overflow_p
);
14003 CASE_FLT_FN (BUILT_IN_FMAX
):
14004 /* True if the 1st OR 2nd arguments are nonnegative. */
14005 return (tree_expr_nonnegative_warnv_p (arg0
,
14007 || (tree_expr_nonnegative_warnv_p (arg1
,
14008 strict_overflow_p
)));
14010 CASE_FLT_FN (BUILT_IN_FMIN
):
14011 /* True if the 1st AND 2nd arguments are nonnegative. */
14012 return (tree_expr_nonnegative_warnv_p (arg0
,
14014 && (tree_expr_nonnegative_warnv_p (arg1
,
14015 strict_overflow_p
)));
14017 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
14018 /* True if the 2nd argument is nonnegative. */
14019 return tree_expr_nonnegative_warnv_p (arg1
,
14020 strict_overflow_p
);
14022 CASE_FLT_FN (BUILT_IN_POWI
):
14023 /* True if the 1st argument is nonnegative or the second
14024 argument is an even integer. */
14025 if (TREE_CODE (arg1
) == INTEGER_CST
14026 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
14028 return tree_expr_nonnegative_warnv_p (arg0
,
14029 strict_overflow_p
);
14031 CASE_FLT_FN (BUILT_IN_POW
):
14032 /* True if the 1st argument is nonnegative or the second
14033 argument is an even integer valued real. */
14034 if (TREE_CODE (arg1
) == REAL_CST
)
14039 c
= TREE_REAL_CST (arg1
);
14040 n
= real_to_integer (&c
);
14043 REAL_VALUE_TYPE cint
;
14044 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
14045 if (real_identical (&c
, &cint
))
14049 return tree_expr_nonnegative_warnv_p (arg0
,
14050 strict_overflow_p
);
14055 return tree_simple_nonnegative_warnv_p (CALL_EXPR
,
14059 /* Return true if T is known to be non-negative. If the return
14060 value is based on the assumption that signed overflow is undefined,
14061 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14062 *STRICT_OVERFLOW_P. */
14065 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14067 enum tree_code code
= TREE_CODE (t
);
14068 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14075 tree temp
= TARGET_EXPR_SLOT (t
);
14076 t
= TARGET_EXPR_INITIAL (t
);
14078 /* If the initializer is non-void, then it's a normal expression
14079 that will be assigned to the slot. */
14080 if (!VOID_TYPE_P (t
))
14081 return tree_expr_nonnegative_warnv_p (t
, strict_overflow_p
);
14083 /* Otherwise, the initializer sets the slot in some way. One common
14084 way is an assignment statement at the end of the initializer. */
14087 if (TREE_CODE (t
) == BIND_EXPR
)
14088 t
= expr_last (BIND_EXPR_BODY (t
));
14089 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
14090 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
14091 t
= expr_last (TREE_OPERAND (t
, 0));
14092 else if (TREE_CODE (t
) == STATEMENT_LIST
)
14097 if (TREE_CODE (t
) == MODIFY_EXPR
14098 && TREE_OPERAND (t
, 0) == temp
)
14099 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
14100 strict_overflow_p
);
14107 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
14108 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
14110 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
14111 get_callee_fndecl (t
),
14114 strict_overflow_p
);
14116 case COMPOUND_EXPR
:
14118 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
14119 strict_overflow_p
);
14121 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t
, 1)),
14122 strict_overflow_p
);
14124 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
14125 strict_overflow_p
);
14128 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
14132 /* We don't know sign of `t', so be conservative and return false. */
14136 /* Return true if T is known to be non-negative. If the return
14137 value is based on the assumption that signed overflow is undefined,
14138 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14139 *STRICT_OVERFLOW_P. */
14142 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14144 enum tree_code code
;
14145 if (t
== error_mark_node
)
14148 code
= TREE_CODE (t
);
14149 switch (TREE_CODE_CLASS (code
))
14152 case tcc_comparison
:
14153 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
14155 TREE_OPERAND (t
, 0),
14156 TREE_OPERAND (t
, 1),
14157 strict_overflow_p
);
14160 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
14162 TREE_OPERAND (t
, 0),
14163 strict_overflow_p
);
14166 case tcc_declaration
:
14167 case tcc_reference
:
14168 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
14176 case TRUTH_AND_EXPR
:
14177 case TRUTH_OR_EXPR
:
14178 case TRUTH_XOR_EXPR
:
14179 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
14181 TREE_OPERAND (t
, 0),
14182 TREE_OPERAND (t
, 1),
14183 strict_overflow_p
);
14184 case TRUTH_NOT_EXPR
:
14185 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
14187 TREE_OPERAND (t
, 0),
14188 strict_overflow_p
);
14195 case WITH_SIZE_EXPR
:
14197 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
14200 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
);
14204 /* Return true if `t' is known to be non-negative. Handle warnings
14205 about undefined signed overflow. */
14208 tree_expr_nonnegative_p (tree t
)
14210 bool ret
, strict_overflow_p
;
14212 strict_overflow_p
= false;
14213 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
14214 if (strict_overflow_p
)
14215 fold_overflow_warning (("assuming signed overflow does not occur when "
14216 "determining that expression is always "
14218 WARN_STRICT_OVERFLOW_MISC
);
14223 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14224 For floating point we further ensure that T is not denormal.
14225 Similar logic is present in nonzero_address in rtlanal.h.
14227 If the return value is based on the assumption that signed overflow
14228 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14229 change *STRICT_OVERFLOW_P. */
14232 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
14233 bool *strict_overflow_p
)
14238 return tree_expr_nonzero_warnv_p (op0
,
14239 strict_overflow_p
);
14243 tree inner_type
= TREE_TYPE (op0
);
14244 tree outer_type
= type
;
14246 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
14247 && tree_expr_nonzero_warnv_p (op0
,
14248 strict_overflow_p
));
14252 case NON_LVALUE_EXPR
:
14253 return tree_expr_nonzero_warnv_p (op0
,
14254 strict_overflow_p
);
14263 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14264 For floating point we further ensure that T is not denormal.
14265 Similar logic is present in nonzero_address in rtlanal.h.
14267 If the return value is based on the assumption that signed overflow
14268 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14269 change *STRICT_OVERFLOW_P. */
14272 tree_binary_nonzero_warnv_p (enum tree_code code
,
14275 tree op1
, bool *strict_overflow_p
)
14277 bool sub_strict_overflow_p
;
14280 case POINTER_PLUS_EXPR
:
14282 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_UNDEFINED (type
))
14284 /* With the presence of negative values it is hard
14285 to say something. */
14286 sub_strict_overflow_p
= false;
14287 if (!tree_expr_nonnegative_warnv_p (op0
,
14288 &sub_strict_overflow_p
)
14289 || !tree_expr_nonnegative_warnv_p (op1
,
14290 &sub_strict_overflow_p
))
14292 /* One of operands must be positive and the other non-negative. */
14293 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14294 overflows, on a twos-complement machine the sum of two
14295 nonnegative numbers can never be zero. */
14296 return (tree_expr_nonzero_warnv_p (op0
,
14298 || tree_expr_nonzero_warnv_p (op1
,
14299 strict_overflow_p
));
14304 if (TYPE_OVERFLOW_UNDEFINED (type
))
14306 if (tree_expr_nonzero_warnv_p (op0
,
14308 && tree_expr_nonzero_warnv_p (op1
,
14309 strict_overflow_p
))
14311 *strict_overflow_p
= true;
14318 sub_strict_overflow_p
= false;
14319 if (tree_expr_nonzero_warnv_p (op0
,
14320 &sub_strict_overflow_p
)
14321 && tree_expr_nonzero_warnv_p (op1
,
14322 &sub_strict_overflow_p
))
14324 if (sub_strict_overflow_p
)
14325 *strict_overflow_p
= true;
14330 sub_strict_overflow_p
= false;
14331 if (tree_expr_nonzero_warnv_p (op0
,
14332 &sub_strict_overflow_p
))
14334 if (sub_strict_overflow_p
)
14335 *strict_overflow_p
= true;
14337 /* When both operands are nonzero, then MAX must be too. */
14338 if (tree_expr_nonzero_warnv_p (op1
,
14339 strict_overflow_p
))
14342 /* MAX where operand 0 is positive is positive. */
14343 return tree_expr_nonnegative_warnv_p (op0
,
14344 strict_overflow_p
);
14346 /* MAX where operand 1 is positive is positive. */
14347 else if (tree_expr_nonzero_warnv_p (op1
,
14348 &sub_strict_overflow_p
)
14349 && tree_expr_nonnegative_warnv_p (op1
,
14350 &sub_strict_overflow_p
))
14352 if (sub_strict_overflow_p
)
14353 *strict_overflow_p
= true;
14359 return (tree_expr_nonzero_warnv_p (op1
,
14361 || tree_expr_nonzero_warnv_p (op0
,
14362 strict_overflow_p
));
14371 /* Return true when T is an address and is known to be nonzero.
14372 For floating point we further ensure that T is not denormal.
14373 Similar logic is present in nonzero_address in rtlanal.h.
14375 If the return value is based on the assumption that signed overflow
14376 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14377 change *STRICT_OVERFLOW_P. */
14380 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
14382 bool sub_strict_overflow_p
;
14383 switch (TREE_CODE (t
))
14386 return !integer_zerop (t
);
14390 tree base
= TREE_OPERAND (t
, 0);
14392 if (!DECL_P (base
))
14393 base
= get_base_address (base
);
14398 /* For objects in symbol table check if we know they are non-zero.
14399 Don't do anything for variables and functions before symtab is built;
14400 it is quite possible that they will be declared weak later. */
14401 if (DECL_P (base
) && decl_in_symtab_p (base
))
14403 struct symtab_node
*symbol
;
14405 symbol
= symtab_node::get_create (base
);
14407 return symbol
->nonzero_address ();
14412 /* Function local objects are never NULL. */
14414 && (DECL_CONTEXT (base
)
14415 && TREE_CODE (DECL_CONTEXT (base
)) == FUNCTION_DECL
14416 && auto_var_in_fn_p (base
, DECL_CONTEXT (base
))))
14419 /* Constants are never weak. */
14420 if (CONSTANT_CLASS_P (base
))
14427 sub_strict_overflow_p
= false;
14428 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
14429 &sub_strict_overflow_p
)
14430 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
14431 &sub_strict_overflow_p
))
14433 if (sub_strict_overflow_p
)
14434 *strict_overflow_p
= true;
14445 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14446 attempt to fold the expression to a constant without modifying TYPE,
14449 If the expression could be simplified to a constant, then return
14450 the constant. If the expression would not be simplified to a
14451 constant, then return NULL_TREE. */
14454 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
14456 tree tem
= fold_binary (code
, type
, op0
, op1
);
14457 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
14460 /* Given the components of a unary expression CODE, TYPE and OP0,
14461 attempt to fold the expression to a constant without modifying
14464 If the expression could be simplified to a constant, then return
14465 the constant. If the expression would not be simplified to a
14466 constant, then return NULL_TREE. */
14469 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
14471 tree tem
= fold_unary (code
, type
, op0
);
14472 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
14475 /* If EXP represents referencing an element in a constant string
14476 (either via pointer arithmetic or array indexing), return the
14477 tree representing the value accessed, otherwise return NULL. */
14480 fold_read_from_constant_string (tree exp
)
14482 if ((TREE_CODE (exp
) == INDIRECT_REF
14483 || TREE_CODE (exp
) == ARRAY_REF
)
14484 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
14486 tree exp1
= TREE_OPERAND (exp
, 0);
14489 location_t loc
= EXPR_LOCATION (exp
);
14491 if (TREE_CODE (exp
) == INDIRECT_REF
)
14492 string
= string_constant (exp1
, &index
);
14495 tree low_bound
= array_ref_low_bound (exp
);
14496 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
14498 /* Optimize the special-case of a zero lower bound.
14500 We convert the low_bound to sizetype to avoid some problems
14501 with constant folding. (E.g. suppose the lower bound is 1,
14502 and its mode is QI. Without the conversion,l (ARRAY
14503 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14504 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14505 if (! integer_zerop (low_bound
))
14506 index
= size_diffop_loc (loc
, index
,
14507 fold_convert_loc (loc
, sizetype
, low_bound
));
14513 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
14514 && TREE_CODE (string
) == STRING_CST
14515 && TREE_CODE (index
) == INTEGER_CST
14516 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
14517 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
14519 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
14520 return build_int_cst_type (TREE_TYPE (exp
),
14521 (TREE_STRING_POINTER (string
)
14522 [TREE_INT_CST_LOW (index
)]));
14527 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14528 an integer constant, real, or fixed-point constant.
14530 TYPE is the type of the result. */
14533 fold_negate_const (tree arg0
, tree type
)
14535 tree t
= NULL_TREE
;
14537 switch (TREE_CODE (arg0
))
14542 wide_int val
= wi::neg (arg0
, &overflow
);
14543 t
= force_fit_type (type
, val
, 1,
14544 (overflow
| TREE_OVERFLOW (arg0
))
14545 && !TYPE_UNSIGNED (type
));
14550 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
14555 FIXED_VALUE_TYPE f
;
14556 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
14557 &(TREE_FIXED_CST (arg0
)), NULL
,
14558 TYPE_SATURATING (type
));
14559 t
= build_fixed (type
, f
);
14560 /* Propagate overflow flags. */
14561 if (overflow_p
| TREE_OVERFLOW (arg0
))
14562 TREE_OVERFLOW (t
) = 1;
14567 gcc_unreachable ();
14573 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14574 an integer constant or real constant.
14576 TYPE is the type of the result. */
14579 fold_abs_const (tree arg0
, tree type
)
14581 tree t
= NULL_TREE
;
14583 switch (TREE_CODE (arg0
))
14587 /* If the value is unsigned or non-negative, then the absolute value
14588 is the same as the ordinary value. */
14589 if (!wi::neg_p (arg0
, TYPE_SIGN (type
)))
14592 /* If the value is negative, then the absolute value is
14597 wide_int val
= wi::neg (arg0
, &overflow
);
14598 t
= force_fit_type (type
, val
, -1,
14599 overflow
| TREE_OVERFLOW (arg0
));
14605 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
14606 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
14612 gcc_unreachable ();
14618 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14619 constant. TYPE is the type of the result. */
14622 fold_not_const (const_tree arg0
, tree type
)
14624 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
14626 return force_fit_type (type
, wi::bit_not (arg0
), 0, TREE_OVERFLOW (arg0
));
14629 /* Given CODE, a relational operator, the target type, TYPE and two
14630 constant operands OP0 and OP1, return the result of the
14631 relational operation. If the result is not a compile time
14632 constant, then return NULL_TREE. */
14635 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
14637 int result
, invert
;
14639 /* From here on, the only cases we handle are when the result is
14640 known to be a constant. */
14642 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
14644 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
14645 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
14647 /* Handle the cases where either operand is a NaN. */
14648 if (real_isnan (c0
) || real_isnan (c1
))
14658 case UNORDERED_EXPR
:
14672 if (flag_trapping_math
)
14678 gcc_unreachable ();
14681 return constant_boolean_node (result
, type
);
14684 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
14687 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
14689 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
14690 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
14691 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
14694 /* Handle equality/inequality of complex constants. */
14695 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
14697 tree rcond
= fold_relational_const (code
, type
,
14698 TREE_REALPART (op0
),
14699 TREE_REALPART (op1
));
14700 tree icond
= fold_relational_const (code
, type
,
14701 TREE_IMAGPART (op0
),
14702 TREE_IMAGPART (op1
));
14703 if (code
== EQ_EXPR
)
14704 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
14705 else if (code
== NE_EXPR
)
14706 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
14711 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
14713 unsigned count
= VECTOR_CST_NELTS (op0
);
14714 tree
*elts
= XALLOCAVEC (tree
, count
);
14715 gcc_assert (VECTOR_CST_NELTS (op1
) == count
14716 && TYPE_VECTOR_SUBPARTS (type
) == count
);
14718 for (unsigned i
= 0; i
< count
; i
++)
14720 tree elem_type
= TREE_TYPE (type
);
14721 tree elem0
= VECTOR_CST_ELT (op0
, i
);
14722 tree elem1
= VECTOR_CST_ELT (op1
, i
);
14724 tree tem
= fold_relational_const (code
, elem_type
,
14727 if (tem
== NULL_TREE
)
14730 elts
[i
] = build_int_cst (elem_type
, integer_zerop (tem
) ? 0 : -1);
14733 return build_vector (type
, elts
);
14736 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14738 To compute GT, swap the arguments and do LT.
14739 To compute GE, do LT and invert the result.
14740 To compute LE, swap the arguments, do LT and invert the result.
14741 To compute NE, do EQ and invert the result.
14743 Therefore, the code below must handle only EQ and LT. */
14745 if (code
== LE_EXPR
|| code
== GT_EXPR
)
14747 std::swap (op0
, op1
);
14748 code
= swap_tree_comparison (code
);
14751 /* Note that it is safe to invert for real values here because we
14752 have already handled the one case that it matters. */
14755 if (code
== NE_EXPR
|| code
== GE_EXPR
)
14758 code
= invert_tree_comparison (code
, false);
14761 /* Compute a result for LT or EQ if args permit;
14762 Otherwise return T. */
14763 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
14765 if (code
== EQ_EXPR
)
14766 result
= tree_int_cst_equal (op0
, op1
);
14768 result
= tree_int_cst_lt (op0
, op1
);
14775 return constant_boolean_node (result
, type
);
14778 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14779 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14783 fold_build_cleanup_point_expr (tree type
, tree expr
)
14785 /* If the expression does not have side effects then we don't have to wrap
14786 it with a cleanup point expression. */
14787 if (!TREE_SIDE_EFFECTS (expr
))
14790 /* If the expression is a return, check to see if the expression inside the
14791 return has no side effects or the right hand side of the modify expression
14792 inside the return. If either don't have side effects set we don't need to
14793 wrap the expression in a cleanup point expression. Note we don't check the
14794 left hand side of the modify because it should always be a return decl. */
14795 if (TREE_CODE (expr
) == RETURN_EXPR
)
14797 tree op
= TREE_OPERAND (expr
, 0);
14798 if (!op
|| !TREE_SIDE_EFFECTS (op
))
14800 op
= TREE_OPERAND (op
, 1);
14801 if (!TREE_SIDE_EFFECTS (op
))
14805 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
14808 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14809 of an indirection through OP0, or NULL_TREE if no simplification is
14813 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
14819 subtype
= TREE_TYPE (sub
);
14820 if (!POINTER_TYPE_P (subtype
))
14823 if (TREE_CODE (sub
) == ADDR_EXPR
)
14825 tree op
= TREE_OPERAND (sub
, 0);
14826 tree optype
= TREE_TYPE (op
);
14827 /* *&CONST_DECL -> to the value of the const decl. */
14828 if (TREE_CODE (op
) == CONST_DECL
)
14829 return DECL_INITIAL (op
);
14830 /* *&p => p; make sure to handle *&"str"[cst] here. */
14831 if (type
== optype
)
14833 tree fop
= fold_read_from_constant_string (op
);
14839 /* *(foo *)&fooarray => fooarray[0] */
14840 else if (TREE_CODE (optype
) == ARRAY_TYPE
14841 && type
== TREE_TYPE (optype
)
14842 && (!in_gimple_form
14843 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
14845 tree type_domain
= TYPE_DOMAIN (optype
);
14846 tree min_val
= size_zero_node
;
14847 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
14848 min_val
= TYPE_MIN_VALUE (type_domain
);
14850 && TREE_CODE (min_val
) != INTEGER_CST
)
14852 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
14853 NULL_TREE
, NULL_TREE
);
14855 /* *(foo *)&complexfoo => __real__ complexfoo */
14856 else if (TREE_CODE (optype
) == COMPLEX_TYPE
14857 && type
== TREE_TYPE (optype
))
14858 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
14859 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14860 else if (TREE_CODE (optype
) == VECTOR_TYPE
14861 && type
== TREE_TYPE (optype
))
14863 tree part_width
= TYPE_SIZE (type
);
14864 tree index
= bitsize_int (0);
14865 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
14869 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
14870 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
14872 tree op00
= TREE_OPERAND (sub
, 0);
14873 tree op01
= TREE_OPERAND (sub
, 1);
14876 if (TREE_CODE (op00
) == ADDR_EXPR
)
14879 op00
= TREE_OPERAND (op00
, 0);
14880 op00type
= TREE_TYPE (op00
);
14882 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14883 if (TREE_CODE (op00type
) == VECTOR_TYPE
14884 && type
== TREE_TYPE (op00type
))
14886 HOST_WIDE_INT offset
= tree_to_shwi (op01
);
14887 tree part_width
= TYPE_SIZE (type
);
14888 unsigned HOST_WIDE_INT part_widthi
= tree_to_shwi (part_width
)/BITS_PER_UNIT
;
14889 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
14890 tree index
= bitsize_int (indexi
);
14892 if (offset
/ part_widthi
< TYPE_VECTOR_SUBPARTS (op00type
))
14893 return fold_build3_loc (loc
,
14894 BIT_FIELD_REF
, type
, op00
,
14895 part_width
, index
);
14898 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14899 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
14900 && type
== TREE_TYPE (op00type
))
14902 tree size
= TYPE_SIZE_UNIT (type
);
14903 if (tree_int_cst_equal (size
, op01
))
14904 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
14906 /* ((foo *)&fooarray)[1] => fooarray[1] */
14907 else if (TREE_CODE (op00type
) == ARRAY_TYPE
14908 && type
== TREE_TYPE (op00type
))
14910 tree type_domain
= TYPE_DOMAIN (op00type
);
14911 tree min_val
= size_zero_node
;
14912 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
14913 min_val
= TYPE_MIN_VALUE (type_domain
);
14914 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
14915 TYPE_SIZE_UNIT (type
));
14916 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
14917 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
14918 NULL_TREE
, NULL_TREE
);
14923 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14924 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
14925 && type
== TREE_TYPE (TREE_TYPE (subtype
))
14926 && (!in_gimple_form
14927 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
14930 tree min_val
= size_zero_node
;
14931 sub
= build_fold_indirect_ref_loc (loc
, sub
);
14932 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
14933 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
14934 min_val
= TYPE_MIN_VALUE (type_domain
);
14936 && TREE_CODE (min_val
) != INTEGER_CST
)
14938 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
14945 /* Builds an expression for an indirection through T, simplifying some
14949 build_fold_indirect_ref_loc (location_t loc
, tree t
)
14951 tree type
= TREE_TYPE (TREE_TYPE (t
));
14952 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
14957 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
14960 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14963 fold_indirect_ref_loc (location_t loc
, tree t
)
14965 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
14973 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14974 whose result is ignored. The type of the returned tree need not be
14975 the same as the original expression. */
14978 fold_ignored_result (tree t
)
14980 if (!TREE_SIDE_EFFECTS (t
))
14981 return integer_zero_node
;
14984 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
14987 t
= TREE_OPERAND (t
, 0);
14991 case tcc_comparison
:
14992 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
14993 t
= TREE_OPERAND (t
, 0);
14994 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
14995 t
= TREE_OPERAND (t
, 1);
15000 case tcc_expression
:
15001 switch (TREE_CODE (t
))
15003 case COMPOUND_EXPR
:
15004 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15006 t
= TREE_OPERAND (t
, 0);
15010 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
15011 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
15013 t
= TREE_OPERAND (t
, 0);
15026 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15029 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
15031 tree div
= NULL_TREE
;
15036 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15037 have to do anything. Only do this when we are not given a const,
15038 because in that case, this check is more expensive than just
15040 if (TREE_CODE (value
) != INTEGER_CST
)
15042 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15044 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
15048 /* If divisor is a power of two, simplify this to bit manipulation. */
15049 if (divisor
== (divisor
& -divisor
))
15051 if (TREE_CODE (value
) == INTEGER_CST
)
15053 wide_int val
= value
;
15056 if ((val
& (divisor
- 1)) == 0)
15059 overflow_p
= TREE_OVERFLOW (value
);
15060 val
+= divisor
- 1;
15061 val
&= - (int) divisor
;
15065 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
15071 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
15072 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
15073 t
= build_int_cst (TREE_TYPE (value
), - (int) divisor
);
15074 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
15080 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15081 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
15082 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
15088 /* Likewise, but round down. */
15091 round_down_loc (location_t loc
, tree value
, int divisor
)
15093 tree div
= NULL_TREE
;
15095 gcc_assert (divisor
> 0);
15099 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15100 have to do anything. Only do this when we are not given a const,
15101 because in that case, this check is more expensive than just
15103 if (TREE_CODE (value
) != INTEGER_CST
)
15105 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15107 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
15111 /* If divisor is a power of two, simplify this to bit manipulation. */
15112 if (divisor
== (divisor
& -divisor
))
15116 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
15117 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
15122 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15123 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
15124 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
15130 /* Returns the pointer to the base of the object addressed by EXP and
15131 extracts the information about the offset of the access, storing it
15132 to PBITPOS and POFFSET. */
15135 split_address_to_core_and_offset (tree exp
,
15136 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
15140 int unsignedp
, volatilep
;
15141 HOST_WIDE_INT bitsize
;
15142 location_t loc
= EXPR_LOCATION (exp
);
15144 if (TREE_CODE (exp
) == ADDR_EXPR
)
15146 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
15147 poffset
, &mode
, &unsignedp
, &volatilep
,
15149 core
= build_fold_addr_expr_loc (loc
, core
);
15155 *poffset
= NULL_TREE
;
15161 /* Returns true if addresses of E1 and E2 differ by a constant, false
15162 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15165 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
15168 HOST_WIDE_INT bitpos1
, bitpos2
;
15169 tree toffset1
, toffset2
, tdiff
, type
;
15171 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
15172 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
15174 if (bitpos1
% BITS_PER_UNIT
!= 0
15175 || bitpos2
% BITS_PER_UNIT
!= 0
15176 || !operand_equal_p (core1
, core2
, 0))
15179 if (toffset1
&& toffset2
)
15181 type
= TREE_TYPE (toffset1
);
15182 if (type
!= TREE_TYPE (toffset2
))
15183 toffset2
= fold_convert (type
, toffset2
);
15185 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
15186 if (!cst_and_fits_in_hwi (tdiff
))
15189 *diff
= int_cst_value (tdiff
);
15191 else if (toffset1
|| toffset2
)
15193 /* If only one of the offsets is non-constant, the difference cannot
15200 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
15204 /* Simplify the floating point expression EXP when the sign of the
15205 result is not significant. Return NULL_TREE if no simplification
15209 fold_strip_sign_ops (tree exp
)
15212 location_t loc
= EXPR_LOCATION (exp
);
15214 switch (TREE_CODE (exp
))
15218 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
15219 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
15223 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp
)))
15225 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
15226 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
15227 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
15228 return fold_build2_loc (loc
, TREE_CODE (exp
), TREE_TYPE (exp
),
15229 arg0
? arg0
: TREE_OPERAND (exp
, 0),
15230 arg1
? arg1
: TREE_OPERAND (exp
, 1));
15233 case COMPOUND_EXPR
:
15234 arg0
= TREE_OPERAND (exp
, 0);
15235 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
15237 return fold_build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (exp
), arg0
, arg1
);
15241 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
15242 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 2));
15244 return fold_build3_loc (loc
,
15245 COND_EXPR
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
15246 arg0
? arg0
: TREE_OPERAND (exp
, 1),
15247 arg1
? arg1
: TREE_OPERAND (exp
, 2));
15252 const enum built_in_function fcode
= builtin_mathfn_code (exp
);
15255 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
15256 /* Strip copysign function call, return the 1st argument. */
15257 arg0
= CALL_EXPR_ARG (exp
, 0);
15258 arg1
= CALL_EXPR_ARG (exp
, 1);
15259 return omit_one_operand_loc (loc
, TREE_TYPE (exp
), arg0
, arg1
);
15262 /* Strip sign ops from the argument of "odd" math functions. */
15263 if (negate_mathfn_p (fcode
))
15265 arg0
= fold_strip_sign_ops (CALL_EXPR_ARG (exp
, 0));
15267 return build_call_expr_loc (loc
, get_callee_fndecl (exp
), 1, arg0
);
15280 /* Return OFF converted to a pointer offset type suitable as offset for
15281 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15283 convert_to_ptrofftype_loc (location_t loc
, tree off
)
15285 return fold_convert_loc (loc
, sizetype
, off
);
15288 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15290 fold_build_pointer_plus_loc (location_t loc
, tree ptr
, tree off
)
15292 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
15293 ptr
, convert_to_ptrofftype_loc (loc
, off
));
15296 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15298 fold_build_pointer_plus_hwi_loc (location_t loc
, tree ptr
, HOST_WIDE_INT off
)
15300 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
15301 ptr
, size_int (off
));