1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not, see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
53 #include "fold-const.h"
54 #include "stor-layout.h"
56 #include "tree-iterator.h"
58 #include "insn-config.h"
68 #include "diagnostic-core.h"
70 #include "langhooks.h"
72 #include "internal-fn.h"
78 #include "generic-match.h"
81 #ifndef LOAD_EXTEND_OP
82 #define LOAD_EXTEND_OP(M) UNKNOWN
85 /* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  Folders consult this to permit more aggressive constant
   evaluation (e.g. of non-finite values) in static initializers.  */
87 int folding_initializer
= 0;
89 /* The following constants represent a bit based encoding of GCC's
90 comparison operators. This encoding simplifies transformations
91 on relational comparison operators, such as AND and OR. */
92 enum comparison_code
{
111 static bool negate_mathfn_p (enum built_in_function
);
112 static bool negate_expr_p (tree
);
113 static tree
negate_expr (tree
);
114 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
115 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
116 static enum comparison_code
comparison_to_compcode (enum tree_code
);
117 static enum tree_code
compcode_to_comparison (enum comparison_code
);
118 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
119 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
120 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
121 static tree
distribute_bit_expr (location_t
, enum tree_code
, tree
, tree
, tree
);
122 static tree
make_bit_field_ref (location_t
, tree
, tree
,
123 HOST_WIDE_INT
, HOST_WIDE_INT
, int);
124 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
126 static tree
decode_field_reference (location_t
, tree
, HOST_WIDE_INT
*,
128 machine_mode
*, int *, int *,
130 static int simple_operand_p (const_tree
);
131 static bool simple_operand_p_2 (tree
);
132 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
133 static tree
range_predecessor (tree
);
134 static tree
range_successor (tree
);
135 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
136 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
137 static tree
unextend (tree
, int, int, tree
);
138 static tree
optimize_minmax_comparison (location_t
, enum tree_code
,
140 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
141 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
142 static tree
fold_binary_op_with_conditional_arg (location_t
,
143 enum tree_code
, tree
,
146 static tree
fold_div_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
147 static bool reorder_operands_p (const_tree
, const_tree
);
148 static tree
fold_negate_const (tree
, tree
);
149 static tree
fold_not_const (const_tree
, tree
);
150 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
151 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
152 static tree
fold_view_convert_expr (tree
, tree
);
153 static bool vec_cst_ctor_to_array (tree
, tree
*);
156 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
157 Otherwise, return LOC. */
160 expr_location_or (tree t
, location_t loc
)
162 location_t tloc
= EXPR_LOCATION (t
);
163 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
166 /* Similar to protected_set_expr_location, but never modify x in place,
167 if location can and needs to be set, unshare it. */
170 protected_set_expr_location_unshare (tree x
, location_t loc
)
172 if (CAN_HAVE_LOCATION_P (x
)
173 && EXPR_LOCATION (x
) != loc
174 && !(TREE_CODE (x
) == SAVE_EXPR
175 || TREE_CODE (x
) == TARGET_EXPR
176 || TREE_CODE (x
) == BIND_EXPR
))
179 SET_EXPR_LOCATION (x
, loc
);
184 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
185 division and returns the quotient. Otherwise returns
189 div_if_zero_remainder (const_tree arg1
, const_tree arg2
)
193 if (wi::multiple_of_p (wi::to_widest (arg1
), wi::to_widest (arg2
),
195 return wide_int_to_tree (TREE_TYPE (arg1
), quo
);
200 /* This is nonzero if we should defer warnings about undefined
201 overflow. This facility exists because these warnings are a
202 special case. The code to estimate loop iterations does not want
203 to issue any warnings, since it works with expressions which do not
204 occur in user code. Various bits of cleanup code call fold(), but
205 only use the result if it has certain characteristics (e.g., is a
206 constant); that code only wants to issue a warning if the result is
   actually used.  This counter nests: each fold_defer_overflow_warnings
   call increments it and each undefer call decrements it.  */
209 static int fold_deferring_overflow_warnings
;
211 /* If a warning about undefined overflow is deferred, this is the
212 warning message (a gettext msgid).  NULL when no warning is pending.
213 Note that this may cause us to turn two warnings into
214 one, but that is fine since it is sufficient to only give one
215 warning per expression. */
216 static const char* fold_deferred_overflow_warning
;
218 /* If a warning about undefined overflow is deferred, this is the
219 level (a warn_strict_overflow_code value) at which the warning
220 should be emitted; the smallest deferred code wins. */
221 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
223 /* Start deferring overflow warnings. We could use a stack here to
224 permit nested calls, but at present it is not necessary. */
227 fold_defer_overflow_warnings (void)
229 ++fold_deferring_overflow_warnings
;
232 /* Stop deferring overflow warnings. If there is a pending warning,
233 and ISSUE is true, then issue the warning if appropriate. STMT is
234 the statement with which the warning should be associated (used for
235 location information); STMT may be NULL. CODE is the level of the
236 warning--a warn_strict_overflow_code value. This function will use
237 the smaller of CODE and the deferred code when deciding whether to
238 issue the warning. CODE may be zero to mean to always use the
242 fold_undefer_overflow_warnings (bool issue
, const_gimple stmt
, int code
)
247 gcc_assert (fold_deferring_overflow_warnings
> 0);
248 --fold_deferring_overflow_warnings
;
249 if (fold_deferring_overflow_warnings
> 0)
251 if (fold_deferred_overflow_warning
!= NULL
253 && code
< (int) fold_deferred_overflow_code
)
254 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
258 warnmsg
= fold_deferred_overflow_warning
;
259 fold_deferred_overflow_warning
= NULL
;
261 if (!issue
|| warnmsg
== NULL
)
264 if (gimple_no_warning_p (stmt
))
267 /* Use the smallest code level when deciding to issue the
269 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
270 code
= fold_deferred_overflow_code
;
272 if (!issue_strict_overflow_warning (code
))
276 locus
= input_location
;
278 locus
= gimple_location (stmt
);
279 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
282 /* Stop deferring overflow warnings, ignoring any deferred
286 fold_undefer_and_ignore_overflow_warnings (void)
288 fold_undefer_overflow_warnings (false, NULL
, 0);
291 /* Whether we are deferring overflow warnings. */
294 fold_deferring_overflow_warnings_p (void)
296 return fold_deferring_overflow_warnings
> 0;
299 /* This is called when we fold something based on the fact that signed
300 overflow is undefined. */
303 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
305 if (fold_deferring_overflow_warnings
> 0)
307 if (fold_deferred_overflow_warning
== NULL
308 || wc
< fold_deferred_overflow_code
)
310 fold_deferred_overflow_warning
= gmsgid
;
311 fold_deferred_overflow_code
= wc
;
314 else if (issue_strict_overflow_warning (wc
))
315 warning (OPT_Wstrict_overflow
, gmsgid
);
318 /* Return true if the built-in mathematical function specified by CODE
319 is odd, i.e. -f(x) == f(-x). */
322 negate_mathfn_p (enum built_in_function code
)
326 CASE_FLT_FN (BUILT_IN_ASIN
):
327 CASE_FLT_FN (BUILT_IN_ASINH
):
328 CASE_FLT_FN (BUILT_IN_ATAN
):
329 CASE_FLT_FN (BUILT_IN_ATANH
):
330 CASE_FLT_FN (BUILT_IN_CASIN
):
331 CASE_FLT_FN (BUILT_IN_CASINH
):
332 CASE_FLT_FN (BUILT_IN_CATAN
):
333 CASE_FLT_FN (BUILT_IN_CATANH
):
334 CASE_FLT_FN (BUILT_IN_CBRT
):
335 CASE_FLT_FN (BUILT_IN_CPROJ
):
336 CASE_FLT_FN (BUILT_IN_CSIN
):
337 CASE_FLT_FN (BUILT_IN_CSINH
):
338 CASE_FLT_FN (BUILT_IN_CTAN
):
339 CASE_FLT_FN (BUILT_IN_CTANH
):
340 CASE_FLT_FN (BUILT_IN_ERF
):
341 CASE_FLT_FN (BUILT_IN_LLROUND
):
342 CASE_FLT_FN (BUILT_IN_LROUND
):
343 CASE_FLT_FN (BUILT_IN_ROUND
):
344 CASE_FLT_FN (BUILT_IN_SIN
):
345 CASE_FLT_FN (BUILT_IN_SINH
):
346 CASE_FLT_FN (BUILT_IN_TAN
):
347 CASE_FLT_FN (BUILT_IN_TANH
):
348 CASE_FLT_FN (BUILT_IN_TRUNC
):
351 CASE_FLT_FN (BUILT_IN_LLRINT
):
352 CASE_FLT_FN (BUILT_IN_LRINT
):
353 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
354 CASE_FLT_FN (BUILT_IN_RINT
):
355 return !flag_rounding_math
;
363 /* Check whether we may negate an integer constant T without causing
367 may_negate_without_overflow_p (const_tree t
)
371 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
373 type
= TREE_TYPE (t
);
374 if (TYPE_UNSIGNED (type
))
377 return !wi::only_sign_bit_p (t
);
380 /* Determine whether an expression T can be cheaply negated using
381 the function negate_expr without introducing undefined overflow. */
384 negate_expr_p (tree t
)
391 type
= TREE_TYPE (t
);
394 switch (TREE_CODE (t
))
397 if (INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_WRAPS (type
))
400 /* Check that -CST will not overflow type. */
401 return may_negate_without_overflow_p (t
);
403 return (INTEGRAL_TYPE_P (type
)
404 && TYPE_OVERFLOW_WRAPS (type
));
410 return !TYPE_OVERFLOW_SANITIZED (type
);
413 /* We want to canonicalize to positive real constants. Pretend
414 that only negative ones can be easily negated. */
415 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
418 return negate_expr_p (TREE_REALPART (t
))
419 && negate_expr_p (TREE_IMAGPART (t
));
423 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
426 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
428 for (i
= 0; i
< count
; i
++)
429 if (!negate_expr_p (VECTOR_CST_ELT (t
, i
)))
436 return negate_expr_p (TREE_OPERAND (t
, 0))
437 && negate_expr_p (TREE_OPERAND (t
, 1));
440 return negate_expr_p (TREE_OPERAND (t
, 0));
443 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
444 || HONOR_SIGNED_ZEROS (element_mode (type
)))
446 /* -(A + B) -> (-B) - A. */
447 if (negate_expr_p (TREE_OPERAND (t
, 1))
448 && reorder_operands_p (TREE_OPERAND (t
, 0),
449 TREE_OPERAND (t
, 1)))
451 /* -(A + B) -> (-A) - B. */
452 return negate_expr_p (TREE_OPERAND (t
, 0));
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
457 && !HONOR_SIGNED_ZEROS (element_mode (type
))
458 && reorder_operands_p (TREE_OPERAND (t
, 0),
459 TREE_OPERAND (t
, 1));
462 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t
))))
469 return negate_expr_p (TREE_OPERAND (t
, 1))
470 || negate_expr_p (TREE_OPERAND (t
, 0));
476 /* In general we can't negate A / B, because if A is INT_MIN and
477 B is 1, we may turn this into INT_MIN / -1 which is undefined
478 and actually traps on some architectures. But if overflow is
479 undefined, we can negate, because - (INT_MIN / 1) is an
481 if (INTEGRAL_TYPE_P (TREE_TYPE (t
)))
483 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
485 /* If overflow is undefined then we have to be careful because
486 we ask whether it's ok to associate the negate with the
487 division which is not ok for example for
488 -((a - b) / c) where (-(a - b)) / c may invoke undefined
489 overflow because of negating INT_MIN. So do not use
490 negate_expr_p here but open-code the two important cases. */
491 if (TREE_CODE (TREE_OPERAND (t
, 0)) == NEGATE_EXPR
492 || (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
493 && may_negate_without_overflow_p (TREE_OPERAND (t
, 0))))
496 else if (negate_expr_p (TREE_OPERAND (t
, 0)))
498 return negate_expr_p (TREE_OPERAND (t
, 1));
501 /* Negate -((double)float) as (double)(-float). */
502 if (TREE_CODE (type
) == REAL_TYPE
)
504 tree tem
= strip_float_extensions (t
);
506 return negate_expr_p (tem
);
511 /* Negate -f(x) as f(-x). */
512 if (negate_mathfn_p (builtin_mathfn_code (t
)))
513 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
517 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
518 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
520 tree op1
= TREE_OPERAND (t
, 1);
521 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
532 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
533 simplification is possible.
534 If negate_expr_p would return true for T, NULL_TREE will never be
538 fold_negate_expr (location_t loc
, tree t
)
540 tree type
= TREE_TYPE (t
);
543 switch (TREE_CODE (t
))
545 /* Convert - (~A) to A + 1. */
547 if (INTEGRAL_TYPE_P (type
))
548 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
549 build_one_cst (type
));
553 tem
= fold_negate_const (t
, type
);
554 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
555 || (ANY_INTEGRAL_TYPE_P (type
)
556 && !TYPE_OVERFLOW_TRAPS (type
)
557 && TYPE_OVERFLOW_WRAPS (type
))
558 || (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
563 tem
= fold_negate_const (t
, type
);
567 tem
= fold_negate_const (t
, type
);
572 tree rpart
= fold_negate_expr (loc
, TREE_REALPART (t
));
573 tree ipart
= fold_negate_expr (loc
, TREE_IMAGPART (t
));
575 return build_complex (type
, rpart
, ipart
);
581 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
582 tree
*elts
= XALLOCAVEC (tree
, count
);
584 for (i
= 0; i
< count
; i
++)
586 elts
[i
] = fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
587 if (elts
[i
] == NULL_TREE
)
591 return build_vector (type
, elts
);
595 if (negate_expr_p (t
))
596 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
597 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
598 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
602 if (negate_expr_p (t
))
603 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
604 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
608 if (!TYPE_OVERFLOW_SANITIZED (type
))
609 return TREE_OPERAND (t
, 0);
613 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
614 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
616 /* -(A + B) -> (-B) - A. */
617 if (negate_expr_p (TREE_OPERAND (t
, 1))
618 && reorder_operands_p (TREE_OPERAND (t
, 0),
619 TREE_OPERAND (t
, 1)))
621 tem
= negate_expr (TREE_OPERAND (t
, 1));
622 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
623 tem
, TREE_OPERAND (t
, 0));
626 /* -(A + B) -> (-A) - B. */
627 if (negate_expr_p (TREE_OPERAND (t
, 0)))
629 tem
= negate_expr (TREE_OPERAND (t
, 0));
630 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
631 tem
, TREE_OPERAND (t
, 1));
637 /* - (A - B) -> B - A */
638 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
639 && !HONOR_SIGNED_ZEROS (element_mode (type
))
640 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
641 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
642 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
646 if (TYPE_UNSIGNED (type
))
652 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
)))
654 tem
= TREE_OPERAND (t
, 1);
655 if (negate_expr_p (tem
))
656 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
657 TREE_OPERAND (t
, 0), negate_expr (tem
));
658 tem
= TREE_OPERAND (t
, 0);
659 if (negate_expr_p (tem
))
660 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
661 negate_expr (tem
), TREE_OPERAND (t
, 1));
668 /* In general we can't negate A / B, because if A is INT_MIN and
669 B is 1, we may turn this into INT_MIN / -1 which is undefined
670 and actually traps on some architectures. But if overflow is
671 undefined, we can negate, because - (INT_MIN / 1) is an
673 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
675 const char * const warnmsg
= G_("assuming signed overflow does not "
676 "occur when negating a division");
677 tem
= TREE_OPERAND (t
, 1);
678 if (negate_expr_p (tem
))
680 if (INTEGRAL_TYPE_P (type
)
681 && (TREE_CODE (tem
) != INTEGER_CST
682 || integer_onep (tem
)))
683 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
684 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
685 TREE_OPERAND (t
, 0), negate_expr (tem
));
687 /* If overflow is undefined then we have to be careful because
688 we ask whether it's ok to associate the negate with the
689 division which is not ok for example for
690 -((a - b) / c) where (-(a - b)) / c may invoke undefined
691 overflow because of negating INT_MIN. So do not use
692 negate_expr_p here but open-code the two important cases. */
693 tem
= TREE_OPERAND (t
, 0);
694 if ((INTEGRAL_TYPE_P (type
)
695 && (TREE_CODE (tem
) == NEGATE_EXPR
696 || (TREE_CODE (tem
) == INTEGER_CST
697 && may_negate_without_overflow_p (tem
))))
698 || !INTEGRAL_TYPE_P (type
))
699 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
700 negate_expr (tem
), TREE_OPERAND (t
, 1));
705 /* Convert -((double)float) into (double)(-float). */
706 if (TREE_CODE (type
) == REAL_TYPE
)
708 tem
= strip_float_extensions (t
);
709 if (tem
!= t
&& negate_expr_p (tem
))
710 return fold_convert_loc (loc
, type
, negate_expr (tem
));
715 /* Negate -f(x) as f(-x). */
716 if (negate_mathfn_p (builtin_mathfn_code (t
))
717 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
721 fndecl
= get_callee_fndecl (t
);
722 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
723 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
728 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
729 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
731 tree op1
= TREE_OPERAND (t
, 1);
732 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
734 tree ntype
= TYPE_UNSIGNED (type
)
735 ? signed_type_for (type
)
736 : unsigned_type_for (type
);
737 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
738 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
739 return fold_convert_loc (loc
, type
, temp
);
751 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
752 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
764 loc
= EXPR_LOCATION (t
);
765 type
= TREE_TYPE (t
);
768 tem
= fold_negate_expr (loc
, t
);
770 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
771 return fold_convert_loc (loc
, type
, tem
);
774 /* Split a tree IN into a constant, literal and variable parts that could be
775 combined with CODE to make IN. "constant" means an expression with
776 TREE_CONSTANT but that isn't an actual constant. CODE must be a
777 commutative arithmetic operation. Store the constant part into *CONP,
778 the literal in *LITP and return the variable part. If a part isn't
779 present, set it to null. If the tree does not decompose in this way,
780 return the entire tree as the variable part and the other parts as null.
782 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
783 case, we negate an operand that was subtracted. Except if it is a
784 literal for which we use *MINUS_LITP instead.
786 If NEGATE_P is true, we are negating all of IN, again except a literal
787 for which we use *MINUS_LITP instead.
789 If IN is itself a literal or constant, return it as appropriate.
791 Note that we do not guarantee that any of the three values will be the
792 same type as IN, but they will have the same signedness and mode. */
795 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
796 tree
*minus_litp
, int negate_p
)
804 /* Strip any conversions that don't change the machine mode or signedness. */
805 STRIP_SIGN_NOPS (in
);
807 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
808 || TREE_CODE (in
) == FIXED_CST
)
810 else if (TREE_CODE (in
) == code
811 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
812 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
813 /* We can associate addition and subtraction together (even
814 though the C standard doesn't say so) for integers because
815 the value is not affected. For reals, the value might be
816 affected, so we can't. */
817 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
818 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
820 tree op0
= TREE_OPERAND (in
, 0);
821 tree op1
= TREE_OPERAND (in
, 1);
822 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
823 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
825 /* First see if either of the operands is a literal, then a constant. */
826 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
827 || TREE_CODE (op0
) == FIXED_CST
)
828 *litp
= op0
, op0
= 0;
829 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
830 || TREE_CODE (op1
) == FIXED_CST
)
831 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
833 if (op0
!= 0 && TREE_CONSTANT (op0
))
834 *conp
= op0
, op0
= 0;
835 else if (op1
!= 0 && TREE_CONSTANT (op1
))
836 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
838 /* If we haven't dealt with either operand, this is not a case we can
839 decompose. Otherwise, VAR is either of the ones remaining, if any. */
840 if (op0
!= 0 && op1
!= 0)
845 var
= op1
, neg_var_p
= neg1_p
;
847 /* Now do any needed negations. */
849 *minus_litp
= *litp
, *litp
= 0;
851 *conp
= negate_expr (*conp
);
853 var
= negate_expr (var
);
855 else if (TREE_CODE (in
) == BIT_NOT_EXPR
856 && code
== PLUS_EXPR
)
858 /* -X - 1 is folded to ~X, undo that here. */
859 *minus_litp
= build_one_cst (TREE_TYPE (in
));
860 var
= negate_expr (TREE_OPERAND (in
, 0));
862 else if (TREE_CONSTANT (in
))
870 *minus_litp
= *litp
, *litp
= 0;
871 else if (*minus_litp
)
872 *litp
= *minus_litp
, *minus_litp
= 0;
873 *conp
= negate_expr (*conp
);
874 var
= negate_expr (var
);
880 /* Re-associate trees split by the above function. T1 and T2 are
881 either expressions to associate or null. Return the new
882 expression, if any. LOC is the location of the new expression. If
883 we build an operation, do it in TYPE and with CODE. */
886 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
893 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
894 try to fold this since we will have infinite recursion. But do
895 deal with any NEGATE_EXPRs. */
896 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
897 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
899 if (code
== PLUS_EXPR
)
901 if (TREE_CODE (t1
) == NEGATE_EXPR
)
902 return build2_loc (loc
, MINUS_EXPR
, type
,
903 fold_convert_loc (loc
, type
, t2
),
904 fold_convert_loc (loc
, type
,
905 TREE_OPERAND (t1
, 0)));
906 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
907 return build2_loc (loc
, MINUS_EXPR
, type
,
908 fold_convert_loc (loc
, type
, t1
),
909 fold_convert_loc (loc
, type
,
910 TREE_OPERAND (t2
, 0)));
911 else if (integer_zerop (t2
))
912 return fold_convert_loc (loc
, type
, t1
);
914 else if (code
== MINUS_EXPR
)
916 if (integer_zerop (t2
))
917 return fold_convert_loc (loc
, type
, t1
);
920 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
921 fold_convert_loc (loc
, type
, t2
));
924 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
925 fold_convert_loc (loc
, type
, t2
));
928 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
929 for use in int_const_binop, size_binop and size_diffop. */
932 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
934 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
936 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
951 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
952 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
953 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
957 /* Combine two integer constants ARG1 and ARG2 under operation CODE
958 to produce a new constant. Return NULL_TREE if we don't know how
959 to evaluate CODE at compile-time. */
962 int_const_binop_1 (enum tree_code code
, const_tree arg1
, const_tree parg2
,
967 tree type
= TREE_TYPE (arg1
);
968 signop sign
= TYPE_SIGN (type
);
969 bool overflow
= false;
971 wide_int arg2
= wide_int::from (parg2
, TYPE_PRECISION (type
),
972 TYPE_SIGN (TREE_TYPE (parg2
)));
977 res
= wi::bit_or (arg1
, arg2
);
981 res
= wi::bit_xor (arg1
, arg2
);
985 res
= wi::bit_and (arg1
, arg2
);
990 if (wi::neg_p (arg2
))
993 if (code
== RSHIFT_EXPR
)
999 if (code
== RSHIFT_EXPR
)
1000 /* It's unclear from the C standard whether shifts can overflow.
1001 The following code ignores overflow; perhaps a C standard
1002 interpretation ruling is needed. */
1003 res
= wi::rshift (arg1
, arg2
, sign
);
1005 res
= wi::lshift (arg1
, arg2
);
1010 if (wi::neg_p (arg2
))
1013 if (code
== RROTATE_EXPR
)
1014 code
= LROTATE_EXPR
;
1016 code
= RROTATE_EXPR
;
1019 if (code
== RROTATE_EXPR
)
1020 res
= wi::rrotate (arg1
, arg2
);
1022 res
= wi::lrotate (arg1
, arg2
);
1026 res
= wi::add (arg1
, arg2
, sign
, &overflow
);
1030 res
= wi::sub (arg1
, arg2
, sign
, &overflow
);
1034 res
= wi::mul (arg1
, arg2
, sign
, &overflow
);
1037 case MULT_HIGHPART_EXPR
:
1038 res
= wi::mul_high (arg1
, arg2
, sign
);
1041 case TRUNC_DIV_EXPR
:
1042 case EXACT_DIV_EXPR
:
1045 res
= wi::div_trunc (arg1
, arg2
, sign
, &overflow
);
1048 case FLOOR_DIV_EXPR
:
1051 res
= wi::div_floor (arg1
, arg2
, sign
, &overflow
);
1057 res
= wi::div_ceil (arg1
, arg2
, sign
, &overflow
);
1060 case ROUND_DIV_EXPR
:
1063 res
= wi::div_round (arg1
, arg2
, sign
, &overflow
);
1066 case TRUNC_MOD_EXPR
:
1069 res
= wi::mod_trunc (arg1
, arg2
, sign
, &overflow
);
1072 case FLOOR_MOD_EXPR
:
1075 res
= wi::mod_floor (arg1
, arg2
, sign
, &overflow
);
1081 res
= wi::mod_ceil (arg1
, arg2
, sign
, &overflow
);
1084 case ROUND_MOD_EXPR
:
1087 res
= wi::mod_round (arg1
, arg2
, sign
, &overflow
);
1091 res
= wi::min (arg1
, arg2
, sign
);
1095 res
= wi::max (arg1
, arg2
, sign
);
1102 t
= force_fit_type (type
, res
, overflowable
,
1103 (((sign
== SIGNED
|| overflowable
== -1)
1105 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (parg2
)));
1111 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
1113 return int_const_binop_1 (code
, arg1
, arg2
, 1);
1116 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1117 constant. We assume ARG1 and ARG2 have the same data type, or at least
1118 are the same kind of constant and the same machine mode. Return zero if
1119 combining the constants is not allowed in the current operating mode. */
1122 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1124 /* Sanity check for the recursive cases. */
1131 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg2
) == INTEGER_CST
)
1133 if (code
== POINTER_PLUS_EXPR
)
1134 return int_const_binop (PLUS_EXPR
,
1135 arg1
, fold_convert (TREE_TYPE (arg1
), arg2
));
1137 return int_const_binop (code
, arg1
, arg2
);
1140 if (TREE_CODE (arg1
) == REAL_CST
&& TREE_CODE (arg2
) == REAL_CST
)
1145 REAL_VALUE_TYPE value
;
1146 REAL_VALUE_TYPE result
;
1150 /* The following codes are handled by real_arithmetic. */
1165 d1
= TREE_REAL_CST (arg1
);
1166 d2
= TREE_REAL_CST (arg2
);
1168 type
= TREE_TYPE (arg1
);
1169 mode
= TYPE_MODE (type
);
1171 /* Don't perform operation if we honor signaling NaNs and
1172 either operand is a NaN. */
1173 if (HONOR_SNANS (mode
)
1174 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1177 /* Don't perform operation if it would raise a division
1178 by zero exception. */
1179 if (code
== RDIV_EXPR
1180 && REAL_VALUES_EQUAL (d2
, dconst0
)
1181 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1184 /* If either operand is a NaN, just return it. Otherwise, set up
1185 for floating-point trap; we return an overflow. */
1186 if (REAL_VALUE_ISNAN (d1
))
1188 else if (REAL_VALUE_ISNAN (d2
))
1191 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1192 real_convert (&result
, mode
, &value
);
1194 /* Don't constant fold this floating point operation if
1195 the result has overflowed and flag_trapping_math. */
1196 if (flag_trapping_math
1197 && MODE_HAS_INFINITIES (mode
)
1198 && REAL_VALUE_ISINF (result
)
1199 && !REAL_VALUE_ISINF (d1
)
1200 && !REAL_VALUE_ISINF (d2
))
1203 /* Don't constant fold this floating point operation if the
1204 result may dependent upon the run-time rounding mode and
1205 flag_rounding_math is set, or if GCC's software emulation
1206 is unable to accurately represent the result. */
1207 if ((flag_rounding_math
1208 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1209 && (inexact
|| !real_identical (&result
, &value
)))
1212 t
= build_real (type
, result
);
1214 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1218 if (TREE_CODE (arg1
) == FIXED_CST
)
1220 FIXED_VALUE_TYPE f1
;
1221 FIXED_VALUE_TYPE f2
;
1222 FIXED_VALUE_TYPE result
;
1227 /* The following codes are handled by fixed_arithmetic. */
1233 case TRUNC_DIV_EXPR
:
1234 if (TREE_CODE (arg2
) != FIXED_CST
)
1236 f2
= TREE_FIXED_CST (arg2
);
1242 if (TREE_CODE (arg2
) != INTEGER_CST
)
1245 f2
.data
.high
= w2
.elt (1);
1246 f2
.data
.low
= w2
.elt (0);
1255 f1
= TREE_FIXED_CST (arg1
);
1256 type
= TREE_TYPE (arg1
);
1257 sat_p
= TYPE_SATURATING (type
);
1258 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1259 t
= build_fixed (type
, result
);
1260 /* Propagate overflow flags. */
1261 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1262 TREE_OVERFLOW (t
) = 1;
1266 if (TREE_CODE (arg1
) == COMPLEX_CST
&& TREE_CODE (arg2
) == COMPLEX_CST
)
1268 tree type
= TREE_TYPE (arg1
);
1269 tree r1
= TREE_REALPART (arg1
);
1270 tree i1
= TREE_IMAGPART (arg1
);
1271 tree r2
= TREE_REALPART (arg2
);
1272 tree i2
= TREE_IMAGPART (arg2
);
1279 real
= const_binop (code
, r1
, r2
);
1280 imag
= const_binop (code
, i1
, i2
);
1284 if (COMPLEX_FLOAT_TYPE_P (type
))
1285 return do_mpc_arg2 (arg1
, arg2
, type
,
1286 /* do_nonfinite= */ folding_initializer
,
1289 real
= const_binop (MINUS_EXPR
,
1290 const_binop (MULT_EXPR
, r1
, r2
),
1291 const_binop (MULT_EXPR
, i1
, i2
));
1292 imag
= const_binop (PLUS_EXPR
,
1293 const_binop (MULT_EXPR
, r1
, i2
),
1294 const_binop (MULT_EXPR
, i1
, r2
));
1298 if (COMPLEX_FLOAT_TYPE_P (type
))
1299 return do_mpc_arg2 (arg1
, arg2
, type
,
1300 /* do_nonfinite= */ folding_initializer
,
1303 case TRUNC_DIV_EXPR
:
1305 case FLOOR_DIV_EXPR
:
1306 case ROUND_DIV_EXPR
:
1307 if (flag_complex_method
== 0)
1309 /* Keep this algorithm in sync with
1310 tree-complex.c:expand_complex_div_straight().
1312 Expand complex division to scalars, straightforward algorithm.
1313 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1317 = const_binop (PLUS_EXPR
,
1318 const_binop (MULT_EXPR
, r2
, r2
),
1319 const_binop (MULT_EXPR
, i2
, i2
));
1321 = const_binop (PLUS_EXPR
,
1322 const_binop (MULT_EXPR
, r1
, r2
),
1323 const_binop (MULT_EXPR
, i1
, i2
));
1325 = const_binop (MINUS_EXPR
,
1326 const_binop (MULT_EXPR
, i1
, r2
),
1327 const_binop (MULT_EXPR
, r1
, i2
));
1329 real
= const_binop (code
, t1
, magsquared
);
1330 imag
= const_binop (code
, t2
, magsquared
);
1334 /* Keep this algorithm in sync with
1335 tree-complex.c:expand_complex_div_wide().
1337 Expand complex division to scalars, modified algorithm to minimize
1338 overflow with wide input ranges. */
1339 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1340 fold_abs_const (r2
, TREE_TYPE (type
)),
1341 fold_abs_const (i2
, TREE_TYPE (type
)));
1343 if (integer_nonzerop (compare
))
1345 /* In the TRUE branch, we compute
1347 div = (br * ratio) + bi;
1348 tr = (ar * ratio) + ai;
1349 ti = (ai * ratio) - ar;
1352 tree ratio
= const_binop (code
, r2
, i2
);
1353 tree div
= const_binop (PLUS_EXPR
, i2
,
1354 const_binop (MULT_EXPR
, r2
, ratio
));
1355 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1356 real
= const_binop (PLUS_EXPR
, real
, i1
);
1357 real
= const_binop (code
, real
, div
);
1359 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1360 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1361 imag
= const_binop (code
, imag
, div
);
1365 /* In the FALSE branch, we compute
1367 divisor = (d * ratio) + c;
1368 tr = (b * ratio) + a;
1369 ti = b - (a * ratio);
1372 tree ratio
= const_binop (code
, i2
, r2
);
1373 tree div
= const_binop (PLUS_EXPR
, r2
,
1374 const_binop (MULT_EXPR
, i2
, ratio
));
1376 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1377 real
= const_binop (PLUS_EXPR
, real
, r1
);
1378 real
= const_binop (code
, real
, div
);
1380 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1381 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1382 imag
= const_binop (code
, imag
, div
);
1392 return build_complex (type
, real
, imag
);
1395 if (TREE_CODE (arg1
) == VECTOR_CST
1396 && TREE_CODE (arg2
) == VECTOR_CST
)
1398 tree type
= TREE_TYPE (arg1
);
1399 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1400 tree
*elts
= XALLOCAVEC (tree
, count
);
1402 for (i
= 0; i
< count
; i
++)
1404 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1405 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1407 elts
[i
] = const_binop (code
, elem1
, elem2
);
1409 /* It is possible that const_binop cannot handle the given
1410 code and return NULL_TREE */
1411 if (elts
[i
] == NULL_TREE
)
1415 return build_vector (type
, elts
);
1418 /* Shifts allow a scalar offset for a vector. */
1419 if (TREE_CODE (arg1
) == VECTOR_CST
1420 && TREE_CODE (arg2
) == INTEGER_CST
)
1422 tree type
= TREE_TYPE (arg1
);
1423 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1424 tree
*elts
= XALLOCAVEC (tree
, count
);
1426 for (i
= 0; i
< count
; i
++)
1428 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1430 elts
[i
] = const_binop (code
, elem1
, arg2
);
1432 /* It is possible that const_binop cannot handle the given
1433 code and return NULL_TREE. */
1434 if (elts
[i
] == NULL_TREE
)
1438 return build_vector (type
, elts
);
1443 /* Overload that adds a TYPE parameter to be able to dispatch
1444 to fold_relational_const. */
1447 const_binop (enum tree_code code
, tree type
, tree arg1
, tree arg2
)
1449 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1450 return fold_relational_const (code
, type
, arg1
, arg2
);
1452 /* ??? Until we make the const_binop worker take the type of the
1453 result as argument put those cases that need it here. */
1457 if ((TREE_CODE (arg1
) == REAL_CST
1458 && TREE_CODE (arg2
) == REAL_CST
)
1459 || (TREE_CODE (arg1
) == INTEGER_CST
1460 && TREE_CODE (arg2
) == INTEGER_CST
))
1461 return build_complex (type
, arg1
, arg2
);
1464 case VEC_PACK_TRUNC_EXPR
:
1465 case VEC_PACK_FIX_TRUNC_EXPR
:
1467 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1470 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
/ 2
1471 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
/ 2);
1472 if (TREE_CODE (arg1
) != VECTOR_CST
1473 || TREE_CODE (arg2
) != VECTOR_CST
)
1476 elts
= XALLOCAVEC (tree
, nelts
);
1477 if (!vec_cst_ctor_to_array (arg1
, elts
)
1478 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
/ 2))
1481 for (i
= 0; i
< nelts
; i
++)
1483 elts
[i
] = fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
1484 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
1485 TREE_TYPE (type
), elts
[i
]);
1486 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1490 return build_vector (type
, elts
);
1493 case VEC_WIDEN_MULT_LO_EXPR
:
1494 case VEC_WIDEN_MULT_HI_EXPR
:
1495 case VEC_WIDEN_MULT_EVEN_EXPR
:
1496 case VEC_WIDEN_MULT_ODD_EXPR
:
1498 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
);
1499 unsigned int out
, ofs
, scale
;
1502 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
* 2
1503 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
* 2);
1504 if (TREE_CODE (arg1
) != VECTOR_CST
|| TREE_CODE (arg2
) != VECTOR_CST
)
1507 elts
= XALLOCAVEC (tree
, nelts
* 4);
1508 if (!vec_cst_ctor_to_array (arg1
, elts
)
1509 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
* 2))
1512 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
1513 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? nelts
: 0;
1514 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
1515 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : nelts
;
1516 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
1518 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1521 for (out
= 0; out
< nelts
; out
++)
1523 unsigned int in1
= (out
<< scale
) + ofs
;
1524 unsigned int in2
= in1
+ nelts
* 2;
1527 t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in1
]);
1528 t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in2
]);
1530 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
1532 elts
[out
] = const_binop (MULT_EXPR
, t1
, t2
);
1533 if (elts
[out
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[out
]))
1537 return build_vector (type
, elts
);
1543 if (TREE_CODE_CLASS (code
) != tcc_binary
)
1546 /* Make sure type and arg0 have the same saturating flag. */
1547 gcc_checking_assert (TYPE_SATURATING (type
)
1548 == TYPE_SATURATING (TREE_TYPE (arg1
)));
1550 return const_binop (code
, arg1
, arg2
);
1553 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1554 Return zero if computing the constants is not possible. */
1557 const_unop (enum tree_code code
, tree type
, tree arg0
)
1563 case FIX_TRUNC_EXPR
:
1564 case FIXED_CONVERT_EXPR
:
1565 return fold_convert_const (code
, type
, arg0
);
1567 case ADDR_SPACE_CONVERT_EXPR
:
1568 if (integer_zerop (arg0
))
1569 return fold_convert_const (code
, type
, arg0
);
1572 case VIEW_CONVERT_EXPR
:
1573 return fold_view_convert_expr (type
, arg0
);
1577 /* Can't call fold_negate_const directly here as that doesn't
1578 handle all cases and we might not be able to negate some
1580 tree tem
= fold_negate_expr (UNKNOWN_LOCATION
, arg0
);
1581 if (tem
&& CONSTANT_CLASS_P (tem
))
1587 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
1588 return fold_abs_const (arg0
, type
);
1592 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1594 tree ipart
= fold_negate_const (TREE_IMAGPART (arg0
),
1596 return build_complex (type
, TREE_REALPART (arg0
), ipart
);
1601 if (TREE_CODE (arg0
) == INTEGER_CST
)
1602 return fold_not_const (arg0
, type
);
1603 /* Perform BIT_NOT_EXPR on each element individually. */
1604 else if (TREE_CODE (arg0
) == VECTOR_CST
)
1608 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
1610 elements
= XALLOCAVEC (tree
, count
);
1611 for (i
= 0; i
< count
; i
++)
1613 elem
= VECTOR_CST_ELT (arg0
, i
);
1614 elem
= const_unop (BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
1615 if (elem
== NULL_TREE
)
1620 return build_vector (type
, elements
);
1624 case TRUTH_NOT_EXPR
:
1625 if (TREE_CODE (arg0
) == INTEGER_CST
)
1626 return constant_boolean_node (integer_zerop (arg0
), type
);
1630 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1631 return fold_convert (type
, TREE_REALPART (arg0
));
1635 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1636 return fold_convert (type
, TREE_IMAGPART (arg0
));
1639 case VEC_UNPACK_LO_EXPR
:
1640 case VEC_UNPACK_HI_EXPR
:
1641 case VEC_UNPACK_FLOAT_LO_EXPR
:
1642 case VEC_UNPACK_FLOAT_HI_EXPR
:
1644 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1646 enum tree_code subcode
;
1648 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2);
1649 if (TREE_CODE (arg0
) != VECTOR_CST
)
1652 elts
= XALLOCAVEC (tree
, nelts
* 2);
1653 if (!vec_cst_ctor_to_array (arg0
, elts
))
1656 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
1657 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
1660 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
1663 subcode
= FLOAT_EXPR
;
1665 for (i
= 0; i
< nelts
; i
++)
1667 elts
[i
] = fold_convert_const (subcode
, TREE_TYPE (type
), elts
[i
]);
1668 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1672 return build_vector (type
, elts
);
1675 case REDUC_MIN_EXPR
:
1676 case REDUC_MAX_EXPR
:
1677 case REDUC_PLUS_EXPR
:
1679 unsigned int nelts
, i
;
1681 enum tree_code subcode
;
1683 if (TREE_CODE (arg0
) != VECTOR_CST
)
1685 nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
));
1687 elts
= XALLOCAVEC (tree
, nelts
);
1688 if (!vec_cst_ctor_to_array (arg0
, elts
))
1693 case REDUC_MIN_EXPR
: subcode
= MIN_EXPR
; break;
1694 case REDUC_MAX_EXPR
: subcode
= MAX_EXPR
; break;
1695 case REDUC_PLUS_EXPR
: subcode
= PLUS_EXPR
; break;
1696 default: gcc_unreachable ();
1699 for (i
= 1; i
< nelts
; i
++)
1701 elts
[0] = const_binop (subcode
, elts
[0], elts
[i
]);
1702 if (elts
[0] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[0]))
1716 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1717 indicates which particular sizetype to create. */
1720 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1722 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1725 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1726 is a tree code. The type of the result is taken from the operands.
1727 Both must be equivalent integer types, ala int_binop_types_match_p.
1728 If the operands are constant, so is the result. */
1731 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1733 tree type
= TREE_TYPE (arg0
);
1735 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1736 return error_mark_node
;
1738 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1741 /* Handle the special case of two integer constants faster. */
1742 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1744 /* And some specific cases even faster than that. */
1745 if (code
== PLUS_EXPR
)
1747 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1749 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1752 else if (code
== MINUS_EXPR
)
1754 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1757 else if (code
== MULT_EXPR
)
1759 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1763 /* Handle general case of two integer constants. For sizetype
1764 constant calculations we always want to know about overflow,
1765 even in the unsigned case. */
1766 return int_const_binop_1 (code
, arg0
, arg1
, -1);
1769 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1772 /* Given two values, either both of sizetype or both of bitsizetype,
1773 compute the difference between the two values. Return the value
1774 in signed type corresponding to the type of the operands. */
1777 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1779 tree type
= TREE_TYPE (arg0
);
1782 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1785 /* If the type is already signed, just do the simple thing. */
1786 if (!TYPE_UNSIGNED (type
))
1787 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1789 if (type
== sizetype
)
1791 else if (type
== bitsizetype
)
1792 ctype
= sbitsizetype
;
1794 ctype
= signed_type_for (type
);
1796 /* If either operand is not a constant, do the conversions to the signed
1797 type and subtract. The hardware will do the right thing with any
1798 overflow in the subtraction. */
1799 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1800 return size_binop_loc (loc
, MINUS_EXPR
,
1801 fold_convert_loc (loc
, ctype
, arg0
),
1802 fold_convert_loc (loc
, ctype
, arg1
));
1804 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1805 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1806 overflow) and negate (which can't either). Special-case a result
1807 of zero while we're here. */
1808 if (tree_int_cst_equal (arg0
, arg1
))
1809 return build_int_cst (ctype
, 0);
1810 else if (tree_int_cst_lt (arg1
, arg0
))
1811 return fold_convert_loc (loc
, ctype
,
1812 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1814 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1815 fold_convert_loc (loc
, ctype
,
1816 size_binop_loc (loc
,
1821 /* A subroutine of fold_convert_const handling conversions of an
1822 INTEGER_CST to another integer type. */
1825 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1827 /* Given an integer constant, make new constant with new type,
1828 appropriately sign-extended or truncated. Use widest_int
1829 so that any extension is done according ARG1's type. */
1830 return force_fit_type (type
, wi::to_widest (arg1
),
1831 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1832 TREE_OVERFLOW (arg1
));
1835 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1836 to an integer type. */
1839 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
1841 bool overflow
= false;
1844 /* The following code implements the floating point to integer
1845 conversion rules required by the Java Language Specification,
1846 that IEEE NaNs are mapped to zero and values that overflow
1847 the target precision saturate, i.e. values greater than
1848 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1849 are mapped to INT_MIN. These semantics are allowed by the
1850 C and C++ standards that simply state that the behavior of
1851 FP-to-integer conversion is unspecified upon overflow. */
1855 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1859 case FIX_TRUNC_EXPR
:
1860 real_trunc (&r
, VOIDmode
, &x
);
1867 /* If R is NaN, return zero and show we have an overflow. */
1868 if (REAL_VALUE_ISNAN (r
))
1871 val
= wi::zero (TYPE_PRECISION (type
));
1874 /* See if R is less than the lower bound or greater than the
1879 tree lt
= TYPE_MIN_VALUE (type
);
1880 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1881 if (REAL_VALUES_LESS (r
, l
))
1890 tree ut
= TYPE_MAX_VALUE (type
);
1893 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1894 if (REAL_VALUES_LESS (u
, r
))
1903 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
1905 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1909 /* A subroutine of fold_convert_const handling conversions of a
1910 FIXED_CST to an integer type. */
1913 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1916 double_int temp
, temp_trunc
;
1919 /* Right shift FIXED_CST to temp by fbit. */
1920 temp
= TREE_FIXED_CST (arg1
).data
;
1921 mode
= TREE_FIXED_CST (arg1
).mode
;
1922 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
1924 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
1925 HOST_BITS_PER_DOUBLE_INT
,
1926 SIGNED_FIXED_POINT_MODE_P (mode
));
1928 /* Left shift temp to temp_trunc by fbit. */
1929 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
1930 HOST_BITS_PER_DOUBLE_INT
,
1931 SIGNED_FIXED_POINT_MODE_P (mode
));
1935 temp
= double_int_zero
;
1936 temp_trunc
= double_int_zero
;
1939 /* If FIXED_CST is negative, we need to round the value toward 0.
1940 By checking if the fractional bits are not zero to add 1 to temp. */
1941 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1942 && temp_trunc
.is_negative ()
1943 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
1944 temp
+= double_int_one
;
1946 /* Given a fixed-point constant, make new constant with new type,
1947 appropriately sign-extended or truncated. */
1948 t
= force_fit_type (type
, temp
, -1,
1949 (temp
.is_negative ()
1950 && (TYPE_UNSIGNED (type
)
1951 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1952 | TREE_OVERFLOW (arg1
));
1957 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1958 to another floating point type. */
1961 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1963 REAL_VALUE_TYPE value
;
1966 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1967 t
= build_real (type
, value
);
1969 /* If converting an infinity or NAN to a representation that doesn't
1970 have one, set the overflow bit so that we can produce some kind of
1971 error message at the appropriate point if necessary. It's not the
1972 most user-friendly message, but it's better than nothing. */
1973 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
1974 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
1975 TREE_OVERFLOW (t
) = 1;
1976 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
1977 && !MODE_HAS_NANS (TYPE_MODE (type
)))
1978 TREE_OVERFLOW (t
) = 1;
1979 /* Regular overflow, conversion produced an infinity in a mode that
1980 can't represent them. */
1981 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
1982 && REAL_VALUE_ISINF (value
)
1983 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
1984 TREE_OVERFLOW (t
) = 1;
1986 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1990 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1991 to a floating point type. */
1994 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
1996 REAL_VALUE_TYPE value
;
1999 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
2000 t
= build_real (type
, value
);
2002 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2006 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2007 to another fixed-point type. */
2010 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
2012 FIXED_VALUE_TYPE value
;
2016 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
2017 TYPE_SATURATING (type
));
2018 t
= build_fixed (type
, value
);
2020 /* Propagate overflow flags. */
2021 if (overflow_p
| TREE_OVERFLOW (arg1
))
2022 TREE_OVERFLOW (t
) = 1;
2026 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2027 to a fixed-point type. */
2030 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
2032 FIXED_VALUE_TYPE value
;
2037 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
2039 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
2040 if (TREE_INT_CST_NUNITS (arg1
) == 1)
2041 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? (HOST_WIDE_INT
) -1 : 0;
2043 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
2045 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
), di
,
2046 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
2047 TYPE_SATURATING (type
));
2048 t
= build_fixed (type
, value
);
2050 /* Propagate overflow flags. */
2051 if (overflow_p
| TREE_OVERFLOW (arg1
))
2052 TREE_OVERFLOW (t
) = 1;
2056 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2057 to a fixed-point type. */
2060 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
2062 FIXED_VALUE_TYPE value
;
2066 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
2067 &TREE_REAL_CST (arg1
),
2068 TYPE_SATURATING (type
));
2069 t
= build_fixed (type
, value
);
2071 /* Propagate overflow flags. */
2072 if (overflow_p
| TREE_OVERFLOW (arg1
))
2073 TREE_OVERFLOW (t
) = 1;
2077 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2078 type TYPE. If no simplification can be done return NULL_TREE. */
2081 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
2083 if (TREE_TYPE (arg1
) == type
)
2086 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
2087 || TREE_CODE (type
) == OFFSET_TYPE
)
2089 if (TREE_CODE (arg1
) == INTEGER_CST
)
2090 return fold_convert_const_int_from_int (type
, arg1
);
2091 else if (TREE_CODE (arg1
) == REAL_CST
)
2092 return fold_convert_const_int_from_real (code
, type
, arg1
);
2093 else if (TREE_CODE (arg1
) == FIXED_CST
)
2094 return fold_convert_const_int_from_fixed (type
, arg1
);
2096 else if (TREE_CODE (type
) == REAL_TYPE
)
2098 if (TREE_CODE (arg1
) == INTEGER_CST
)
2099 return build_real_from_int_cst (type
, arg1
);
2100 else if (TREE_CODE (arg1
) == REAL_CST
)
2101 return fold_convert_const_real_from_real (type
, arg1
);
2102 else if (TREE_CODE (arg1
) == FIXED_CST
)
2103 return fold_convert_const_real_from_fixed (type
, arg1
);
2105 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
2107 if (TREE_CODE (arg1
) == FIXED_CST
)
2108 return fold_convert_const_fixed_from_fixed (type
, arg1
);
2109 else if (TREE_CODE (arg1
) == INTEGER_CST
)
2110 return fold_convert_const_fixed_from_int (type
, arg1
);
2111 else if (TREE_CODE (arg1
) == REAL_CST
)
2112 return fold_convert_const_fixed_from_real (type
, arg1
);
2117 /* Construct a vector of zero elements of vector type TYPE. */
2120 build_zero_vector (tree type
)
2124 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2125 return build_vector_from_val (type
, t
);
2128 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2131 fold_convertible_p (const_tree type
, const_tree arg
)
2133 tree orig
= TREE_TYPE (arg
);
2138 if (TREE_CODE (arg
) == ERROR_MARK
2139 || TREE_CODE (type
) == ERROR_MARK
2140 || TREE_CODE (orig
) == ERROR_MARK
)
2143 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2146 switch (TREE_CODE (type
))
2148 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2149 case POINTER_TYPE
: case REFERENCE_TYPE
:
2151 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2152 || TREE_CODE (orig
) == OFFSET_TYPE
)
2154 return (TREE_CODE (orig
) == VECTOR_TYPE
2155 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2158 case FIXED_POINT_TYPE
:
2162 return TREE_CODE (type
) == TREE_CODE (orig
);
2169 /* Convert expression ARG to type TYPE. Used by the middle-end for
2170 simple conversions in preference to calling the front-end's convert. */
2173 fold_convert_loc (location_t loc
, tree type
, tree arg
)
2175 tree orig
= TREE_TYPE (arg
);
2181 if (TREE_CODE (arg
) == ERROR_MARK
2182 || TREE_CODE (type
) == ERROR_MARK
2183 || TREE_CODE (orig
) == ERROR_MARK
)
2184 return error_mark_node
;
2186 switch (TREE_CODE (type
))
2189 case REFERENCE_TYPE
:
2190 /* Handle conversions between pointers to different address spaces. */
2191 if (POINTER_TYPE_P (orig
)
2192 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
2193 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
2194 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
2197 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2199 if (TREE_CODE (arg
) == INTEGER_CST
)
2201 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2202 if (tem
!= NULL_TREE
)
2205 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2206 || TREE_CODE (orig
) == OFFSET_TYPE
)
2207 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2208 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2209 return fold_convert_loc (loc
, type
,
2210 fold_build1_loc (loc
, REALPART_EXPR
,
2211 TREE_TYPE (orig
), arg
));
2212 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2213 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2214 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2217 if (TREE_CODE (arg
) == INTEGER_CST
)
2219 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2220 if (tem
!= NULL_TREE
)
2223 else if (TREE_CODE (arg
) == REAL_CST
)
2225 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2226 if (tem
!= NULL_TREE
)
2229 else if (TREE_CODE (arg
) == FIXED_CST
)
2231 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2232 if (tem
!= NULL_TREE
)
2236 switch (TREE_CODE (orig
))
2239 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2240 case POINTER_TYPE
: case REFERENCE_TYPE
:
2241 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2244 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2246 case FIXED_POINT_TYPE
:
2247 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2250 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2251 return fold_convert_loc (loc
, type
, tem
);
2257 case FIXED_POINT_TYPE
:
2258 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2259 || TREE_CODE (arg
) == REAL_CST
)
2261 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2262 if (tem
!= NULL_TREE
)
2263 goto fold_convert_exit
;
2266 switch (TREE_CODE (orig
))
2268 case FIXED_POINT_TYPE
:
2273 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2276 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2277 return fold_convert_loc (loc
, type
, tem
);
2284 switch (TREE_CODE (orig
))
2287 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2288 case POINTER_TYPE
: case REFERENCE_TYPE
:
2290 case FIXED_POINT_TYPE
:
2291 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2292 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2293 fold_convert_loc (loc
, TREE_TYPE (type
),
2294 integer_zero_node
));
2299 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2301 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2302 TREE_OPERAND (arg
, 0));
2303 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2304 TREE_OPERAND (arg
, 1));
2305 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2308 arg
= save_expr (arg
);
2309 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2310 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2311 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2312 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2313 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2321 if (integer_zerop (arg
))
2322 return build_zero_vector (type
);
2323 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2324 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2325 || TREE_CODE (orig
) == VECTOR_TYPE
);
2326 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2329 tem
= fold_ignored_result (arg
);
2330 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2333 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2334 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2338 protected_set_expr_location_unshare (tem
, loc
);
2342 /* Return false if expr can be assumed not to be an lvalue, true
2346 maybe_lvalue_p (const_tree x
)
2348 /* We only need to wrap lvalue tree codes. */
2349 switch (TREE_CODE (x
))
2362 case ARRAY_RANGE_REF
:
2368 case PREINCREMENT_EXPR
:
2369 case PREDECREMENT_EXPR
:
2371 case TRY_CATCH_EXPR
:
2372 case WITH_CLEANUP_EXPR
:
2381 /* Assume the worst for front-end tree codes. */
2382 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2390 /* Return an expr equal to X but certainly not valid as an lvalue. */
2393 non_lvalue_loc (location_t loc
, tree x
)
2395 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2400 if (! maybe_lvalue_p (x
))
2402 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2405 /* When pedantic, return an expr equal to X but certainly not valid as a
2406 pedantic lvalue. Otherwise, return X. */
2409 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2411 return protected_set_expr_location_unshare (x
, loc
);
2414 /* Given a tree comparison code, return the code that is the logical inverse.
2415 It is generally not safe to do this for floating-point comparisons, except
2416 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2417 ERROR_MARK in this case. */
2420 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2422 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2423 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2433 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2435 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2437 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2439 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2453 return UNORDERED_EXPR
;
2454 case UNORDERED_EXPR
:
2455 return ORDERED_EXPR
;
2461 /* Similar, but return the comparison that results if the operands are
2462 swapped. This is safe for floating-point. */
2465 swap_tree_comparison (enum tree_code code
)
2472 case UNORDERED_EXPR
:
2498 /* Convert a comparison tree code from an enum tree_code representation
2499 into a compcode bit-based encoding. This function is the inverse of
2500 compcode_to_comparison. */
2502 static enum comparison_code
2503 comparison_to_compcode (enum tree_code code
)
2520 return COMPCODE_ORD
;
2521 case UNORDERED_EXPR
:
2522 return COMPCODE_UNORD
;
2524 return COMPCODE_UNLT
;
2526 return COMPCODE_UNEQ
;
2528 return COMPCODE_UNLE
;
2530 return COMPCODE_UNGT
;
2532 return COMPCODE_LTGT
;
2534 return COMPCODE_UNGE
;
2540 /* Convert a compcode bit-based encoding of a comparison operator back
2541 to GCC's enum tree_code representation. This function is the
2542 inverse of comparison_to_compcode. */
2544 static enum tree_code
2545 compcode_to_comparison (enum comparison_code code
)
2562 return ORDERED_EXPR
;
2563 case COMPCODE_UNORD
:
2564 return UNORDERED_EXPR
;
2582 /* Return a tree for the comparison which is the combination of
2583 doing the AND or OR (depending on CODE) of the two operations LCODE
2584 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2585 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2586 if this makes the transformation invalid. */
2589 combine_comparisons (location_t loc
,
2590 enum tree_code code
, enum tree_code lcode
,
2591 enum tree_code rcode
, tree truth_type
,
2592 tree ll_arg
, tree lr_arg
)
2594 bool honor_nans
= HONOR_NANS (ll_arg
);
/* NOTE(review): fragment — the tail of combine_comparisons, which merges
   two comparisons (lcode, rcode) joined by truth op CODE into one compcode.
   The function header is outside this chunk, and the extraction appears to
   have dropped short lines (braces, returns, case labels) — restore from
   the original file before compiling.  */
2595 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2596 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
/* AND of two comparisons: intersect the truth sets.  */
2601 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2602 compcode
= lcompcode
& rcompcode
;
/* OR of two comparisons: union of the truth sets.  */
2605 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2606 compcode
= lcompcode
| rcompcode
;
2615 /* Eliminate unordered comparisons, as well as LTGT and ORD
2616 which are not used unless the mode has NaNs. */
2617 compcode
&= ~COMPCODE_UNORD
;
2618 if (compcode
== COMPCODE_LTGT
)
2619 compcode
= COMPCODE_NE
;
2620 else if (compcode
== COMPCODE_ORD
)
2621 compcode
= COMPCODE_TRUE
;
/* With -ftrapping-math, refuse any merge that changes which inputs trap.  */
2623 else if (flag_trapping_math
)
2625 /* Check that the original operation and the optimized ones will trap
2626 under the same condition. */
2627 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2628 && (lcompcode
!= COMPCODE_EQ
)
2629 && (lcompcode
!= COMPCODE_ORD
);
2630 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2631 && (rcompcode
!= COMPCODE_EQ
)
2632 && (rcompcode
!= COMPCODE_ORD
);
2633 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2634 && (compcode
!= COMPCODE_EQ
)
2635 && (compcode
!= COMPCODE_ORD
);
2637 /* In a short-circuited boolean expression the LHS might be
2638 such that the RHS, if evaluated, will never trap. For
2639 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2640 if neither x nor y is NaN. (This is a mixed blessing: for
2641 example, the expression above will never trap, hence
2642 optimizing it to x < y would be invalid). */
2643 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2644 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2647 /* If the comparison was short-circuited, and only the RHS
2648 trapped, we may now generate a spurious trap. */
2650 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2653 /* If we changed the conditions that cause a trap, we lose. */
2654 if ((ltrap
|| rtrap
) != trap
)
/* Fold compile-time-known results straight to boolean constants.  */
2658 if (compcode
== COMPCODE_TRUE
)
2659 return constant_boolean_node (true, truth_type
);
2660 else if (compcode
== COMPCODE_FALSE
)
2661 return constant_boolean_node (false, truth_type
);
2664 enum tree_code tcode
;
2666 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
/* Build the single merged comparison of the common operands.  */
2667 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2671 /* Return nonzero if two operands (typically of the same tree node)
2672 are necessarily equal. If either argument has side-effects this
2673 function returns zero. FLAGS modifies behavior as follows:
2675 If OEP_ONLY_CONST is set, only return nonzero for constants.
2676 This function tests whether the operands are indistinguishable;
2677 it does not test whether they are equal using C's == operation.
2678 The distinction is important for IEEE floating point, because
2679 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2680 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2682 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2683 even though it may hold multiple values during a function.
2684 This is because a GCC tree node guarantees that nothing else is
2685 executed between the evaluation of its "operands" (which may often
2686 be evaluated in arbitrary order). Hence if the operands themselves
2687 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2688 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2689 unset means assuming isochronic (or instantaneous) tree equivalence.
2690 Unless comparing arbitrary expression trees, such as from different
2691 statements, this flag can usually be left unset.
2693 If OEP_PURE_SAME is set, then pure functions with identical arguments
2694 are considered the same. It is used when the caller has other ways
2695 to ensure that global memory is unchanged in between. */
/* NOTE(review): the extraction dropped many short lines of this function
   (return type, braces, `return 0;`/`return 1;`, case labels such as
   INTEGER_CST/REAL_CST/VECTOR_CST, MEM_REF, ARRAY_REF, COMPONENT_REF,
   ADDR_EXPR, CALL_EXPR) — restore them from the original file.  */
2698 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2700 /* If either is ERROR_MARK, they aren't equal. */
2701 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2702 || TREE_TYPE (arg0
) == error_mark_node
2703 || TREE_TYPE (arg1
) == error_mark_node
)
2706 /* Similar, if either does not have a type (like a released SSA name),
2707 they aren't equal. */
2708 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2711 /* Check equality of integer constants before bailing out due to
2712 precision differences. */
2713 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2714 return tree_int_cst_equal (arg0
, arg1
);
2716 /* If both types don't have the same signedness, then we can't consider
2717 them equal. We must check this before the STRIP_NOPS calls
2718 because they may change the signedness of the arguments. As pointers
2719 strictly don't have a signedness, require either two pointers or
2720 two non-pointers as well. */
2721 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2722 || POINTER_TYPE_P (TREE_TYPE (arg0
)) != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2725 /* We cannot consider pointers to different address space equal. */
2726 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && POINTER_TYPE_P (TREE_TYPE (arg1
))
2727 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2728 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2731 /* If both types don't have the same precision, then it is not safe
2733 if (element_precision (TREE_TYPE (arg0
))
2734 != element_precision (TREE_TYPE (arg1
)))
2740 /* In case both args are comparisons but with different comparison
2741 code, try to swap the comparison operands of one arg to produce
2742 a match and compare that variant. */
2743 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2744 && COMPARISON_CLASS_P (arg0
)
2745 && COMPARISON_CLASS_P (arg1
))
2747 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2749 if (TREE_CODE (arg0
) == swap_code
)
2750 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2751 TREE_OPERAND (arg1
, 1), flags
)
2752 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2753 TREE_OPERAND (arg1
, 0), flags
);
2756 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2757 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2758 && !(CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
)))
2761 /* This is needed for conversions and for COMPONENT_REF.
2762 Might as well play it safe and always test this. */
2763 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2764 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2765 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2768 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2769 We don't care about side effects in that case because the SAVE_EXPR
2770 takes care of that for us. In all other cases, two expressions are
2771 equal if they have no side effects. If we have two identical
2772 expressions with side effects that should be treated the same due
2773 to the only side effects being identical SAVE_EXPR's, that will
2774 be detected in the recursive calls below.
2775 If we are taking an invariant address of two identical objects
2776 they are necessarily equal as well. */
2777 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2778 && (TREE_CODE (arg0
) == SAVE_EXPR
2779 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2780 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2783 /* Next handle constant cases, those for which we can return 1 even
2784 if ONLY_CONST is set. */
2785 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2786 switch (TREE_CODE (arg0
))
/* NOTE(review): constant cases — the case labels (INTEGER_CST, FIXED_CST,
   REAL_CST, VECTOR_CST, COMPLEX_CST, STRING_CST, ADDR_EXPR) were dropped
   by the extraction; only the case bodies remain below.  */
2789 return tree_int_cst_equal (arg0
, arg1
);
2792 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2793 TREE_FIXED_CST (arg1
));
2796 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2797 TREE_REAL_CST (arg1
)))
2801 if (!HONOR_SIGNED_ZEROS (arg0
))
2803 /* If we do not distinguish between signed and unsigned zero,
2804 consider them equal. */
2805 if (real_zerop (arg0
) && real_zerop (arg1
))
2814 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2817 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2819 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2820 VECTOR_CST_ELT (arg1
, i
), flags
))
2827 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2829 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2833 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2834 && ! memcmp (TREE_STRING_POINTER (arg0
),
2835 TREE_STRING_POINTER (arg1
),
2836 TREE_STRING_LENGTH (arg0
)));
2839 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2840 TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
)
2841 ? OEP_CONSTANT_ADDRESS_OF
| OEP_ADDRESS_OF
: 0);
2846 if (flags
& OEP_ONLY_CONST
)
2849 /* Define macros to test an operand from arg0 and arg1 for equality and a
2850 variant that allows null and views null as being different from any
2851 non-null value. In the latter case, if either is null, the both
2852 must be; otherwise, do the normal comparison. */
2853 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2854 TREE_OPERAND (arg1, N), flags)
2856 #define OP_SAME_WITH_NULL(N) \
2857 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2858 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2860 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2863 /* Two conversions are equal only if signedness and modes match. */
2864 switch (TREE_CODE (arg0
))
2867 case FIX_TRUNC_EXPR
:
2868 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2869 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2879 case tcc_comparison
:
2881 if (OP_SAME (0) && OP_SAME (1))
2884 /* For commutative ops, allow the other order. */
2885 return (commutative_tree_code (TREE_CODE (arg0
))
2886 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2887 TREE_OPERAND (arg1
, 1), flags
)
2888 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2889 TREE_OPERAND (arg1
, 0), flags
));
2892 /* If either of the pointer (or reference) expressions we are
2893 dereferencing contain a side effect, these cannot be equal,
2894 but their addresses can be. */
2895 if ((flags
& OEP_CONSTANT_ADDRESS_OF
) == 0
2896 && (TREE_SIDE_EFFECTS (arg0
)
2897 || TREE_SIDE_EFFECTS (arg1
)))
2900 switch (TREE_CODE (arg0
))
2903 if (!(flags
& OEP_ADDRESS_OF
)
2904 && (TYPE_ALIGN (TREE_TYPE (arg0
))
2905 != TYPE_ALIGN (TREE_TYPE (arg1
))))
/* Once past the outermost reference, address-taking flags no longer apply.  */
2907 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2914 case TARGET_MEM_REF
:
2916 /* Require equal access sizes, and similar pointer types.
2917 We can have incomplete types for array references of
2918 variable-sized arrays from the Fortran frontend
2919 though. Also verify the types are compatible. */
2920 if (!((TYPE_SIZE (TREE_TYPE (arg0
)) == TYPE_SIZE (TREE_TYPE (arg1
))
2921 || (TYPE_SIZE (TREE_TYPE (arg0
))
2922 && TYPE_SIZE (TREE_TYPE (arg1
))
2923 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2924 TYPE_SIZE (TREE_TYPE (arg1
)), flags
)))
2925 && types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
))
2926 && ((flags
& OEP_ADDRESS_OF
)
2927 || (alias_ptr_types_compatible_p
2928 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
2929 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
2930 && (MR_DEPENDENCE_CLIQUE (arg0
)
2931 == MR_DEPENDENCE_CLIQUE (arg1
))
2932 && (MR_DEPENDENCE_BASE (arg0
)
2933 == MR_DEPENDENCE_BASE (arg1
))
2934 && (TYPE_ALIGN (TREE_TYPE (arg0
))
2935 == TYPE_ALIGN (TREE_TYPE (arg1
)))))))
2937 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2938 return (OP_SAME (0) && OP_SAME (1)
2939 /* TARGET_MEM_REF require equal extra operands. */
2940 && (TREE_CODE (arg0
) != TARGET_MEM_REF
2941 || (OP_SAME_WITH_NULL (2)
2942 && OP_SAME_WITH_NULL (3)
2943 && OP_SAME_WITH_NULL (4))));
2946 case ARRAY_RANGE_REF
:
2947 /* Operands 2 and 3 may be null.
2948 Compare the array index by value if it is constant first as we
2949 may have different types but same value here. */
2952 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2953 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
2954 TREE_OPERAND (arg1
, 1))
2956 && OP_SAME_WITH_NULL (2)
2957 && OP_SAME_WITH_NULL (3));
2960 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2961 may be NULL when we're called to compare MEM_EXPRs. */
2962 if (!OP_SAME_WITH_NULL (0)
2965 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2966 return OP_SAME_WITH_NULL (2);
2971 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2972 return OP_SAME (1) && OP_SAME (2);
2978 case tcc_expression
:
2979 switch (TREE_CODE (arg0
))
2982 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2983 TREE_OPERAND (arg1
, 0),
2984 flags
| OEP_ADDRESS_OF
);
2986 case TRUTH_NOT_EXPR
:
2989 case TRUTH_ANDIF_EXPR
:
2990 case TRUTH_ORIF_EXPR
:
2991 return OP_SAME (0) && OP_SAME (1);
2994 case WIDEN_MULT_PLUS_EXPR
:
2995 case WIDEN_MULT_MINUS_EXPR
:
2998 /* The multiplcation operands are commutative. */
3001 case TRUTH_AND_EXPR
:
3003 case TRUTH_XOR_EXPR
:
3004 if (OP_SAME (0) && OP_SAME (1))
3007 /* Otherwise take into account this is a commutative operation. */
3008 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3009 TREE_OPERAND (arg1
, 1), flags
)
3010 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3011 TREE_OPERAND (arg1
, 0), flags
));
3016 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3023 switch (TREE_CODE (arg0
))
3026 if ((CALL_EXPR_FN (arg0
) == NULL_TREE
)
3027 != (CALL_EXPR_FN (arg1
) == NULL_TREE
))
3028 /* If not both CALL_EXPRs are either internal or normal function
3029 functions, then they are not equal. */
3031 else if (CALL_EXPR_FN (arg0
) == NULL_TREE
)
3033 /* If the CALL_EXPRs call different internal functions, then they
3035 if (CALL_EXPR_IFN (arg0
) != CALL_EXPR_IFN (arg1
))
3040 /* If the CALL_EXPRs call different functions, then they are not
3042 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3048 unsigned int cef
= call_expr_flags (arg0
);
3049 if (flags
& OEP_PURE_SAME
)
3050 cef
&= ECF_CONST
| ECF_PURE
;
3057 /* Now see if all the arguments are the same. */
3059 const_call_expr_arg_iterator iter0
, iter1
;
3061 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3062 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3064 a0
= next_const_call_expr_arg (&iter0
),
3065 a1
= next_const_call_expr_arg (&iter1
))
3066 if (! operand_equal_p (a0
, a1
, flags
))
3069 /* If we get here and both argument lists are exhausted
3070 then the CALL_EXPRs are equal. */
3071 return ! (a0
|| a1
);
3077 case tcc_declaration
:
3078 /* Consider __builtin_sqrt equal to sqrt. */
3079 return (TREE_CODE (arg0
) == FUNCTION_DECL
3080 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
3081 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3082 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
3089 #undef OP_SAME_WITH_NULL
3092 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3093 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3095 When in doubt, return 0. */
/* NOTE(review): the extraction dropped the return type, braces and the
   `return 1;`/`return 0;` lines after the early-exit tests below —
   restore them from the original file.  */
3098 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
3100 int unsignedp1
, unsignedpo
;
3101 tree primarg0
, primarg1
, primother
;
3102 unsigned int correct_width
;
/* Trivially equal operands need no shorten_compare reasoning.  */
3104 if (operand_equal_p (arg0
, arg1
, 0))
3107 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
3108 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
3111 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3112 and see if the inner values are the same. This removes any
3113 signedness comparison, which doesn't matter here. */
3114 primarg0
= arg0
, primarg1
= arg1
;
3115 STRIP_NOPS (primarg0
);
3116 STRIP_NOPS (primarg1
);
3117 if (operand_equal_p (primarg0
, primarg1
, 0))
3120 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3121 actual comparison operand, ARG0.
3123 First throw away any conversions to wider types
3124 already present in the operands. */
3126 primarg1
= get_narrower (arg1
, &unsignedp1
);
3127 primother
= get_narrower (other
, &unsignedpo
);
3129 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
3130 if (unsignedp1
== unsignedpo
3131 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
3132 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
3134 tree type
= TREE_TYPE (arg0
);
3136 /* Make sure shorter operand is extended the right way
3137 to match the longer operand. */
3138 primarg1
= fold_convert (signed_or_unsigned_type_for
3139 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
3141 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
3148 /* See if ARG is an expression that is either a comparison or is performing
3149 arithmetic on comparisons. The comparisons must only be comparing
3150 two different values, which will be stored in *CVAL1 and *CVAL2; if
3151 they are nonzero it means that some operands have already been found.
3152 No variables may be used anywhere else in the expression except in the
3153 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3154 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3156 If this is true, return 1. Otherwise, return zero. */
/* NOTE(review): the extraction dropped the return type, braces, several
   case labels (tcc_unary/tcc_binary/constants) and `return 0;`/`return 1;`
   lines of this function — restore them from the original file.  */
3159 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
3161 enum tree_code code
= TREE_CODE (arg
);
3162 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3164 /* We can handle some of the tcc_expression cases here. */
3165 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3167 else if (tclass
== tcc_expression
3168 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
3169 || code
== COMPOUND_EXPR
))
3170 tclass
= tcc_binary
;
3172 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
3173 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
3175 /* If we've already found a CVAL1 or CVAL2, this expression is
3176 two complex to handle. */
3177 if (*cval1
|| *cval2
)
/* Unary case: recurse into the sole operand.  */
3187 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
/* Binary case: both operands must qualify.  */
3190 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
3191 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3192 cval1
, cval2
, save_p
));
3197 case tcc_expression
:
3198 if (code
== COND_EXPR
)
3199 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
3200 cval1
, cval2
, save_p
)
3201 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3202 cval1
, cval2
, save_p
)
3203 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
3204 cval1
, cval2
, save_p
));
3207 case tcc_comparison
:
3208 /* First see if we can handle the first operand, then the second. For
3209 the second operand, we know *CVAL1 can't be zero. It must be that
3210 one side of the comparison is each of the values; test for the
3211 case where this isn't true by failing if the two operands
3214 if (operand_equal_p (TREE_OPERAND (arg
, 0),
3215 TREE_OPERAND (arg
, 1), 0))
3219 *cval1
= TREE_OPERAND (arg
, 0);
3220 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
3222 else if (*cval2
== 0)
3223 *cval2
= TREE_OPERAND (arg
, 0);
3224 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
/* Same matching logic for the second comparison operand.  */
3229 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
3231 else if (*cval2
== 0)
3232 *cval2
= TREE_OPERAND (arg
, 1);
3233 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
3245 /* ARG is a tree that is known to contain just arithmetic operations and
3246 comparisons. Evaluate the operations in the tree substituting NEW0 for
3247 any occurrence of OLD0 as an operand of a comparison and likewise for
/* NOTE(review): the tail of the above comment (NEW1/OLD1), the return
   type, braces, case labels (tcc_unary/tcc_binary/COMPOUND_EXPR/COND_EXPR)
   and the comparison-operand substitution assignments were dropped by the
   extraction — restore from the original file.  */
3251 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
3252 tree old1
, tree new1
)
3254 tree type
= TREE_TYPE (arg
);
3255 enum tree_code code
= TREE_CODE (arg
);
3256 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3258 /* We can handle some of the tcc_expression cases here. */
3259 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3261 else if (tclass
== tcc_expression
3262 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
3263 tclass
= tcc_binary
;
/* Unary: rebuild with the substituted operand.  */
3268 return fold_build1_loc (loc
, code
, type
,
3269 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3270 old0
, new0
, old1
, new1
));
/* Binary: rebuild with both substituted operands.  */
3273 return fold_build2_loc (loc
, code
, type
,
3274 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3275 old0
, new0
, old1
, new1
),
3276 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3277 old0
, new0
, old1
, new1
));
3279 case tcc_expression
:
3283 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
3287 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
3291 return fold_build3_loc (loc
, code
, type
,
3292 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3293 old0
, new0
, old1
, new1
),
3294 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3295 old0
, new0
, old1
, new1
),
3296 eval_subst (loc
, TREE_OPERAND (arg
, 2),
3297 old0
, new0
, old1
, new1
));
3301 /* Fall through - ??? */
3303 case tcc_comparison
:
3305 tree arg0
= TREE_OPERAND (arg
, 0);
3306 tree arg1
= TREE_OPERAND (arg
, 1);
3308 /* We need to check both for exact equality and tree equality. The
3309 former will be true if the operand has a side-effect. In that
3310 case, we know the operand occurred exactly once. */
3312 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3314 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3317 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3319 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
3322 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
3330 /* Return a tree for the case when the result of an expression is RESULT
3331 converted to TYPE and OMITTED was previously an operand of the expression
3332 but is now not needed (e.g., we folded OMITTED * 0).
3334 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3335 the conversion of RESULT to TYPE. */
3338 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
3340 tree t
= fold_convert_loc (loc
, type
, result
);
3342 /* If the resulting operand is an empty statement, just return the omitted
3343 statement casted to void. */
3344 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
3345 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3346 fold_ignored_result (omitted
));
3348 if (TREE_SIDE_EFFECTS (omitted
))
3349 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3350 fold_ignored_result (omitted
), t
);
3352 return non_lvalue_loc (loc
, t
);
3355 /* Return a tree for the case when the result of an expression is RESULT
3356 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3357 of the expression but are now not needed.
3359 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3360 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3361 evaluated before OMITTED2. Otherwise, if neither has side effects,
3362 just do the conversion of RESULT to TYPE. */
3365 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3366 tree omitted1
, tree omitted2
)
3368 tree t
= fold_convert_loc (loc
, type
, result
);
3370 if (TREE_SIDE_EFFECTS (omitted2
))
3371 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3372 if (TREE_SIDE_EFFECTS (omitted1
))
3373 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3375 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3379 /* Return a simplified tree node for the truth-negation of ARG. This
3380 never alters ARG itself. We assume that ARG is an operation that
3381 returns a truth value (0 or 1).
3383 FIXME: one would think we would fold the result, but it causes
3384 problems with the dominator optimizer. */
/* NOTE(review): the extraction dropped the return type, braces, the
   `switch (code)` header and several case labels (INTEGER_CST,
   TRUTH_OR_EXPR, COND_EXPR, COMPOUND_EXPR, BIT_AND_EXPR, SAVE_EXPR...)
   plus some `return NULL_TREE;` lines — restore from the original file.  */
3387 fold_truth_not_expr (location_t loc
, tree arg
)
3389 tree type
= TREE_TYPE (arg
);
3390 enum tree_code code
= TREE_CODE (arg
);
3391 location_t loc1
, loc2
;
3393 /* If this is a comparison, we can simply invert it, except for
3394 floating-point non-equality comparisons, in which case we just
3395 enclose a TRUTH_NOT_EXPR around what we have. */
3397 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3399 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3400 if (FLOAT_TYPE_P (op_type
)
3401 && flag_trapping_math
3402 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3403 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3406 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
3407 if (code
== ERROR_MARK
)
3410 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3411 TREE_OPERAND (arg
, 1));
/* Constant truth value: negate it directly.  */
3417 return constant_boolean_node (integer_zerop (arg
), type
);
/* De Morgan: !(a & b) == !a | !b.  */
3419 case TRUTH_AND_EXPR
:
3420 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3421 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3422 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3423 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3424 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
/* De Morgan for the OR case (dropped case label).  */
3427 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3428 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3429 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3430 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3431 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3433 case TRUTH_XOR_EXPR
:
3434 /* Here we can invert either operand. We invert the first operand
3435 unless the second operand is a TRUTH_NOT_EXPR in which case our
3436 result is the XOR of the first operand with the inside of the
3437 negation of the second operand. */
3439 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3440 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3441 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3443 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3444 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3445 TREE_OPERAND (arg
, 1));
3447 case TRUTH_ANDIF_EXPR
:
3448 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3449 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3450 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3451 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3452 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3454 case TRUTH_ORIF_EXPR
:
3455 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3456 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3457 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3458 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3459 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
/* Double negation cancels.  */
3461 case TRUTH_NOT_EXPR
:
3462 return TREE_OPERAND (arg
, 0);
/* COND_EXPR (dropped case label): invert both non-void arms.  */
3466 tree arg1
= TREE_OPERAND (arg
, 1);
3467 tree arg2
= TREE_OPERAND (arg
, 2);
3469 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3470 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3472 /* A COND_EXPR may have a throw as one operand, which
3473 then has void type. Just leave void operands
3475 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3476 VOID_TYPE_P (TREE_TYPE (arg1
))
3477 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3478 VOID_TYPE_P (TREE_TYPE (arg2
))
3479 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
/* COMPOUND_EXPR (dropped case label): negate only the value operand.  */
3483 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3484 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3485 TREE_OPERAND (arg
, 0),
3486 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3488 case NON_LVALUE_EXPR
:
3489 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3490 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3493 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3494 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3496 /* ... fall through ... */
3499 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3500 return build1_loc (loc
, TREE_CODE (arg
), type
,
3501 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
/* BIT_AND_EXPR with constant 1 (dropped case label).  */
3504 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3506 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3509 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3511 case CLEANUP_POINT_EXPR
:
3512 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3513 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3514 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3521 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3522 assume that ARG is an operation that returns a truth value (0 or 1
3523 for scalars, 0 or -1 for vectors). Return the folded expression if
3524 folding is successful. Otherwise, return NULL_TREE. */
/* NOTE(review): the return type, braces and the tail of the return
   expression (the tree-code and operand arguments to fold_unary_loc)
   were dropped by the extraction — restore from the original file.  */
3527 fold_invert_truthvalue (location_t loc
, tree arg
)
3529 tree type
= TREE_TYPE (arg
);
3530 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
3536 /* Return a simplified tree node for the truth-negation of ARG. This
3537 never alters ARG itself. We assume that ARG is an operation that
3538 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
/* NOTE(review): the return type, braces, the ERROR_MARK early return and
   the tail of the final return expression were dropped by the
   extraction — restore from the original file.  */
3541 invert_truthvalue_loc (location_t loc
, tree arg
)
3543 if (TREE_CODE (arg
) == ERROR_MARK
)
3546 tree type
= TREE_TYPE (arg
);
3547 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
3553 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3554 operands are another bit-wise operation with a common input. If so,
3555 distribute the bit operations to save an operation and possibly two if
3556 constants are involved. For example, convert
3557 (A | B) & (A | C) into A | (B & C)
3558 Further simplification will occur if B and C are constants.
3560 If this optimization cannot be done, 0 will be returned. */
/* NOTE(review): the return type, the declarations of common/left/right,
   braces, `return 0;` and the final `else return 0;` were dropped by the
   extraction — restore from the original file.  */
3563 distribute_bit_expr (location_t loc
, enum tree_code code
, tree type
,
3564 tree arg0
, tree arg1
)
/* Both operands must be the same kind of AND/IOR, different from CODE.  */
3569 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3570 || TREE_CODE (arg0
) == code
3571 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3572 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
/* Find which of the four operand pairings shares a common input.  */
3575 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3577 common
= TREE_OPERAND (arg0
, 0);
3578 left
= TREE_OPERAND (arg0
, 1);
3579 right
= TREE_OPERAND (arg1
, 1);
3581 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3583 common
= TREE_OPERAND (arg0
, 0);
3584 left
= TREE_OPERAND (arg0
, 1);
3585 right
= TREE_OPERAND (arg1
, 0);
3587 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3589 common
= TREE_OPERAND (arg0
, 1);
3590 left
= TREE_OPERAND (arg0
, 0);
3591 right
= TREE_OPERAND (arg1
, 1);
3593 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3595 common
= TREE_OPERAND (arg0
, 1);
3596 left
= TREE_OPERAND (arg0
, 0);
3597 right
= TREE_OPERAND (arg1
, 0);
/* Rebuild as COMMON op (LEFT code RIGHT) in the requested TYPE.  */
3602 common
= fold_convert_loc (loc
, type
, common
);
3603 left
= fold_convert_loc (loc
, type
, left
);
3604 right
= fold_convert_loc (loc
, type
, right
);
3605 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, common
,
3606 fold_build2_loc (loc
, code
, type
, left
, right
));
3609 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3610 with code CODE. This optimization is unsafe. */
/* NOTE(review): the return type, braces, the `if (mul0 == mul1 ...)`
   condition line preceding 3620, and the trailing `return NULL_TREE;`
   were dropped by the extraction — restore from the original file.  */
3612 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3613 tree arg0
, tree arg1
)
3615 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3616 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3618 /* (A / C) +- (B / C) -> (A +- B) / C. */
3620 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3621 TREE_OPERAND (arg1
, 1), 0))
3622 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3623 fold_build2_loc (loc
, code
, type
,
3624 TREE_OPERAND (arg0
, 0),
3625 TREE_OPERAND (arg1
, 0)),
3626 TREE_OPERAND (arg0
, 1));
3628 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3629 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3630 TREE_OPERAND (arg1
, 0), 0)
3631 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3632 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3634 REAL_VALUE_TYPE r0
, r1
;
3635 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3636 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
/* Take reciprocals of the divisors, then combine with CODE.  */
3638 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3640 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3641 real_arithmetic (&r0
, code
, &r0
, &r1
);
3642 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3643 TREE_OPERAND (arg0
, 0),
3644 build_real (type
, r0
));
3650 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3651 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
/* NOTE(review): the return type, braces, the initial assignment of
   bftype, and the `return result;` line were dropped by the
   extraction — restore from the original file.  */
3654 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3655 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
, int unsignedp
)
3657 tree result
, bftype
;
/* If INNER already covers exactly BITSIZE bits, a conversion suffices.  */
3661 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3662 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3663 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3664 && tree_fits_shwi_p (size
)
3665 && tree_to_shwi (size
) == bitsize
)
3666 return fold_convert_loc (loc
, type
, inner
);
/* Otherwise pick an integer type of the right width and signedness.  */
3670 if (TYPE_PRECISION (bftype
) != bitsize
3671 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3672 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3674 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3675 size_int (bitsize
), bitsize_int (bitpos
));
3678 result
= fold_convert_loc (loc
, type
, result
);
3683 /* Optimize a bit-field compare.
3685 There are two cases: First is a compare against a constant and the
3686 second is a comparison of two items where the fields are at the same
3687 bit position relative to the start of a chunk (byte, halfword, word)
3688 large enough to contain it. In these cases we can avoid the shift
3689 implicit in bitfield extractions.
3691 For constants, we emit a compare of the shifted constant with the
3692 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3693 compared. For two fields at the same position, we do the ANDs with the
3694 similar mask and compare the result of the ANDs.
3696 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3697 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3698 are the left and right operands of the comparison, respectively.
3700 If the optimization described above can be done, we return the resulting
3701 tree. Otherwise we return zero. */
3704 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3705 tree compare_type
, tree lhs
, tree rhs
)
3707 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3708 tree type
= TREE_TYPE (lhs
);
/* Nonzero when RHS is an integer constant; selects the constant-vs-field
   path below instead of the field-vs-field path.  */
3710 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3711 machine_mode lmode
, rmode
, nmode
;
3712 int lunsignedp
, runsignedp
;
3713 int lvolatilep
= 0, rvolatilep
= 0;
3714 tree linner
, rinner
= NULL_TREE
;
3718 /* Get all the information about the extractions being done. If the bit size
3719 is the same as the size of the underlying object, we aren't doing an
3720 extraction at all and so can do nothing. We also don't want to
3721 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3722 then will no longer be able to replace it. */
3723 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3724 &lunsignedp
, &lvolatilep
, false);
3725 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3726 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
|| lvolatilep
)
3731 /* If this is not a constant, we can only do something if bit positions,
3732 sizes, and signedness are the same. */
3733 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3734 &runsignedp
, &rvolatilep
, false);
3736 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3737 || lunsignedp
!= runsignedp
|| offset
!= 0
3738 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
|| rvolatilep
)
3742 /* See if we can find a mode to refer to this field. We should be able to,
3743 but fail if we can't. */
3744 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3745 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3746 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3747 TYPE_ALIGN (TREE_TYPE (rinner
))),
3749 if (nmode
== VOIDmode
)
3752 /* Set signed and unsigned types of the precision of this mode for the
3754 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3756 /* Compute the bit position and size for the new reference and our offset
3757 within it. If the new reference is the same size as the original, we
3758 won't optimize anything, so return zero. */
3759 nbitsize
= GET_MODE_BITSIZE (nmode
);
/* Round the field position down to an NBITSIZE boundary (NBITSIZE is a
   mode bitsize, so the mask below clears the low-order position bits).  */
3760 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3762 if (nbitsize
== lbitsize
)
/* On big-endian targets bit positions count from the opposite end of
   the containing unit, so re-express LBITPOS accordingly.  */
3765 if (BYTES_BIG_ENDIAN
)
3766 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3768 /* Make the mask to be used against the extracted field. */
3769 mask
= build_int_cst_type (unsigned_type
, -1);
3770 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3771 mask
= const_binop (RSHIFT_EXPR
, mask
,
3772 size_int (nbitsize
- lbitsize
- lbitpos
));
3775 /* If not comparing with constant, just rework the comparison
3777 return fold_build2_loc (loc
, code
, compare_type
,
3778 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3779 make_bit_field_ref (loc
, linner
,
3784 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3785 make_bit_field_ref (loc
, rinner
,
3791 /* Otherwise, we are handling the constant case. See if the constant is too
3792 big for the field. Warn and return a tree for 0 (false) if so. We do
3793 this not only for its own sake, but to avoid having to test for this
3794 error case below. If we didn't, we might generate wrong code.
3796 For unsigned fields, the constant shifted right by the field length should
3797 be all zero. For signed fields, the high-order bits should agree with
3802 if (wi::lrshift (rhs
, lbitsize
) != 0)
3804 warning (0, "comparison is always %d due to width of bit-field",
3806 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
/* Signed field: the bits above the field must all equal the sign bit,
   i.e. the arithmetic shift by LBITSIZE - 1 is all zeros or all ones.  */
3811 wide_int tem
= wi::arshift (rhs
, lbitsize
- 1);
3812 if (tem
!= 0 && tem
!= -1)
3814 warning (0, "comparison is always %d due to width of bit-field",
3816 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3820 /* Single-bit compares should always be against zero. */
3821 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3823 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3824 rhs
= build_int_cst (type
, 0);
3827 /* Make a new bitfield reference, shift the constant over the
3828 appropriate number of bits and mask it with the computed mask
3829 (in case this was a signed field). If we changed it, make a new one. */
3830 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3832 rhs
= const_binop (BIT_AND_EXPR
,
3833 const_binop (LSHIFT_EXPR
,
3834 fold_convert_loc (loc
, unsigned_type
, rhs
),
3835 size_int (lbitpos
)),
3838 lhs
= build2_loc (loc
, code
, compare_type
,
3839 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3843 /* Subroutine for fold_truth_andor_1: decode a field reference.
3845 If EXP is a comparison reference, we return the innermost reference.
3847 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3848 set to the starting bit number.
3850 If the innermost field can be completely contained in a mode-sized
3851 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3853 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3854 otherwise it is not changed.
3856 *PUNSIGNEDP is set to the signedness of the field.
3858 *PMASK is set to the mask used. This is either contained in a
3859 BIT_AND_EXPR or derived from the width of the field.
3861 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3863 Return 0 if this is not a component reference or is one that we can't
3864 do anything with. */
3867 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3868 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
3869 int *punsignedp
, int *pvolatilep
,
3870 tree
*pmask
, tree
*pand_mask
)
3872 tree outer_type
= 0;
3874 tree mask
, inner
, offset
;
3876 unsigned int precision
;
3878 /* All the optimizations using this function assume integer fields.
3879 There are problems with FP fields since the type_for_size call
3880 below can fail for, e.g., XFmode. */
3881 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3884 /* We are interested in the bare arrangement of bits, so strip everything
3885 that doesn't affect the machine mode. However, record the type of the
3886 outermost expression if it may matter below. */
3887 if (CONVERT_EXPR_P (exp
)
3888 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3889 outer_type
= TREE_TYPE (exp
);
/* Peel off an explicit AND mask, remembering it; only a constant mask
   is usable.  */
3892 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3894 and_mask
= TREE_OPERAND (exp
, 1);
3895 exp
= TREE_OPERAND (exp
, 0);
3896 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3897 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3901 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3902 punsignedp
, pvolatilep
, false);
/* Reject non-references (unless masked), variable offsets, and
   PLACEHOLDER_EXPRs.  */
3903 if ((inner
== exp
&& and_mask
== 0)
3904 || *pbitsize
< 0 || offset
!= 0
3905 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3908 /* If the number of bits in the reference is the same as the bitsize of
3909 the outer type, then the outer type gives the signedness. Otherwise
3910 (in case of a small bitfield) the signedness is unchanged. */
3911 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3912 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3914 /* Compute the mask to access the bitfield. */
3915 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3916 precision
= TYPE_PRECISION (unsigned_type
);
/* Build an all-ones mask of the *PBITSIZE low-order bits by shifting -1
   up and back down within PRECISION bits.  */
3918 mask
= build_int_cst_type (unsigned_type
, -1);
3920 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3921 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3923 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3925 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3926 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3929 *pand_mask
= and_mask
;
3933 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3934 bit positions and MASK is SIGNED. */
3937 all_ones_mask_p (const_tree mask
, unsigned int size
)
3939 tree type
= TREE_TYPE (mask
);
3940 unsigned int precision
= TYPE_PRECISION (type
);
3942 /* If this function returns true when the type of the mask is
3943 UNSIGNED, then there will be errors. In particular see
3944 gcc.c-torture/execute/990326-1.c. There does not appear to be
3945 any documentation paper trail as to why this is so. But the pre
3946 wide-int worked with that restriction and it has been preserved
3948 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
/* Compare MASK against an all-ones value of SIZE low bits within
   PRECISION bits.  */
3951 return wi::mask (size
, false, precision
) == mask
;
3954 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3955 represents the sign bit of EXP's type. If EXP represents a sign
3956 or zero extension, also test VAL against the unextended type.
3957 The return value is the (sub)expression whose sign bit is VAL,
3958 or NULL_TREE otherwise. */
3961 sign_bit_p (tree exp
, const_tree val
)
3966 /* Tree EXP must have an integral type. */
3967 t
= TREE_TYPE (exp
);
3968 if (! INTEGRAL_TYPE_P (t
))
3971 /* Tree VAL must be an integer constant. */
3972 if (TREE_CODE (val
) != INTEGER_CST
3973 || TREE_OVERFLOW (val
))
3976 width
= TYPE_PRECISION (t
);
/* VAL is the sign bit iff only bit WIDTH - 1 is set.  */
3977 if (wi::only_sign_bit_p (val
, width
))
3980 /* Handle extension from a narrower type. */
3981 if (TREE_CODE (exp
) == NOP_EXPR
3982 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
/* Recurse on the unextended operand; VAL may be its sign bit.  */
3983 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3988 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3989 to be evaluated unconditionally. */
3992 simple_operand_p (const_tree exp
)
3994 /* Strip any conversions that don't change the machine mode. */
/* Constants, SSA names, and well-behaved non-volatile, non-global
   declarations are cheap and safe to evaluate unconditionally.  */
3997 return (CONSTANT_CLASS_P (exp
)
3998 || TREE_CODE (exp
) == SSA_NAME
4000 && ! TREE_ADDRESSABLE (exp
)
4001 && ! TREE_THIS_VOLATILE (exp
)
4002 && ! DECL_NONLOCAL (exp
)
4003 /* Don't regard global variables as simple. They may be
4004 allocated in ways unknown to the compiler (shared memory,
4005 #pragma weak, etc). */
4006 && ! TREE_PUBLIC (exp
)
4007 && ! DECL_EXTERNAL (exp
)
4008 /* Weakrefs are not safe to be read, since they can be NULL.
4009 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4010 have DECL_WEAK flag set. */
4011 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
4012 /* Loading a static variable is unduly expensive, but global
4013 registers aren't expensive. */
4014 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
4017 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4018 to be evaluated unconditionally.
4019 In addition to simple_operand_p, we assume that comparisons, conversions,
4020 and logic-not operations are simple, if their operands are simple, too. */
4023 simple_operand_p_2 (tree exp
)
4025 enum tree_code code
;
/* Anything with side effects, or that may trap, must not be evaluated
   unconditionally.  */
4027 if (TREE_SIDE_EFFECTS (exp
)
4028 || tree_could_trap_p (exp
))
/* Look through nested conversions.  */
4031 while (CONVERT_EXPR_P (exp
))
4032 exp
= TREE_OPERAND (exp
, 0);
4034 code
= TREE_CODE (exp
);
/* A comparison is simple when both of its operands are.  */
4036 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4037 return (simple_operand_p (TREE_OPERAND (exp
, 0))
4038 && simple_operand_p (TREE_OPERAND (exp
, 1)));
/* Logical not is simple when its operand is.  */
4040 if (code
== TRUTH_NOT_EXPR
)
4041 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
4043 return simple_operand_p (exp
);
4047 /* The following functions are subroutines to fold_range_test and allow it to
4048 try to change a logical combination of comparisons into a range test.
4051 X == 2 || X == 3 || X == 4 || X == 5
4055 (unsigned) (X - 2) <= 3
4057 We describe each set of comparisons as being either inside or outside
4058 a range, using a variable named like IN_P, and then describe the
4059 range with a lower and upper bound. If one of the bounds is omitted,
4060 it represents either the highest or lowest value of the type.
4062 In the comments below, we represent a range by two numbers in brackets
4063 preceded by a "+" to designate being inside that range, or a "-" to
4064 designate being outside that range, so the condition can be inverted by
4065 flipping the prefix. An omitted bound is represented by a "-". For
4066 example, "- [-, 10]" means being outside the range starting at the lowest
4067 possible value and ending at 10, in other words, being greater than 10.
4068 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4071 We set up things so that the missing bounds are handled in a consistent
4072 manner so neither a missing bound nor "true" and "false" need to be
4073 handled using a special case. */
4075 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4076 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4077 and UPPER1_P are nonzero if the respective argument is an upper bound
4078 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4079 must be specified for a comparison. ARG1 will be converted to ARG0's
4080 type if both are specified. */
4083 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
4084 tree arg1
, int upper1_p
)
4090 /* If neither arg represents infinity, do the normal operation.
4091 Else, if not a comparison, return infinity. Else handle the special
4092 comparison rules. Note that most of the cases below won't occur, but
4093 are handled for consistency. */
4095 if (arg0
!= 0 && arg1
!= 0)
4097 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
4098 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
/* Only a constant fold result is useful to the callers.  */
4100 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
4103 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4106 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4107 for neither. In real maths, we cannot assume open ended ranges are
4108 the same. But, this is computer arithmetic, where numbers are finite.
4109 We can therefore make the transformation of any unbounded range with
4110 the value Z, Z being greater than any representable number. This permits
4111 us to treat unbounded ranges as equal. */
4112 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
4113 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
/* Compare the symbolic infinities according to the comparison code.  */
4117 result
= sgn0
== sgn1
;
4120 result
= sgn0
!= sgn1
;
4123 result
= sgn0
< sgn1
;
4126 result
= sgn0
<= sgn1
;
4129 result
= sgn0
> sgn1
;
4132 result
= sgn0
>= sgn1
;
4138 return constant_boolean_node (result
, type
);
4141 /* Helper routine for make_range. Perform one step for it, return
4142 new expression if the loop should continue or NULL_TREE if it should
4146 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
4147 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
4148 bool *strict_overflow_p
)
4150 tree arg0_type
= TREE_TYPE (arg0
);
4151 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
4152 int in_p
= *p_in_p
, n_in_p
;
/* The cases below belong to a switch on CODE.  */
4156 case TRUTH_NOT_EXPR
:
4157 /* We can only do something if the range is testing for zero. */
4158 if (low
== NULL_TREE
|| high
== NULL_TREE
4159 || ! integer_zerop (low
) || ! integer_zerop (high
))
4164 case EQ_EXPR
: case NE_EXPR
:
4165 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4166 /* We can only do something if the range is testing for zero
4167 and if the second operand is an integer constant. Note that
4168 saying something is "in" the range we make is done by
4169 complementing IN_P since it will set in the initial case of
4170 being not equal to zero; "out" is leaving it alone. */
4171 if (low
== NULL_TREE
|| high
== NULL_TREE
4172 || ! integer_zerop (low
) || ! integer_zerop (high
)
4173 || TREE_CODE (arg1
) != INTEGER_CST
)
/* Map each comparison code to an (IN_P, LOW, HIGH) range triple.  */
4178 case NE_EXPR
: /* - [c, c] */
4181 case EQ_EXPR
: /* + [c, c] */
4182 in_p
= ! in_p
, low
= high
= arg1
;
4184 case GT_EXPR
: /* - [-, c] */
4185 low
= 0, high
= arg1
;
4187 case GE_EXPR
: /* + [c, -] */
4188 in_p
= ! in_p
, low
= arg1
, high
= 0;
4190 case LT_EXPR
: /* - [c, -] */
4191 low
= arg1
, high
= 0;
4193 case LE_EXPR
: /* + [-, c] */
4194 in_p
= ! in_p
, low
= 0, high
= arg1
;
4200 /* If this is an unsigned comparison, we also know that EXP is
4201 greater than or equal to zero. We base the range tests we make
4202 on that fact, so we record it here so we can parse existing
4203 range tests. We test arg0_type since often the return type
4204 of, e.g. EQ_EXPR, is boolean. */
4205 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4207 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4209 build_int_cst (arg0_type
, 0),
4213 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
4215 /* If the high bound is missing, but we have a nonzero low
4216 bound, reverse the range so it goes from zero to the low bound
4218 if (high
== 0 && low
&& ! integer_zerop (low
))
4221 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
4222 build_int_cst (TREE_TYPE (low
), 1), 0);
4223 low
= build_int_cst (arg0_type
, 0);
4233 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4234 low and high are non-NULL, then normalize will DTRT. */
4235 if (!TYPE_UNSIGNED (arg0_type
)
4236 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4238 if (low
== NULL_TREE
)
4239 low
= TYPE_MIN_VALUE (arg0_type
);
4240 if (high
== NULL_TREE
)
4241 high
= TYPE_MAX_VALUE (arg0_type
);
4244 /* (-x) IN [a,b] -> x in [-b, -a] */
4245 n_low
= range_binop (MINUS_EXPR
, exp_type
,
4246 build_int_cst (exp_type
, 0),
4248 n_high
= range_binop (MINUS_EXPR
, exp_type
,
4249 build_int_cst (exp_type
, 0),
4251 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
/* Rewrite as -ARG0 - 1 so the NEGATE_EXPR handling above applies.  */
4257 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
4258 build_int_cst (exp_type
, 1));
4262 if (TREE_CODE (arg1
) != INTEGER_CST
)
4265 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4266 move a constant to the other side. */
4267 if (!TYPE_UNSIGNED (arg0_type
)
4268 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4271 /* If EXP is signed, any overflow in the computation is undefined,
4272 so we don't worry about it so long as our computations on
4273 the bounds don't overflow. For unsigned, overflow is defined
4274 and this is exactly the right thing. */
4275 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4276 arg0_type
, low
, 0, arg1
, 0);
4277 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4278 arg0_type
, high
, 1, arg1
, 0);
4279 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4280 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4283 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
/* The step relied on signed overflow being undefined; record that so
   the caller can emit a strict-overflow warning if requested.  */
4284 *strict_overflow_p
= true;
4287 /* Check for an unsigned range which has wrapped around the maximum
4288 value thus making n_high < n_low, and normalize it. */
4289 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4291 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4292 build_int_cst (TREE_TYPE (n_high
), 1), 0);
4293 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4294 build_int_cst (TREE_TYPE (n_low
), 1), 0);
4296 /* If the range is of the form +/- [ x+1, x ], we won't
4297 be able to normalize it. But then, it represents the
4298 whole range or the empty set, so make it
4300 if (tree_int_cst_equal (n_low
, low
)
4301 && tree_int_cst_equal (n_high
, high
))
4307 low
= n_low
, high
= n_high
;
4315 case NON_LVALUE_EXPR
:
4316 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4319 if (! INTEGRAL_TYPE_P (arg0_type
)
4320 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4321 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4324 n_low
= low
, n_high
= high
;
4327 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
4330 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
4332 /* If we're converting arg0 from an unsigned type, to exp,
4333 a signed type, we will be doing the comparison as unsigned.
4334 The tests above have already verified that LOW and HIGH
4337 So we have to ensure that we will handle large unsigned
4338 values the same way that the current signed bounds treat
4341 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4345 /* For fixed-point modes, we need to pass the saturating flag
4346 as the 2nd parameter. */
4347 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
4349 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
4350 TYPE_SATURATING (arg0_type
));
4353 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4355 /* A range without an upper bound is, naturally, unbounded.
4356 Since convert would have cropped a very large value, use
4357 the max value for the destination type. */
4359 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4360 : TYPE_MAX_VALUE (arg0_type
);
4362 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4363 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4364 fold_convert_loc (loc
, arg0_type
,
4366 build_int_cst (arg0_type
, 1));
4368 /* If the low bound is specified, "and" the range with the
4369 range for which the original unsigned value will be
4373 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4374 1, fold_convert_loc (loc
, arg0_type
,
4379 in_p
= (n_in_p
== in_p
);
4383 /* Otherwise, "or" the range with the range of the input
4384 that will be interpreted as negative. */
4385 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4386 1, fold_convert_loc (loc
, arg0_type
,
4391 in_p
= (in_p
!= n_in_p
);
4405 /* Given EXP, a logical expression, set the range it is testing into
4406 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4407 actually being tested. *PLOW and *PHIGH will be made of the same
4408 type as the returned expression. If EXP is not a comparison, we
4409 will most likely not be returning a useful value and range. Set
4410 *STRICT_OVERFLOW_P to true if the return value is only valid
4411 because signed overflow is undefined; otherwise, do not change
4412 *STRICT_OVERFLOW_P. */
4415 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4416 bool *strict_overflow_p
)
4418 enum tree_code code
;
4419 tree arg0
, arg1
= NULL_TREE
;
4420 tree exp_type
, nexp
;
4423 location_t loc
= EXPR_LOCATION (exp
);
4425 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4426 and see if we can refine the range. Some of the cases below may not
4427 happen, but it doesn't seem worth worrying about this. We "continue"
4428 the outer loop when we've changed something; otherwise we "break"
4429 the switch, which will "break" the while. */
4432 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4436 code
= TREE_CODE (exp
);
4437 exp_type
= TREE_TYPE (exp
);
/* Pick out the operands that make_range_step will examine.  */
4440 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4442 if (TREE_OPERAND_LENGTH (exp
) > 0)
4443 arg0
= TREE_OPERAND (exp
, 0);
4444 if (TREE_CODE_CLASS (code
) == tcc_binary
4445 || TREE_CODE_CLASS (code
) == tcc_comparison
4446 || (TREE_CODE_CLASS (code
) == tcc_expression
4447 && TREE_OPERAND_LENGTH (exp
) > 1))
4448 arg1
= TREE_OPERAND (exp
, 1);
4450 if (arg0
== NULL_TREE
)
/* Take one refinement step; NULL_TREE means no further refinement.  */
4453 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4454 &high
, &in_p
, strict_overflow_p
);
4455 if (nexp
== NULL_TREE
)
4460 /* If EXP is a constant, we can evaluate whether this is true or false. */
4461 if (TREE_CODE (exp
) == INTEGER_CST
)
4463 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4465 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4471 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4475 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4476 type, TYPE, return an expression to test if EXP is in (or out of, depending
4477 on IN_P) the range. Return 0 if the test couldn't be created. */
4480 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4481 tree low
, tree high
)
4483 tree etype
= TREE_TYPE (exp
), value
;
4485 /* Disable this optimization for function pointer expressions
4486 on targets that require function pointer canonicalization. */
4487 if (targetm
.have_canonicalize_funcptr_for_compare ()
4488 && TREE_CODE (etype
) == POINTER_TYPE
4489 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
/* Handle the "out of range" case by inverting the "in range" test.  */
4494 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4496 return invert_truthvalue_loc (loc
, value
);
/* An unbounded range is always true; keep EXP only for side effects.  */
4501 if (low
== 0 && high
== 0)
4502 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
4505 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4506 fold_convert_loc (loc
, etype
, high
));
4509 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4510 fold_convert_loc (loc
, etype
, low
));
/* A one-element range [c, c] is just an equality test.  */
4512 if (operand_equal_p (low
, high
, 0))
4513 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4514 fold_convert_loc (loc
, etype
, low
));
/* A [0, high] check on a signed type can be done unsigned.  */
4516 if (integer_zerop (low
))
4518 if (! TYPE_UNSIGNED (etype
))
4520 etype
= unsigned_type_for (etype
);
4521 high
= fold_convert_loc (loc
, etype
, high
);
4522 exp
= fold_convert_loc (loc
, etype
, exp
);
4524 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4527 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4528 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4530 int prec
= TYPE_PRECISION (etype
);
/* HIGH must be the maximum of the corresponding signed type, i.e. all
   ones except the sign bit.  */
4532 if (wi::mask (prec
- 1, false, prec
) == high
)
4534 if (TYPE_UNSIGNED (etype
))
4536 tree signed_etype
= signed_type_for (etype
);
4537 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4539 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4541 etype
= signed_etype
;
4542 exp
= fold_convert_loc (loc
, etype
, exp
);
4544 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4545 build_int_cst (etype
, 0));
4549 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4550 This requires wrap-around arithmetics for the type of the expression.
4551 First make sure that arithmetics in this type is valid, then make sure
4552 that it wraps around. */
4553 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4554 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4555 TYPE_UNSIGNED (etype
));
4557 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4559 tree utype
, minv
, maxv
;
4561 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4562 for the type in question, as we rely on this here. */
4563 utype
= unsigned_type_for (etype
);
4564 maxv
= fold_convert_loc (loc
, utype
, TYPE_MAX_VALUE (etype
));
4565 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4566 build_int_cst (TREE_TYPE (maxv
), 1), 1);
4567 minv
= fold_convert_loc (loc
, utype
, TYPE_MIN_VALUE (etype
));
4569 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4576 high
= fold_convert_loc (loc
, etype
, high
);
4577 low
= fold_convert_loc (loc
, etype
, low
);
4578 exp
= fold_convert_loc (loc
, etype
, exp
);
4580 value
= const_binop (MINUS_EXPR
, high
, low
);
/* Pointers use pointer_plus with a negated offset instead of MINUS.  */
4583 if (POINTER_TYPE_P (etype
))
4585 if (value
!= 0 && !TREE_OVERFLOW (value
))
4587 low
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (low
), low
);
4588 return build_range_check (loc
, type
,
4589 fold_build_pointer_plus_loc (loc
, exp
, low
),
4590 1, build_int_cst (etype
, 0), value
);
4595 if (value
!= 0 && !TREE_OVERFLOW (value
))
4596 return build_range_check (loc
, type
,
4597 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4598 1, build_int_cst (etype
, 0), value
);
4603 /* Return the predecessor of VAL in its type, handling the infinite case. */
4606 range_predecessor (tree val
)
4608 tree type
= TREE_TYPE (val
);
/* The minimum of an integral type has no predecessor.  */
4610 if (INTEGRAL_TYPE_P (type
)
4611 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
/* Compute VAL - 1 via range_binop.  */
4614 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
4615 build_int_cst (TREE_TYPE (val
), 1), 0);
4618 /* Return the successor of VAL in its type, handling the infinite case. */
4621 range_successor (tree val
)
4623 tree type
= TREE_TYPE (val
);
/* The maximum of an integral type has no successor.  */
4625 if (INTEGRAL_TYPE_P (type
)
4626 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
/* Compute VAL + 1 via range_binop.  */
4629 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
4630 build_int_cst (TREE_TYPE (val
), 1), 0);
4633 /* Given two ranges, see if we can merge them into one. Return 1 if we
4634 can, 0 if we can't. Set the output range into the specified parameters. */
4637 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4638 tree high0
, int in1_p
, tree low1
, tree high1
)
/* LOWEQUAL/HIGHEQUAL: whether the corresponding bounds compare equal,
   with missing bounds treated as matching infinities.  */
4646 int lowequal
= ((low0
== 0 && low1
== 0)
4647 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4648 low0
, 0, low1
, 0)));
4649 int highequal
= ((high0
== 0 && high1
== 0)
4650 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4651 high0
, 1, high1
, 1)));
4653 /* Make range 0 be the range that starts first, or ends last if they
4654 start at the same value. Swap them if it isn't. */
4655 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4658 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4659 high1
, 1, high0
, 1))))
4661 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4662 tem
= low0
, low0
= low1
, low1
= tem
;
4663 tem
= high0
, high0
= high1
, high1
= tem
;
4666 /* Now flag two cases, whether the ranges are disjoint or whether the
4667 second range is totally subsumed in the first. Note that the tests
4668 below are simplified by the ones above. */
4669 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4670 high0
, 1, low1
, 0));
4671 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4672 high1
, 1, high0
, 1));
4674 /* We now have four cases, depending on whether we are including or
4675 excluding the two ranges. */
4678 /* If they don't overlap, the result is false. If the second range
4679 is a subset it is the result. Otherwise, the range is from the start
4680 of the second to the end of the first. */
4682 in_p
= 0, low
= high
= 0;
4684 in_p
= 1, low
= low1
, high
= high1
;
4686 in_p
= 1, low
= low1
, high
= high0
;
4689 else if (in0_p
&& ! in1_p
)
4691 /* If they don't overlap, the result is the first range. If they are
4692 equal, the result is false. If the second range is a subset of the
4693 first, and the ranges begin at the same place, we go from just after
4694 the end of the second range to the end of the first. If the second
4695 range is not a subset of the first, or if it is a subset and both
4696 ranges end at the same place, the range starts at the start of the
4697 first range and ends just before the second range.
4698 Otherwise, we can't describe this as a single range. */
4700 in_p
= 1, low
= low0
, high
= high0
;
4701 else if (lowequal
&& highequal
)
4702 in_p
= 0, low
= high
= 0;
4703 else if (subset
&& lowequal
)
4705 low
= range_successor (high1
);
4710 /* We are in the weird situation where high0 > high1 but
4711 high1 has no successor. Punt. */
4715 else if (! subset
|| highequal
)
4718 high
= range_predecessor (low1
);
4722 /* low0 < low1 but low1 has no predecessor. Punt. */
4730 else if (! in0_p
&& in1_p
)
4732 /* If they don't overlap, the result is the second range. If the second
4733 is a subset of the first, the result is false. Otherwise,
4734 the range starts just after the first range and ends at the
4735 end of the second. */
4737 in_p
= 1, low
= low1
, high
= high1
;
4738 else if (subset
|| highequal
)
4739 in_p
= 0, low
= high
= 0;
4742 low
= range_successor (high0
);
4747 /* high1 > high0 but high0 has no successor. Punt. */
4755 /* The case where we are excluding both ranges. Here the complex case
4756 is if they don't overlap. In that case, the only time we have a
4757 range is if they are adjacent. If the second is a subset of the
4758 first, the result is the first. Otherwise, the range to exclude
4759 starts at the beginning of the first range and ends at the end of the
4763 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4764 range_successor (high0
),
4766 in_p
= 0, low
= low0
, high
= high1
;
4769 /* Canonicalize - [min, x] into - [-, x]. */
4770 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4771 switch (TREE_CODE (TREE_TYPE (low0
)))
4774 if (TYPE_PRECISION (TREE_TYPE (low0
))
4775 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4779 if (tree_int_cst_equal (low0
,
4780 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4784 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4785 && integer_zerop (low0
))
4792 /* Canonicalize - [x, max] into - [x, -]. */
4793 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4794 switch (TREE_CODE (TREE_TYPE (high1
)))
4797 if (TYPE_PRECISION (TREE_TYPE (high1
))
4798 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4802 if (tree_int_cst_equal (high1
,
4803 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4807 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4808 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4810 build_int_cst (TREE_TYPE (high1
), 1),
4818 /* The ranges might be also adjacent between the maximum and
4819 minimum values of the given type. For
4820 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4821 return + [x + 1, y - 1]. */
4822 if (low0
== 0 && high1
== 0)
4824 low
= range_successor (high0
);
4825 high
= range_predecessor (low1
);
4826 if (low
== 0 || high
== 0)
4836 in_p
= 0, low
= low0
, high
= high0
;
4838 in_p
= 0, low
= low0
, high
= high1
;
/* Publish the merged range through the output parameters.  */
4841 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4846 /* Subroutine of fold, looking inside expressions of the form
4847 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4848 of the COND_EXPR. This function is being used also to optimize
4849 A op B ? C : A, by reversing the comparison first.
4851 Return a folded expression whose code is not a COND_EXPR
4852 anymore, or NULL_TREE if no folding opportunity is found. */
4855 fold_cond_expr_with_comparison (location_t loc
, tree type
,
4856 tree arg0
, tree arg1
, tree arg2
)
4858 enum tree_code comp_code
= TREE_CODE (arg0
);
4859 tree arg00
= TREE_OPERAND (arg0
, 0);
4860 tree arg01
= TREE_OPERAND (arg0
, 1);
4861 tree arg1_type
= TREE_TYPE (arg1
);
4867 /* If we have A op 0 ? A : -A, consider applying the following
4870 A == 0? A : -A same as -A
4871 A != 0? A : -A same as A
4872 A >= 0? A : -A same as abs (A)
4873 A > 0? A : -A same as abs (A)
4874 A <= 0? A : -A same as -abs (A)
4875 A < 0? A : -A same as -abs (A)
4877 None of these transformations work for modes with signed
4878 zeros. If A is +/-0, the first two transformations will
4879 change the sign of the result (from +0 to -0, or vice
4880 versa). The last four will fix the sign of the result,
4881 even though the original expressions could be positive or
4882 negative, depending on the sign of A.
4884 Note that all these transformations are correct if A is
4885 NaN, since the two alternatives (A and -A) are also NaNs. */
4886 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4887 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4888 ? real_zerop (arg01
)
4889 : integer_zerop (arg01
))
4890 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4891 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4892 /* In the case that A is of the form X-Y, '-A' (arg2) may
4893 have already been folded to Y-X, check for that. */
4894 || (TREE_CODE (arg1
) == MINUS_EXPR
4895 && TREE_CODE (arg2
) == MINUS_EXPR
4896 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4897 TREE_OPERAND (arg2
, 1), 0)
4898 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4899 TREE_OPERAND (arg2
, 0), 0))))
4904 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
4905 return pedantic_non_lvalue_loc (loc
,
4906 fold_convert_loc (loc
, type
,
4907 negate_expr (tem
)));
4910 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4913 if (flag_trapping_math
)
4918 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4919 arg1
= fold_convert_loc (loc
, signed_type_for
4920 (TREE_TYPE (arg1
)), arg1
);
4921 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4922 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4925 if (flag_trapping_math
)
4929 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4930 arg1
= fold_convert_loc (loc
, signed_type_for
4931 (TREE_TYPE (arg1
)), arg1
);
4932 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4933 return negate_expr (fold_convert_loc (loc
, type
, tem
));
4935 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4939 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4940 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4941 both transformations are correct when A is NaN: A != 0
4942 is then true, and A == 0 is false. */
4944 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4945 && integer_zerop (arg01
) && integer_zerop (arg2
))
4947 if (comp_code
== NE_EXPR
)
4948 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4949 else if (comp_code
== EQ_EXPR
)
4950 return build_zero_cst (type
);
4953 /* Try some transformations of A op B ? A : B.
4955 A == B? A : B same as B
4956 A != B? A : B same as A
4957 A >= B? A : B same as max (A, B)
4958 A > B? A : B same as max (B, A)
4959 A <= B? A : B same as min (A, B)
4960 A < B? A : B same as min (B, A)
4962 As above, these transformations don't work in the presence
4963 of signed zeros. For example, if A and B are zeros of
4964 opposite sign, the first two transformations will change
4965 the sign of the result. In the last four, the original
4966 expressions give different results for (A=+0, B=-0) and
4967 (A=-0, B=+0), but the transformed expressions do not.
4969 The first two transformations are correct if either A or B
4970 is a NaN. In the first transformation, the condition will
4971 be false, and B will indeed be chosen. In the case of the
4972 second transformation, the condition A != B will be true,
4973 and A will be chosen.
4975 The conversions to max() and min() are not correct if B is
4976 a number and A is not. The conditions in the original
4977 expressions will be false, so all four give B. The min()
4978 and max() versions would give a NaN instead. */
4979 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4980 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4981 /* Avoid these transformations if the COND_EXPR may be used
4982 as an lvalue in the C++ front-end. PR c++/19199. */
4984 || VECTOR_TYPE_P (type
)
4985 || (! lang_GNU_CXX ()
4986 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4987 || ! maybe_lvalue_p (arg1
)
4988 || ! maybe_lvalue_p (arg2
)))
4990 tree comp_op0
= arg00
;
4991 tree comp_op1
= arg01
;
4992 tree comp_type
= TREE_TYPE (comp_op0
);
4994 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4995 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
5005 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
5007 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
5012 /* In C++ a ?: expression can be an lvalue, so put the
5013 operand which will be used if they are equal first
5014 so that we can convert this back to the
5015 corresponding COND_EXPR. */
5016 if (!HONOR_NANS (arg1
))
5018 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5019 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5020 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
5021 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
5022 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
5023 comp_op1
, comp_op0
);
5024 return pedantic_non_lvalue_loc (loc
,
5025 fold_convert_loc (loc
, type
, tem
));
5032 if (!HONOR_NANS (arg1
))
5034 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5035 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5036 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
5037 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
5038 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
5039 comp_op1
, comp_op0
);
5040 return pedantic_non_lvalue_loc (loc
,
5041 fold_convert_loc (loc
, type
, tem
));
5045 if (!HONOR_NANS (arg1
))
5046 return pedantic_non_lvalue_loc (loc
,
5047 fold_convert_loc (loc
, type
, arg2
));
5050 if (!HONOR_NANS (arg1
))
5051 return pedantic_non_lvalue_loc (loc
,
5052 fold_convert_loc (loc
, type
, arg1
));
5055 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5060 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5061 we might still be able to simplify this. For example,
5062 if C1 is one less or one more than C2, this might have started
5063 out as a MIN or MAX and been transformed by this function.
5064 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5066 if (INTEGRAL_TYPE_P (type
)
5067 && TREE_CODE (arg01
) == INTEGER_CST
5068 && TREE_CODE (arg2
) == INTEGER_CST
)
5072 if (TREE_CODE (arg1
) == INTEGER_CST
)
5074 /* We can replace A with C1 in this case. */
5075 arg1
= fold_convert_loc (loc
, type
, arg01
);
5076 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
5079 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5080 MIN_EXPR, to preserve the signedness of the comparison. */
5081 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5083 && operand_equal_p (arg01
,
5084 const_binop (PLUS_EXPR
, arg2
,
5085 build_int_cst (type
, 1)),
5088 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
5089 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5091 return pedantic_non_lvalue_loc (loc
,
5092 fold_convert_loc (loc
, type
, tem
));
5097 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5099 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5101 && operand_equal_p (arg01
,
5102 const_binop (MINUS_EXPR
, arg2
,
5103 build_int_cst (type
, 1)),
5106 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
5107 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5109 return pedantic_non_lvalue_loc (loc
,
5110 fold_convert_loc (loc
, type
, tem
));
5115 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5116 MAX_EXPR, to preserve the signedness of the comparison. */
5117 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5119 && operand_equal_p (arg01
,
5120 const_binop (MINUS_EXPR
, arg2
,
5121 build_int_cst (type
, 1)),
5124 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5125 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5127 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5132 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5133 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5135 && operand_equal_p (arg01
,
5136 const_binop (PLUS_EXPR
, arg2
,
5137 build_int_cst (type
, 1)),
5140 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5141 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5143 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5157 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5158 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5159 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5163 /* EXP is some logical combination of boolean tests. See if we can
5164 merge it into some range test. Return the new tree if so. */
5167 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
5170 int or_op
= (code
== TRUTH_ORIF_EXPR
5171 || code
== TRUTH_OR_EXPR
);
5172 int in0_p
, in1_p
, in_p
;
5173 tree low0
, low1
, low
, high0
, high1
, high
;
5174 bool strict_overflow_p
= false;
5176 const char * const warnmsg
= G_("assuming signed overflow does not occur "
5177 "when simplifying range test");
5179 if (!INTEGRAL_TYPE_P (type
))
5182 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
5183 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
5185 /* If this is an OR operation, invert both sides; we will invert
5186 again at the end. */
5188 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
5190 /* If both expressions are the same, if we can merge the ranges, and we
5191 can build the range test, return it or it inverted. If one of the
5192 ranges is always true or always false, consider it to be the same
5193 expression as the other. */
5194 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
5195 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
5197 && 0 != (tem
= (build_range_check (loc
, type
,
5199 : rhs
!= 0 ? rhs
: integer_zero_node
,
5202 if (strict_overflow_p
)
5203 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
5204 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
5207 /* On machines where the branch cost is expensive, if this is a
5208 short-circuited branch and the underlying object on both sides
5209 is the same, make a non-short-circuit operation. */
5210 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5211 && lhs
!= 0 && rhs
!= 0
5212 && (code
== TRUTH_ANDIF_EXPR
5213 || code
== TRUTH_ORIF_EXPR
)
5214 && operand_equal_p (lhs
, rhs
, 0))
5216 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5217 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5218 which cases we can't do this. */
5219 if (simple_operand_p (lhs
))
5220 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5221 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5224 else if (!lang_hooks
.decls
.global_bindings_p ()
5225 && !CONTAINS_PLACEHOLDER_P (lhs
))
5227 tree common
= save_expr (lhs
);
5229 if (0 != (lhs
= build_range_check (loc
, type
, common
,
5230 or_op
? ! in0_p
: in0_p
,
5232 && (0 != (rhs
= build_range_check (loc
, type
, common
,
5233 or_op
? ! in1_p
: in1_p
,
5236 if (strict_overflow_p
)
5237 fold_overflow_warning (warnmsg
,
5238 WARN_STRICT_OVERFLOW_COMPARISON
);
5239 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5240 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5249 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5250 bit value. Arrange things so the extra bits will be set to zero if and
5251 only if C is signed-extended to its full width. If MASK is nonzero,
5252 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5255 unextend (tree c
, int p
, int unsignedp
, tree mask
)
5257 tree type
= TREE_TYPE (c
);
5258 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
5261 if (p
== modesize
|| unsignedp
)
5264 /* We work by getting just the sign bit into the low-order bit, then
5265 into the high-order bit, then sign-extend. We then XOR that value
5267 temp
= build_int_cst (TREE_TYPE (c
), wi::extract_uhwi (c
, p
- 1, 1));
5269 /* We must use a signed type in order to get an arithmetic right shift.
5270 However, we must also avoid introducing accidental overflows, so that
5271 a subsequent call to integer_zerop will work. Hence we must
5272 do the type conversion here. At this point, the constant is either
5273 zero or one, and the conversion to a signed type can never overflow.
5274 We could get an overflow if this conversion is done anywhere else. */
5275 if (TYPE_UNSIGNED (type
))
5276 temp
= fold_convert (signed_type_for (type
), temp
);
5278 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
5279 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
5281 temp
= const_binop (BIT_AND_EXPR
, temp
,
5282 fold_convert (TREE_TYPE (c
), mask
));
5283 /* If necessary, convert the type back to match the type of C. */
5284 if (TYPE_UNSIGNED (type
))
5285 temp
= fold_convert (type
, temp
);
5287 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
5290 /* For an expression that has the form
5294 we can drop one of the inner expressions and simplify to
5298 LOC is the location of the resulting expression. OP is the inner
5299 logical operation; the left-hand side in the examples above, while CMPOP
5300 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5301 removing a condition that guards another, as in
5302 (A != NULL && A->...) || A == NULL
5303 which we must not transform. If RHS_ONLY is true, only eliminate the
5304 right-most operand of the inner logical operation. */
5307 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
5310 tree type
= TREE_TYPE (cmpop
);
5311 enum tree_code code
= TREE_CODE (cmpop
);
5312 enum tree_code truthop_code
= TREE_CODE (op
);
5313 tree lhs
= TREE_OPERAND (op
, 0);
5314 tree rhs
= TREE_OPERAND (op
, 1);
5315 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
5316 enum tree_code rhs_code
= TREE_CODE (rhs
);
5317 enum tree_code lhs_code
= TREE_CODE (lhs
);
5318 enum tree_code inv_code
;
5320 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
5323 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
5326 if (rhs_code
== truthop_code
)
5328 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
5329 if (newrhs
!= NULL_TREE
)
5332 rhs_code
= TREE_CODE (rhs
);
5335 if (lhs_code
== truthop_code
&& !rhs_only
)
5337 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
5338 if (newlhs
!= NULL_TREE
)
5341 lhs_code
= TREE_CODE (lhs
);
5345 inv_code
= invert_tree_comparison (code
, HONOR_NANS (type
));
5346 if (inv_code
== rhs_code
5347 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5348 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5350 if (!rhs_only
&& inv_code
== lhs_code
5351 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5352 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5354 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
5355 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5360 /* Find ways of folding logical expressions of LHS and RHS:
5361 Try to merge two comparisons to the same innermost item.
5362 Look for range tests like "ch >= '0' && ch <= '9'".
5363 Look for combinations of simple terms on machines with expensive branches
5364 and evaluate the RHS unconditionally.
5366 For example, if we have p->a == 2 && p->b == 4 and we can make an
5367 object large enough to span both A and B, we can do this with a comparison
5368 against the object ANDed with the a mask.
5370 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5371 operations to do this with one comparison.
5373 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5374 function and the one above.
5376 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5377 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5379 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5382 We return the simplified tree or 0 if no optimization is possible. */
5385 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
5388 /* If this is the "or" of two comparisons, we can do something if
5389 the comparisons are NE_EXPR. If this is the "and", we can do something
5390 if the comparisons are EQ_EXPR. I.e.,
5391 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5393 WANTED_CODE is this operation code. For single bit fields, we can
5394 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5395 comparison for one-bit fields. */
5397 enum tree_code wanted_code
;
5398 enum tree_code lcode
, rcode
;
5399 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5400 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5401 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5402 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5403 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5404 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5405 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5406 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5407 machine_mode lnmode
, rnmode
;
5408 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5409 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5410 tree l_const
, r_const
;
5411 tree lntype
, rntype
, result
;
5412 HOST_WIDE_INT first_bit
, end_bit
;
5415 /* Start by getting the comparison codes. Fail if anything is volatile.
5416 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5417 it were surrounded with a NE_EXPR. */
5419 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5422 lcode
= TREE_CODE (lhs
);
5423 rcode
= TREE_CODE (rhs
);
5425 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5427 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5428 build_int_cst (TREE_TYPE (lhs
), 0));
5432 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5434 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5435 build_int_cst (TREE_TYPE (rhs
), 0));
5439 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5440 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5443 ll_arg
= TREE_OPERAND (lhs
, 0);
5444 lr_arg
= TREE_OPERAND (lhs
, 1);
5445 rl_arg
= TREE_OPERAND (rhs
, 0);
5446 rr_arg
= TREE_OPERAND (rhs
, 1);
5448 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5449 if (simple_operand_p (ll_arg
)
5450 && simple_operand_p (lr_arg
))
5452 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5453 && operand_equal_p (lr_arg
, rr_arg
, 0))
5455 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5456 truth_type
, ll_arg
, lr_arg
);
5460 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5461 && operand_equal_p (lr_arg
, rl_arg
, 0))
5463 result
= combine_comparisons (loc
, code
, lcode
,
5464 swap_tree_comparison (rcode
),
5465 truth_type
, ll_arg
, lr_arg
);
5471 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5472 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5474 /* If the RHS can be evaluated unconditionally and its operands are
5475 simple, it wins to evaluate the RHS unconditionally on machines
5476 with expensive branches. In this case, this isn't a comparison
5477 that can be merged. */
5479 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5481 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5482 && simple_operand_p (rl_arg
)
5483 && simple_operand_p (rr_arg
))
5485 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5486 if (code
== TRUTH_OR_EXPR
5487 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5488 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5489 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5490 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5491 return build2_loc (loc
, NE_EXPR
, truth_type
,
5492 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5494 build_int_cst (TREE_TYPE (ll_arg
), 0));
5496 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5497 if (code
== TRUTH_AND_EXPR
5498 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5499 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5500 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5501 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5502 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5503 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5505 build_int_cst (TREE_TYPE (ll_arg
), 0));
5508 /* See if the comparisons can be merged. Then get all the parameters for
5511 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5512 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5516 ll_inner
= decode_field_reference (loc
, ll_arg
,
5517 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5518 &ll_unsignedp
, &volatilep
, &ll_mask
,
5520 lr_inner
= decode_field_reference (loc
, lr_arg
,
5521 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5522 &lr_unsignedp
, &volatilep
, &lr_mask
,
5524 rl_inner
= decode_field_reference (loc
, rl_arg
,
5525 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5526 &rl_unsignedp
, &volatilep
, &rl_mask
,
5528 rr_inner
= decode_field_reference (loc
, rr_arg
,
5529 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5530 &rr_unsignedp
, &volatilep
, &rr_mask
,
5533 /* It must be true that the inner operation on the lhs of each
5534 comparison must be the same if we are to be able to do anything.
5535 Then see if we have constants. If not, the same must be true for
5537 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5538 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5541 if (TREE_CODE (lr_arg
) == INTEGER_CST
5542 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5543 l_const
= lr_arg
, r_const
= rr_arg
;
5544 else if (lr_inner
== 0 || rr_inner
== 0
5545 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5548 l_const
= r_const
= 0;
5550 /* If either comparison code is not correct for our logical operation,
5551 fail. However, we can convert a one-bit comparison against zero into
5552 the opposite comparison against that bit being set in the field. */
5554 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5555 if (lcode
!= wanted_code
)
5557 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5559 /* Make the left operand unsigned, since we are only interested
5560 in the value of one bit. Otherwise we are doing the wrong
5569 /* This is analogous to the code for l_const above. */
5570 if (rcode
!= wanted_code
)
5572 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5581 /* See if we can find a mode that contains both fields being compared on
5582 the left. If we can't, fail. Otherwise, update all constants and masks
5583 to be relative to a field of that size. */
5584 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5585 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5586 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5587 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5589 if (lnmode
== VOIDmode
)
5592 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5593 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5594 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5595 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5597 if (BYTES_BIG_ENDIAN
)
5599 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5600 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5603 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5604 size_int (xll_bitpos
));
5605 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5606 size_int (xrl_bitpos
));
5610 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5611 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5612 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5613 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5614 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5617 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5619 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5624 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5625 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5626 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5627 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5628 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5631 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5633 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5637 /* If the right sides are not constant, do the same for it. Also,
5638 disallow this optimization if a size or signedness mismatch occurs
5639 between the left and right sides. */
5642 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5643 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5644 /* Make sure the two fields on the right
5645 correspond to the left without being swapped. */
5646 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5649 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5650 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5651 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5652 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5654 if (rnmode
== VOIDmode
)
5657 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5658 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5659 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5660 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5662 if (BYTES_BIG_ENDIAN
)
5664 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5665 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5668 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5670 size_int (xlr_bitpos
));
5671 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5673 size_int (xrr_bitpos
));
5675 /* Make a mask that corresponds to both fields being compared.
5676 Do this for both items being compared. If the operands are the
5677 same size and the bits being compared are in the same position
5678 then we can do this by masking both and comparing the masked
5680 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5681 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5682 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5684 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5685 ll_unsignedp
|| rl_unsignedp
);
5686 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5687 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5689 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5690 lr_unsignedp
|| rr_unsignedp
);
5691 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5692 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5694 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5697 /* There is still another way we can do something: If both pairs of
5698 fields being compared are adjacent, we may be able to make a wider
5699 field containing them both.
5701 Note that we still must mask the lhs/rhs expressions. Furthermore,
5702 the mask must be shifted to account for the shift done by
5703 make_bit_field_ref. */
5704 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5705 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5706 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5707 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5711 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5712 ll_bitsize
+ rl_bitsize
,
5713 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5714 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5715 lr_bitsize
+ rr_bitsize
,
5716 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5718 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5719 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5720 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5721 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5723 /* Convert to the smaller type before masking out unwanted bits. */
5725 if (lntype
!= rntype
)
5727 if (lnbitsize
> rnbitsize
)
5729 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5730 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5733 else if (lnbitsize
< rnbitsize
)
5735 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5736 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5741 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5742 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5744 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5745 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5747 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5753 /* Handle the case of comparisons with constants. If there is something in
5754 common between the masks, those bits of the constants must be the same.
5755 If not, the condition is always false. Test for this to avoid generating
5756 incorrect code below. */
5757 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5758 if (! integer_zerop (result
)
5759 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5760 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5762 if (wanted_code
== NE_EXPR
)
5764 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5765 return constant_boolean_node (true, truth_type
);
5769 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5770 return constant_boolean_node (false, truth_type
);
5774 /* Construct the expression we will return. First get the component
5775 reference we will make. Unless the mask is all ones the width of
5776 that field, perform the mask operation. Then compare with the
5778 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5779 ll_unsignedp
|| rl_unsignedp
);
5781 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5782 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5783 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5785 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5786 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
5789 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5793 optimize_minmax_comparison (location_t loc
, enum tree_code code
, tree type
,
5797 enum tree_code op_code
;
5800 int consts_equal
, consts_lt
;
5803 STRIP_SIGN_NOPS (arg0
);
5805 op_code
= TREE_CODE (arg0
);
5806 minmax_const
= TREE_OPERAND (arg0
, 1);
5807 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5808 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5809 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5810 inner
= TREE_OPERAND (arg0
, 0);
5812 /* If something does not permit us to optimize, return the original tree. */
5813 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5814 || TREE_CODE (comp_const
) != INTEGER_CST
5815 || TREE_OVERFLOW (comp_const
)
5816 || TREE_CODE (minmax_const
) != INTEGER_CST
5817 || TREE_OVERFLOW (minmax_const
))
5820 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5821 and GT_EXPR, doing the rest with recursive calls using logical
5825 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5828 = optimize_minmax_comparison (loc
,
5829 invert_tree_comparison (code
, false),
5832 return invert_truthvalue_loc (loc
, tem
);
5838 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5839 optimize_minmax_comparison
5840 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5841 optimize_minmax_comparison
5842 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
5845 if (op_code
== MAX_EXPR
&& consts_equal
)
5846 /* MAX (X, 0) == 0 -> X <= 0 */
5847 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5849 else if (op_code
== MAX_EXPR
&& consts_lt
)
5850 /* MAX (X, 0) == 5 -> X == 5 */
5851 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5853 else if (op_code
== MAX_EXPR
)
5854 /* MAX (X, 0) == -1 -> false */
5855 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5857 else if (consts_equal
)
5858 /* MIN (X, 0) == 0 -> X >= 0 */
5859 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5862 /* MIN (X, 0) == 5 -> false */
5863 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5866 /* MIN (X, 0) == -1 -> X == -1 */
5867 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5870 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5871 /* MAX (X, 0) > 0 -> X > 0
5872 MAX (X, 0) > 5 -> X > 5 */
5873 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5875 else if (op_code
== MAX_EXPR
)
5876 /* MAX (X, 0) > -1 -> true */
5877 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5879 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5880 /* MIN (X, 0) > 0 -> false
5881 MIN (X, 0) > 5 -> false */
5882 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5885 /* MIN (X, 0) > -1 -> X > -1 */
5886 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5893 /* T is an integer expression that is being multiplied, divided, or taken a
5894 modulus (CODE says which and what kind of divide or modulus) by a
5895 constant C. See if we can eliminate that operation by folding it with
5896 other operations already in T. WIDE_TYPE, if non-null, is a type that
5897 should be used for the computation if wider than our type.
5899 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5900 (X * 2) + (Y * 4). We must, however, be assured that either the original
5901 expression would not overflow or that overflow is undefined for the type
5902 in the language in question.
5904 If we return a non-null expression, it is an equivalent form of the
5905 original computation, but need not be in the original type.
5907 We set *STRICT_OVERFLOW_P to true if the return values depends on
5908 signed overflow being undefined. Otherwise we do not change
5909 *STRICT_OVERFLOW_P. */
5912 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5913 bool *strict_overflow_p
)
5915 /* To avoid exponential search depth, refuse to allow recursion past
5916 three levels. Beyond that (1) it's highly unlikely that we'll find
5917 something interesting and (2) we've probably processed it before
5918 when we built the inner expression. */
5927 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5934 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5935 bool *strict_overflow_p
)
5937 tree type
= TREE_TYPE (t
);
5938 enum tree_code tcode
= TREE_CODE (t
);
5939 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5940 > GET_MODE_SIZE (TYPE_MODE (type
)))
5941 ? wide_type
: type
);
5943 int same_p
= tcode
== code
;
5944 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5945 bool sub_strict_overflow_p
;
5947 /* Don't deal with constants of zero here; they confuse the code below. */
5948 if (integer_zerop (c
))
5951 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5952 op0
= TREE_OPERAND (t
, 0);
5954 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5955 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5957 /* Note that we need not handle conditional operations here since fold
5958 already handles those cases. So just do arithmetic here. */
5962 /* For a constant, we can always simplify if we are a multiply
5963 or (for divide and modulus) if it is a multiple of our constant. */
5964 if (code
== MULT_EXPR
5965 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
5966 return const_binop (code
, fold_convert (ctype
, t
),
5967 fold_convert (ctype
, c
));
5970 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5971 /* If op0 is an expression ... */
5972 if ((COMPARISON_CLASS_P (op0
)
5973 || UNARY_CLASS_P (op0
)
5974 || BINARY_CLASS_P (op0
)
5975 || VL_EXP_CLASS_P (op0
)
5976 || EXPRESSION_CLASS_P (op0
))
5977 /* ... and has wrapping overflow, and its type is smaller
5978 than ctype, then we cannot pass through as widening. */
5979 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
5980 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
)))
5981 && (TYPE_PRECISION (ctype
)
5982 > TYPE_PRECISION (TREE_TYPE (op0
))))
5983 /* ... or this is a truncation (t is narrower than op0),
5984 then we cannot pass through this narrowing. */
5985 || (TYPE_PRECISION (type
)
5986 < TYPE_PRECISION (TREE_TYPE (op0
)))
5987 /* ... or signedness changes for division or modulus,
5988 then we cannot pass through this conversion. */
5989 || (code
!= MULT_EXPR
5990 && (TYPE_UNSIGNED (ctype
)
5991 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
5992 /* ... or has undefined overflow while the converted to
5993 type has not, we cannot do the operation in the inner type
5994 as that would introduce undefined overflow. */
5995 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
5996 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
)))
5997 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6000 /* Pass the constant down and see if we can make a simplification. If
6001 we can, replace this expression with the inner simplification for
6002 possible later conversion to our or some other type. */
6003 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6004 && TREE_CODE (t2
) == INTEGER_CST
6005 && !TREE_OVERFLOW (t2
)
6006 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
6008 ? ctype
: NULL_TREE
,
6009 strict_overflow_p
))))
6014 /* If widening the type changes it from signed to unsigned, then we
6015 must avoid building ABS_EXPR itself as unsigned. */
6016 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6018 tree cstype
= (*signed_type_for
) (ctype
);
6019 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6022 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6023 return fold_convert (ctype
, t1
);
6027 /* If the constant is negative, we cannot simplify this. */
6028 if (tree_int_cst_sgn (c
) == -1)
6032 /* For division and modulus, type can't be unsigned, as e.g.
6033 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6034 For signed types, even with wrapping overflow, this is fine. */
6035 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6037 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6039 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6042 case MIN_EXPR
: case MAX_EXPR
:
6043 /* If widening the type changes the signedness, then we can't perform
6044 this optimization as that changes the result. */
6045 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6048 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6049 sub_strict_overflow_p
= false;
6050 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6051 &sub_strict_overflow_p
)) != 0
6052 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6053 &sub_strict_overflow_p
)) != 0)
6055 if (tree_int_cst_sgn (c
) < 0)
6056 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6057 if (sub_strict_overflow_p
)
6058 *strict_overflow_p
= true;
6059 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6060 fold_convert (ctype
, t2
));
6064 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6065 /* If the second operand is constant, this is a multiplication
6066 or floor division, by a power of two, so we can treat it that
6067 way unless the multiplier or divisor overflows. Signed
6068 left-shift overflow is implementation-defined rather than
6069 undefined in C90, so do not convert signed left shift into
6071 if (TREE_CODE (op1
) == INTEGER_CST
6072 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6073 /* const_binop may not detect overflow correctly,
6074 so check for it explicitly here. */
6075 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
6076 && 0 != (t1
= fold_convert (ctype
,
6077 const_binop (LSHIFT_EXPR
,
6080 && !TREE_OVERFLOW (t1
))
6081 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6082 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6084 fold_convert (ctype
, op0
),
6086 c
, code
, wide_type
, strict_overflow_p
);
6089 case PLUS_EXPR
: case MINUS_EXPR
:
6090 /* See if we can eliminate the operation on both sides. If we can, we
6091 can return a new PLUS or MINUS. If we can't, the only remaining
6092 cases where we can do anything are if the second operand is a
6094 sub_strict_overflow_p
= false;
6095 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6096 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6097 if (t1
!= 0 && t2
!= 0
6098 && (code
== MULT_EXPR
6099 /* If not multiplication, we can only do this if both operands
6100 are divisible by c. */
6101 || (multiple_of_p (ctype
, op0
, c
)
6102 && multiple_of_p (ctype
, op1
, c
))))
6104 if (sub_strict_overflow_p
)
6105 *strict_overflow_p
= true;
6106 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6107 fold_convert (ctype
, t2
));
6110 /* If this was a subtraction, negate OP1 and set it to be an addition.
6111 This simplifies the logic below. */
6112 if (tcode
== MINUS_EXPR
)
6114 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
6115 /* If OP1 was not easily negatable, the constant may be OP0. */
6116 if (TREE_CODE (op0
) == INTEGER_CST
)
6118 std::swap (op0
, op1
);
6123 if (TREE_CODE (op1
) != INTEGER_CST
)
6126 /* If either OP1 or C are negative, this optimization is not safe for
6127 some of the division and remainder types while for others we need
6128 to change the code. */
6129 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
6131 if (code
== CEIL_DIV_EXPR
)
6132 code
= FLOOR_DIV_EXPR
;
6133 else if (code
== FLOOR_DIV_EXPR
)
6134 code
= CEIL_DIV_EXPR
;
6135 else if (code
!= MULT_EXPR
6136 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
6140 /* If it's a multiply or a division/modulus operation of a multiple
6141 of our constant, do the operation and verify it doesn't overflow. */
6142 if (code
== MULT_EXPR
6143 || wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6145 op1
= const_binop (code
, fold_convert (ctype
, op1
),
6146 fold_convert (ctype
, c
));
6147 /* We allow the constant to overflow with wrapping semantics. */
6149 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
6155 /* If we have an unsigned type, we cannot widen the operation since it
6156 will change the result if the original computation overflowed. */
6157 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
6160 /* If we were able to eliminate our operation from the first side,
6161 apply our operation to the second side and reform the PLUS. */
6162 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
6163 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
6165 /* The last case is if we are a multiply. In that case, we can
6166 apply the distributive law to commute the multiply and addition
6167 if the multiplication of the constants doesn't overflow
6168 and overflow is defined. With undefined overflow
6169 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6170 if (code
== MULT_EXPR
&& TYPE_OVERFLOW_WRAPS (ctype
))
6171 return fold_build2 (tcode
, ctype
,
6172 fold_build2 (code
, ctype
,
6173 fold_convert (ctype
, op0
),
6174 fold_convert (ctype
, c
)),
6180 /* We have a special case here if we are doing something like
6181 (C * 8) % 4 since we know that's zero. */
6182 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
6183 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
6184 /* If the multiplication can overflow we cannot optimize this. */
6185 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
6186 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
6187 && wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6189 *strict_overflow_p
= true;
6190 return omit_one_operand (type
, integer_zero_node
, op0
);
6193 /* ... fall through ... */
6195 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6196 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
6197 /* If we can extract our operation from the LHS, do so and return a
6198 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6199 do something only if the second operand is a constant. */
6201 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6202 strict_overflow_p
)) != 0)
6203 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6204 fold_convert (ctype
, op1
));
6205 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
6206 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
6207 strict_overflow_p
)) != 0)
6208 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6209 fold_convert (ctype
, t1
));
6210 else if (TREE_CODE (op1
) != INTEGER_CST
)
6213 /* If these are the same operation types, we can associate them
6214 assuming no overflow. */
6217 bool overflow_p
= false;
6218 bool overflow_mul_p
;
6219 signop sign
= TYPE_SIGN (ctype
);
6220 wide_int mul
= wi::mul (op1
, c
, sign
, &overflow_mul_p
);
6221 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
6223 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
6226 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6227 wide_int_to_tree (ctype
, mul
));
6230 /* If these operations "cancel" each other, we have the main
6231 optimizations of this pass, which occur when either constant is a
6232 multiple of the other, in which case we replace this with either an
6233 operation or CODE or TCODE.
6235 If we have an unsigned type, we cannot do this since it will change
6236 the result if the original computation overflowed. */
6237 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
6238 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
6239 || (tcode
== MULT_EXPR
6240 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
6241 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6242 && code
!= MULT_EXPR
)))
6244 if (wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6246 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6247 *strict_overflow_p
= true;
6248 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6249 fold_convert (ctype
,
6250 const_binop (TRUNC_DIV_EXPR
,
6253 else if (wi::multiple_of_p (c
, op1
, TYPE_SIGN (type
)))
6255 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6256 *strict_overflow_p
= true;
6257 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6258 fold_convert (ctype
,
6259 const_binop (TRUNC_DIV_EXPR
,
6272 /* Return a node which has the indicated constant VALUE (either 0 or
6273 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6274 and is of the indicated TYPE. */
6277 constant_boolean_node (bool value
, tree type
)
6279 if (type
== integer_type_node
)
6280 return value
? integer_one_node
: integer_zero_node
;
6281 else if (type
== boolean_type_node
)
6282 return value
? boolean_true_node
: boolean_false_node
;
6283 else if (TREE_CODE (type
) == VECTOR_TYPE
)
6284 return build_vector_from_val (type
,
6285 build_int_cst (TREE_TYPE (type
),
6288 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
6292 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6293 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6294 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6295 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6296 COND is the first argument to CODE; otherwise (as in the example
6297 given here), it is the second argument. TYPE is the type of the
6298 original expression. Return NULL_TREE if no simplification is
6302 fold_binary_op_with_conditional_arg (location_t loc
,
6303 enum tree_code code
,
6304 tree type
, tree op0
, tree op1
,
6305 tree cond
, tree arg
, int cond_first_p
)
6307 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
6308 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
6309 tree test
, true_value
, false_value
;
6310 tree lhs
= NULL_TREE
;
6311 tree rhs
= NULL_TREE
;
6312 enum tree_code cond_code
= COND_EXPR
;
6314 if (TREE_CODE (cond
) == COND_EXPR
6315 || TREE_CODE (cond
) == VEC_COND_EXPR
)
6317 test
= TREE_OPERAND (cond
, 0);
6318 true_value
= TREE_OPERAND (cond
, 1);
6319 false_value
= TREE_OPERAND (cond
, 2);
6320 /* If this operand throws an expression, then it does not make
6321 sense to try to perform a logical or arithmetic operation
6323 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6325 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6330 tree testtype
= TREE_TYPE (cond
);
6332 true_value
= constant_boolean_node (true, testtype
);
6333 false_value
= constant_boolean_node (false, testtype
);
6336 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
6337 cond_code
= VEC_COND_EXPR
;
6339 /* This transformation is only worthwhile if we don't have to wrap ARG
6340 in a SAVE_EXPR and the operation can be simplified without recursing
6341 on at least one of the branches once its pushed inside the COND_EXPR. */
6342 if (!TREE_CONSTANT (arg
)
6343 && (TREE_SIDE_EFFECTS (arg
)
6344 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
6345 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6348 arg
= fold_convert_loc (loc
, arg_type
, arg
);
6351 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6353 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6355 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6359 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6361 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6363 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6366 /* Check that we have simplified at least one of the branches. */
6367 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6370 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
6374 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6376 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6377 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6378 ADDEND is the same as X.
6380 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6381 and finite. The problematic cases are when X is zero, and its mode
6382 has signed zeros. In the case of rounding towards -infinity,
6383 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6384 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6387 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6389 if (!real_zerop (addend
))
6392 /* Don't allow the fold with -fsignaling-nans. */
6393 if (HONOR_SNANS (element_mode (type
)))
6396 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6397 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
6400 /* In a vector or complex, we would need to check the sign of all zeros. */
6401 if (TREE_CODE (addend
) != REAL_CST
)
6404 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6405 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6408 /* The mode has signed zeros, and we have to honor their sign.
6409 In this situation, there is only one case we can return true for.
6410 X - 0 is the same as X unless rounding towards -infinity is
6412 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
));
6415 /* Subroutine of fold() that optimizes comparisons of a division by
6416 a nonzero integer constant against an integer constant, i.e.
6419 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6420 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6421 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6423 The function returns the constant folded tree if a simplification
6424 can be made, and NULL_TREE otherwise. */
6427 fold_div_compare (location_t loc
,
6428 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6430 tree prod
, tmp
, hi
, lo
;
6431 tree arg00
= TREE_OPERAND (arg0
, 0);
6432 tree arg01
= TREE_OPERAND (arg0
, 1);
6433 signop sign
= TYPE_SIGN (TREE_TYPE (arg0
));
6434 bool neg_overflow
= false;
6437 /* We have to do this the hard way to detect unsigned overflow.
6438 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6439 wide_int val
= wi::mul (arg01
, arg1
, sign
, &overflow
);
6440 prod
= force_fit_type (TREE_TYPE (arg00
), val
, -1, overflow
);
6441 neg_overflow
= false;
6443 if (sign
== UNSIGNED
)
6445 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6446 build_int_cst (TREE_TYPE (arg01
), 1));
6449 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6450 val
= wi::add (prod
, tmp
, sign
, &overflow
);
6451 hi
= force_fit_type (TREE_TYPE (arg00
), val
,
6452 -1, overflow
| TREE_OVERFLOW (prod
));
6454 else if (tree_int_cst_sgn (arg01
) >= 0)
6456 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6457 build_int_cst (TREE_TYPE (arg01
), 1));
6458 switch (tree_int_cst_sgn (arg1
))
6461 neg_overflow
= true;
6462 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6467 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6472 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6482 /* A negative divisor reverses the relational operators. */
6483 code
= swap_tree_comparison (code
);
6485 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6486 build_int_cst (TREE_TYPE (arg01
), 1));
6487 switch (tree_int_cst_sgn (arg1
))
6490 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6495 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6500 neg_overflow
= true;
6501 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6513 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6514 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6515 if (TREE_OVERFLOW (hi
))
6516 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6517 if (TREE_OVERFLOW (lo
))
6518 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6519 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6522 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6523 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6524 if (TREE_OVERFLOW (hi
))
6525 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6526 if (TREE_OVERFLOW (lo
))
6527 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6528 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6531 if (TREE_OVERFLOW (lo
))
6533 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6534 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6536 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6539 if (TREE_OVERFLOW (hi
))
6541 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6542 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6544 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6547 if (TREE_OVERFLOW (hi
))
6549 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6550 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6552 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6555 if (TREE_OVERFLOW (lo
))
6557 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6558 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6560 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6570 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6571 equality/inequality test, then return a simplified form of the test
6572 using a sign testing. Otherwise return NULL. TYPE is the desired
6576 fold_single_bit_test_into_sign_test (location_t loc
,
6577 enum tree_code code
, tree arg0
, tree arg1
,
6580 /* If this is testing a single bit, we can optimize the test. */
6581 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6582 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6583 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6585 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6586 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6587 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6589 if (arg00
!= NULL_TREE
6590 /* This is only a win if casting to a signed type is cheap,
6591 i.e. when arg00's type is not a partial mode. */
6592 && TYPE_PRECISION (TREE_TYPE (arg00
))
6593 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00
))))
6595 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6596 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6598 fold_convert_loc (loc
, stype
, arg00
),
6599 build_int_cst (stype
, 0));
6606 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6607 equality/inequality test, then return a simplified form of
6608 the test using shifts and logical operations. Otherwise return
6609 NULL. TYPE is the desired result type. */
6612 fold_single_bit_test (location_t loc
, enum tree_code code
,
6613 tree arg0
, tree arg1
, tree result_type
)
6615 /* If this is testing a single bit, we can optimize the test. */
6616 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6617 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6618 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6620 tree inner
= TREE_OPERAND (arg0
, 0);
6621 tree type
= TREE_TYPE (arg0
);
6622 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6623 machine_mode operand_mode
= TYPE_MODE (type
);
6625 tree signed_type
, unsigned_type
, intermediate_type
;
6628 /* First, see if we can fold the single bit test into a sign-bit
6630 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6635 /* Otherwise we have (A & C) != 0 where C is a single bit,
6636 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6637 Similarly for (A & C) == 0. */
6639 /* If INNER is a right shift of a constant and it plus BITNUM does
6640 not overflow, adjust BITNUM and INNER. */
6641 if (TREE_CODE (inner
) == RSHIFT_EXPR
6642 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6643 && bitnum
< TYPE_PRECISION (type
)
6644 && wi::ltu_p (TREE_OPERAND (inner
, 1),
6645 TYPE_PRECISION (type
) - bitnum
))
6647 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
6648 inner
= TREE_OPERAND (inner
, 0);
6651 /* If we are going to be able to omit the AND below, we must do our
6652 operations as unsigned. If we must use the AND, we have a choice.
6653 Normally unsigned is faster, but for some machines signed is. */
6654 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6655 && !flag_syntax_only
) ? 0 : 1;
6657 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6658 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6659 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6660 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
6663 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6664 inner
, size_int (bitnum
));
6666 one
= build_int_cst (intermediate_type
, 1);
6668 if (code
== EQ_EXPR
)
6669 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6671 /* Put the AND last so it can combine with more things. */
6672 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6674 /* Make sure to return the proper type. */
6675 inner
= fold_convert_loc (loc
, result_type
, inner
);
6682 /* Check whether we are allowed to reorder operands arg0 and arg1,
6683 such that the evaluation of arg1 occurs before arg0. */
6686 reorder_operands_p (const_tree arg0
, const_tree arg1
)
6688 if (! flag_evaluation_order
)
6690 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6692 return ! TREE_SIDE_EFFECTS (arg0
)
6693 && ! TREE_SIDE_EFFECTS (arg1
);
6696 /* Test whether it is preferable two swap two operands, ARG0 and
6697 ARG1, for example because ARG0 is an integer constant and ARG1
6698 isn't. If REORDER is true, only recommend swapping if we can
6699 evaluate the operands in reverse order. */
6702 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
6704 if (CONSTANT_CLASS_P (arg1
))
6706 if (CONSTANT_CLASS_P (arg0
))
6712 if (TREE_CONSTANT (arg1
))
6714 if (TREE_CONSTANT (arg0
))
6717 if (reorder
&& flag_evaluation_order
6718 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6721 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6722 for commutative and comparison operators. Ensuring a canonical
6723 form allows the optimizers to find additional redundancies without
6724 having to explicitly check for both orderings. */
6725 if (TREE_CODE (arg0
) == SSA_NAME
6726 && TREE_CODE (arg1
) == SSA_NAME
6727 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6730 /* Put SSA_NAMEs last. */
6731 if (TREE_CODE (arg1
) == SSA_NAME
)
6733 if (TREE_CODE (arg0
) == SSA_NAME
)
6736 /* Put variables last. */
6745 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6746 ARG0 is extended to a wider type. */
6749 fold_widened_comparison (location_t loc
, enum tree_code code
,
6750 tree type
, tree arg0
, tree arg1
)
6752 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6754 tree shorter_type
, outer_type
;
6758 if (arg0_unw
== arg0
)
6760 shorter_type
= TREE_TYPE (arg0_unw
);
6762 /* Disable this optimization if we're casting a function pointer
6763 type on targets that require function pointer canonicalization. */
6764 if (targetm
.have_canonicalize_funcptr_for_compare ()
6765 && TREE_CODE (shorter_type
) == POINTER_TYPE
6766 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6769 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6772 arg1_unw
= get_unwidened (arg1
, NULL_TREE
);
6774 /* If possible, express the comparison in the shorter mode. */
6775 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6776 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6777 && (TREE_TYPE (arg1_unw
) == shorter_type
6778 || ((TYPE_PRECISION (shorter_type
)
6779 >= TYPE_PRECISION (TREE_TYPE (arg1_unw
)))
6780 && (TYPE_UNSIGNED (shorter_type
)
6781 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw
))))
6782 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6783 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6784 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6785 && int_fits_type_p (arg1_unw
, shorter_type
))))
6786 return fold_build2_loc (loc
, code
, type
, arg0_unw
,
6787 fold_convert_loc (loc
, shorter_type
, arg1_unw
));
6789 if (TREE_CODE (arg1_unw
) != INTEGER_CST
6790 || TREE_CODE (shorter_type
) != INTEGER_TYPE
6791 || int_fits_type_p (arg1_unw
, shorter_type
))
6794 /* If we are comparing with the integer that does not fit into the range
6795 of the shorter type, the result is known. */
6796 outer_type
= TREE_TYPE (arg1_unw
);
6797 min
= lower_bound_in_type (outer_type
, shorter_type
);
6798 max
= upper_bound_in_type (outer_type
, shorter_type
);
6800 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6802 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6809 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6814 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6820 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6822 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6827 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6829 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6838 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6839 ARG0 just the signedness is changed. */
6842 fold_sign_changed_comparison (location_t loc
, enum tree_code code
, tree type
,
6843 tree arg0
, tree arg1
)
6846 tree inner_type
, outer_type
;
6848 if (!CONVERT_EXPR_P (arg0
))
6851 outer_type
= TREE_TYPE (arg0
);
6852 arg0_inner
= TREE_OPERAND (arg0
, 0);
6853 inner_type
= TREE_TYPE (arg0_inner
);
6855 /* Disable this optimization if we're casting a function pointer
6856 type on targets that require function pointer canonicalization. */
6857 if (targetm
.have_canonicalize_funcptr_for_compare ()
6858 && TREE_CODE (inner_type
) == POINTER_TYPE
6859 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
6862 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
6865 if (TREE_CODE (arg1
) != INTEGER_CST
6866 && !(CONVERT_EXPR_P (arg1
)
6867 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
6870 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
6875 if (POINTER_TYPE_P (inner_type
) != POINTER_TYPE_P (outer_type
))
6878 if (TREE_CODE (arg1
) == INTEGER_CST
)
6879 arg1
= force_fit_type (inner_type
, wi::to_widest (arg1
), 0,
6880 TREE_OVERFLOW (arg1
));
6882 arg1
= fold_convert_loc (loc
, inner_type
, arg1
);
6884 return fold_build2_loc (loc
, code
, type
, arg0_inner
, arg1
);
6888 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6889 means A >= Y && A != MAX, but in this case we know that
6890 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6893 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
6895 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
6897 if (TREE_CODE (bound
) == LT_EXPR
)
6898 a
= TREE_OPERAND (bound
, 0);
6899 else if (TREE_CODE (bound
) == GT_EXPR
)
6900 a
= TREE_OPERAND (bound
, 1);
6904 typea
= TREE_TYPE (a
);
6905 if (!INTEGRAL_TYPE_P (typea
)
6906 && !POINTER_TYPE_P (typea
))
6909 if (TREE_CODE (ineq
) == LT_EXPR
)
6911 a1
= TREE_OPERAND (ineq
, 1);
6912 y
= TREE_OPERAND (ineq
, 0);
6914 else if (TREE_CODE (ineq
) == GT_EXPR
)
6916 a1
= TREE_OPERAND (ineq
, 0);
6917 y
= TREE_OPERAND (ineq
, 1);
6922 if (TREE_TYPE (a1
) != typea
)
6925 if (POINTER_TYPE_P (typea
))
6927 /* Convert the pointer types into integer before taking the difference. */
6928 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
6929 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
6930 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
6933 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
6935 if (!diff
|| !integer_onep (diff
))
6938 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
6941 /* Fold a sum or difference of at least one multiplication.
6942 Returns the folded tree or NULL if no simplification could be made. */
6945 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
6946 tree arg0
, tree arg1
)
6948 tree arg00
, arg01
, arg10
, arg11
;
6949 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
6951 /* (A * C) +- (B * C) -> (A+-B) * C.
6952 (A * C) +- A -> A * (C+-1).
6953 We are most concerned about the case where C is a constant,
6954 but other combinations show up during loop reduction. Since
6955 it is not difficult, try all four possibilities. */
6957 if (TREE_CODE (arg0
) == MULT_EXPR
)
6959 arg00
= TREE_OPERAND (arg0
, 0);
6960 arg01
= TREE_OPERAND (arg0
, 1);
6962 else if (TREE_CODE (arg0
) == INTEGER_CST
)
6964 arg00
= build_one_cst (type
);
6969 /* We cannot generate constant 1 for fract. */
6970 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6973 arg01
= build_one_cst (type
);
6975 if (TREE_CODE (arg1
) == MULT_EXPR
)
6977 arg10
= TREE_OPERAND (arg1
, 0);
6978 arg11
= TREE_OPERAND (arg1
, 1);
6980 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6982 arg10
= build_one_cst (type
);
6983 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6984 the purpose of this canonicalization. */
6985 if (wi::neg_p (arg1
, TYPE_SIGN (TREE_TYPE (arg1
)))
6986 && negate_expr_p (arg1
)
6987 && code
== PLUS_EXPR
)
6989 arg11
= negate_expr (arg1
);
6997 /* We cannot generate constant 1 for fract. */
6998 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7001 arg11
= build_one_cst (type
);
7005 if (operand_equal_p (arg01
, arg11
, 0))
7006 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7007 else if (operand_equal_p (arg00
, arg10
, 0))
7008 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7009 else if (operand_equal_p (arg00
, arg11
, 0))
7010 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7011 else if (operand_equal_p (arg01
, arg10
, 0))
7012 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7014 /* No identical multiplicands; see if we can find a common
7015 power-of-two factor in non-power-of-two multiplies. This
7016 can help in multi-dimensional array access. */
7017 else if (tree_fits_shwi_p (arg01
)
7018 && tree_fits_shwi_p (arg11
))
7020 HOST_WIDE_INT int01
, int11
, tmp
;
7023 int01
= tree_to_shwi (arg01
);
7024 int11
= tree_to_shwi (arg11
);
7026 /* Move min of absolute values to int11. */
7027 if (absu_hwi (int01
) < absu_hwi (int11
))
7029 tmp
= int01
, int01
= int11
, int11
= tmp
;
7030 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7037 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
7038 /* The remainder should not be a constant, otherwise we
7039 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7040 increased the number of multiplications necessary. */
7041 && TREE_CODE (arg10
) != INTEGER_CST
)
7043 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7044 build_int_cst (TREE_TYPE (arg00
),
7049 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7054 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7055 fold_build2_loc (loc
, code
, type
,
7056 fold_convert_loc (loc
, type
, alt0
),
7057 fold_convert_loc (loc
, type
, alt1
)),
7058 fold_convert_loc (loc
, type
, same
));
7063 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7064 specified by EXPR into the buffer PTR of length LEN bytes.
7065 Return the number of bytes placed in the buffer, or zero
7069 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7071 tree type
= TREE_TYPE (expr
);
7072 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7073 int byte
, offset
, word
, words
;
7074 unsigned char value
;
7076 if ((off
== -1 && total_bytes
> len
)
7077 || off
>= total_bytes
)
7081 words
= total_bytes
/ UNITS_PER_WORD
;
7083 for (byte
= 0; byte
< total_bytes
; byte
++)
7085 int bitpos
= byte
* BITS_PER_UNIT
;
7086 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7088 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7090 if (total_bytes
> UNITS_PER_WORD
)
7092 word
= byte
/ UNITS_PER_WORD
;
7093 if (WORDS_BIG_ENDIAN
)
7094 word
= (words
- 1) - word
;
7095 offset
= word
* UNITS_PER_WORD
;
7096 if (BYTES_BIG_ENDIAN
)
7097 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7099 offset
+= byte
% UNITS_PER_WORD
;
7102 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7104 && offset
- off
< len
)
7105 ptr
[offset
- off
] = value
;
7107 return MIN (len
, total_bytes
- off
);
7111 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7112 specified by EXPR into the buffer PTR of length LEN bytes.
7113 Return the number of bytes placed in the buffer, or zero
7117 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7119 tree type
= TREE_TYPE (expr
);
7120 machine_mode mode
= TYPE_MODE (type
);
7121 int total_bytes
= GET_MODE_SIZE (mode
);
7122 FIXED_VALUE_TYPE value
;
7123 tree i_value
, i_type
;
7125 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7128 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7130 if (NULL_TREE
== i_type
7131 || TYPE_PRECISION (i_type
) != total_bytes
)
7134 value
= TREE_FIXED_CST (expr
);
7135 i_value
= double_int_to_tree (i_type
, value
.data
);
7137 return native_encode_int (i_value
, ptr
, len
, off
);
7141 /* Subroutine of native_encode_expr. Encode the REAL_CST
7142 specified by EXPR into the buffer PTR of length LEN bytes.
7143 Return the number of bytes placed in the buffer, or zero
7147 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7149 tree type
= TREE_TYPE (expr
);
7150 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7151 int byte
, offset
, word
, words
, bitpos
;
7152 unsigned char value
;
7154 /* There are always 32 bits in each long, no matter the size of
7155 the hosts long. We handle floating point representations with
7159 if ((off
== -1 && total_bytes
> len
)
7160 || off
>= total_bytes
)
7164 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7166 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7168 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7169 bitpos
+= BITS_PER_UNIT
)
7171 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7172 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7174 if (UNITS_PER_WORD
< 4)
7176 word
= byte
/ UNITS_PER_WORD
;
7177 if (WORDS_BIG_ENDIAN
)
7178 word
= (words
- 1) - word
;
7179 offset
= word
* UNITS_PER_WORD
;
7180 if (BYTES_BIG_ENDIAN
)
7181 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7183 offset
+= byte
% UNITS_PER_WORD
;
7186 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7187 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7189 && offset
- off
< len
)
7190 ptr
[offset
- off
] = value
;
7192 return MIN (len
, total_bytes
- off
);
7195 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7196 specified by EXPR into the buffer PTR of length LEN bytes.
7197 Return the number of bytes placed in the buffer, or zero
7201 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7206 part
= TREE_REALPART (expr
);
7207 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7211 part
= TREE_IMAGPART (expr
);
7213 off
= MAX (0, off
- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part
))));
7214 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
, off
);
7218 return rsize
+ isize
;
7222 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7223 specified by EXPR into the buffer PTR of length LEN bytes.
7224 Return the number of bytes placed in the buffer, or zero
7228 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7235 count
= VECTOR_CST_NELTS (expr
);
7236 itype
= TREE_TYPE (TREE_TYPE (expr
));
7237 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7238 for (i
= 0; i
< count
; i
++)
7245 elem
= VECTOR_CST_ELT (expr
, i
);
7246 int res
= native_encode_expr (elem
, ptr
+offset
, len
-offset
, off
);
7247 if ((off
== -1 && res
!= size
)
7260 /* Subroutine of native_encode_expr. Encode the STRING_CST
7261 specified by EXPR into the buffer PTR of length LEN bytes.
7262 Return the number of bytes placed in the buffer, or zero
7266 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7268 tree type
= TREE_TYPE (expr
);
7269 HOST_WIDE_INT total_bytes
;
7271 if (TREE_CODE (type
) != ARRAY_TYPE
7272 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7273 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7274 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7276 total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (type
));
7277 if ((off
== -1 && total_bytes
> len
)
7278 || off
>= total_bytes
)
7282 if (TREE_STRING_LENGTH (expr
) - off
< MIN (total_bytes
, len
))
7285 if (off
< TREE_STRING_LENGTH (expr
))
7287 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7288 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7290 memset (ptr
+ written
, 0,
7291 MIN (total_bytes
- written
, len
- written
));
7294 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, MIN (total_bytes
, len
));
7295 return MIN (total_bytes
- off
, len
);
7299 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7300 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7301 buffer PTR of length LEN bytes. If OFF is not -1 then start
7302 the encoding at byte offset OFF and encode at most LEN bytes.
7303 Return the number of bytes placed in the buffer, or zero upon failure. */
7306 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7308 switch (TREE_CODE (expr
))
7311 return native_encode_int (expr
, ptr
, len
, off
);
7314 return native_encode_real (expr
, ptr
, len
, off
);
7317 return native_encode_fixed (expr
, ptr
, len
, off
);
7320 return native_encode_complex (expr
, ptr
, len
, off
);
7323 return native_encode_vector (expr
, ptr
, len
, off
);
7326 return native_encode_string (expr
, ptr
, len
, off
);
7334 /* Subroutine of native_interpret_expr. Interpret the contents of
7335 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7336 If the buffer cannot be interpreted, return NULL_TREE. */
7339 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7341 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7343 if (total_bytes
> len
7344 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7347 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
7349 return wide_int_to_tree (type
, result
);
7353 /* Subroutine of native_interpret_expr. Interpret the contents of
7354 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7355 If the buffer cannot be interpreted, return NULL_TREE. */
7358 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
7360 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7362 FIXED_VALUE_TYPE fixed_value
;
7364 if (total_bytes
> len
7365 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7368 result
= double_int::from_buffer (ptr
, total_bytes
);
7369 fixed_value
= fixed_from_double_int (result
, TYPE_MODE (type
));
7371 return build_fixed (type
, fixed_value
);
7375 /* Subroutine of native_interpret_expr. Interpret the contents of
7376 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7377 If the buffer cannot be interpreted, return NULL_TREE. */
7380 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7382 machine_mode mode
= TYPE_MODE (type
);
7383 int total_bytes
= GET_MODE_SIZE (mode
);
7384 int byte
, offset
, word
, words
, bitpos
;
7385 unsigned char value
;
7386 /* There are always 32 bits in each long, no matter the size of
7387 the hosts long. We handle floating point representations with
7392 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7393 if (total_bytes
> len
|| total_bytes
> 24)
7395 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7397 memset (tmp
, 0, sizeof (tmp
));
7398 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7399 bitpos
+= BITS_PER_UNIT
)
7401 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7402 if (UNITS_PER_WORD
< 4)
7404 word
= byte
/ UNITS_PER_WORD
;
7405 if (WORDS_BIG_ENDIAN
)
7406 word
= (words
- 1) - word
;
7407 offset
= word
* UNITS_PER_WORD
;
7408 if (BYTES_BIG_ENDIAN
)
7409 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7411 offset
+= byte
% UNITS_PER_WORD
;
7414 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7415 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7417 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7420 real_from_target (&r
, tmp
, mode
);
7421 return build_real (type
, r
);
7425 /* Subroutine of native_interpret_expr. Interpret the contents of
7426 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7427 If the buffer cannot be interpreted, return NULL_TREE. */
7430 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7432 tree etype
, rpart
, ipart
;
7435 etype
= TREE_TYPE (type
);
7436 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7439 rpart
= native_interpret_expr (etype
, ptr
, size
);
7442 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7445 return build_complex (type
, rpart
, ipart
);
7449 /* Subroutine of native_interpret_expr. Interpret the contents of
7450 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7451 If the buffer cannot be interpreted, return NULL_TREE. */
7454 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7460 etype
= TREE_TYPE (type
);
7461 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7462 count
= TYPE_VECTOR_SUBPARTS (type
);
7463 if (size
* count
> len
)
7466 elements
= XALLOCAVEC (tree
, count
);
7467 for (i
= count
- 1; i
>= 0; i
--)
7469 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7474 return build_vector (type
, elements
);
7478 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7479 the buffer PTR of length LEN as a constant of type TYPE. For
7480 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7481 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7482 return NULL_TREE. */
7485 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7487 switch (TREE_CODE (type
))
7493 case REFERENCE_TYPE
:
7494 return native_interpret_int (type
, ptr
, len
);
7497 return native_interpret_real (type
, ptr
, len
);
7499 case FIXED_POINT_TYPE
:
7500 return native_interpret_fixed (type
, ptr
, len
);
7503 return native_interpret_complex (type
, ptr
, len
);
7506 return native_interpret_vector (type
, ptr
, len
);
7513 /* Returns true if we can interpret the contents of a native encoding
7517 can_native_interpret_type_p (tree type
)
7519 switch (TREE_CODE (type
))
7525 case REFERENCE_TYPE
:
7526 case FIXED_POINT_TYPE
:
7536 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7537 TYPE at compile-time. If we're unable to perform the conversion
7538 return NULL_TREE. */
7541 fold_view_convert_expr (tree type
, tree expr
)
7543 /* We support up to 512-bit values (for V8DFmode). */
7544 unsigned char buffer
[64];
7547 /* Check that the host and target are sane. */
7548 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7551 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7555 return native_interpret_expr (type
, buffer
, len
);
7558 /* Build an expression for the address of T. Folds away INDIRECT_REF
7559 to avoid confusing the gimplify process. */
7562 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7564 /* The size of the object is not relevant when talking about its address. */
7565 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7566 t
= TREE_OPERAND (t
, 0);
7568 if (TREE_CODE (t
) == INDIRECT_REF
)
7570 t
= TREE_OPERAND (t
, 0);
7572 if (TREE_TYPE (t
) != ptrtype
)
7573 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7575 else if (TREE_CODE (t
) == MEM_REF
7576 && integer_zerop (TREE_OPERAND (t
, 1)))
7577 return TREE_OPERAND (t
, 0);
7578 else if (TREE_CODE (t
) == MEM_REF
7579 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7580 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7581 TREE_OPERAND (t
, 0),
7582 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7583 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7585 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7587 if (TREE_TYPE (t
) != ptrtype
)
7588 t
= fold_convert_loc (loc
, ptrtype
, t
);
7591 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7596 /* Build an expression for the address of T. */
7599 build_fold_addr_expr_loc (location_t loc
, tree t
)
7601 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7603 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7606 /* Fold a unary expression of code CODE and type TYPE with operand
7607 OP0. Return the folded expression if folding is successful.
7608 Otherwise, return NULL_TREE. */
7611 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7615 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7617 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7618 && TREE_CODE_LENGTH (code
) == 1);
7623 if (CONVERT_EXPR_CODE_P (code
)
7624 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7626 /* Don't use STRIP_NOPS, because signedness of argument type
7628 STRIP_SIGN_NOPS (arg0
);
7632 /* Strip any conversions that don't change the mode. This
7633 is safe for every expression, except for a comparison
7634 expression because its signedness is derived from its
7637 Note that this is done as an internal manipulation within
7638 the constant folder, in order to find the simplest
7639 representation of the arguments so that their form can be
7640 studied. In any cases, the appropriate type conversions
7641 should be put back in the tree that will get out of the
7646 if (CONSTANT_CLASS_P (arg0
))
7648 tree tem
= const_unop (code
, type
, arg0
);
7651 if (TREE_TYPE (tem
) != type
)
7652 tem
= fold_convert_loc (loc
, type
, tem
);
7658 tem
= generic_simplify (loc
, code
, type
, op0
);
7662 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7664 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7665 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7666 fold_build1_loc (loc
, code
, type
,
7667 fold_convert_loc (loc
, TREE_TYPE (op0
),
7668 TREE_OPERAND (arg0
, 1))));
7669 else if (TREE_CODE (arg0
) == COND_EXPR
)
7671 tree arg01
= TREE_OPERAND (arg0
, 1);
7672 tree arg02
= TREE_OPERAND (arg0
, 2);
7673 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7674 arg01
= fold_build1_loc (loc
, code
, type
,
7675 fold_convert_loc (loc
,
7676 TREE_TYPE (op0
), arg01
));
7677 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7678 arg02
= fold_build1_loc (loc
, code
, type
,
7679 fold_convert_loc (loc
,
7680 TREE_TYPE (op0
), arg02
));
7681 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7684 /* If this was a conversion, and all we did was to move into
7685 inside the COND_EXPR, bring it back out. But leave it if
7686 it is a conversion from integer to integer and the
7687 result precision is no wider than a word since such a
7688 conversion is cheap and may be optimized away by combine,
7689 while it couldn't if it were outside the COND_EXPR. Then return
7690 so we don't get into an infinite recursion loop taking the
7691 conversion out and then back in. */
7693 if ((CONVERT_EXPR_CODE_P (code
)
7694 || code
== NON_LVALUE_EXPR
)
7695 && TREE_CODE (tem
) == COND_EXPR
7696 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7697 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7698 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7699 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7700 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7701 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7702 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7704 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7705 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7706 || flag_syntax_only
))
7707 tem
= build1_loc (loc
, code
, type
,
7709 TREE_TYPE (TREE_OPERAND
7710 (TREE_OPERAND (tem
, 1), 0)),
7711 TREE_OPERAND (tem
, 0),
7712 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7713 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7721 case NON_LVALUE_EXPR
:
7722 if (!maybe_lvalue_p (op0
))
7723 return fold_convert_loc (loc
, type
, op0
);
7728 case FIX_TRUNC_EXPR
:
7729 if (COMPARISON_CLASS_P (op0
))
7731 /* If we have (type) (a CMP b) and type is an integral type, return
7732 new expression involving the new type. Canonicalize
7733 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7735 Do not fold the result as that would not simplify further, also
7736 folding again results in recursions. */
7737 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7738 return build2_loc (loc
, TREE_CODE (op0
), type
,
7739 TREE_OPERAND (op0
, 0),
7740 TREE_OPERAND (op0
, 1));
7741 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7742 && TREE_CODE (type
) != VECTOR_TYPE
)
7743 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7744 constant_boolean_node (true, type
),
7745 constant_boolean_node (false, type
));
7748 /* Handle (T *)&A.B.C for A being of type T and B and C
7749 living at offset zero. This occurs frequently in
7750 C++ upcasting and then accessing the base. */
7751 if (TREE_CODE (op0
) == ADDR_EXPR
7752 && POINTER_TYPE_P (type
)
7753 && handled_component_p (TREE_OPERAND (op0
, 0)))
7755 HOST_WIDE_INT bitsize
, bitpos
;
7758 int unsignedp
, volatilep
;
7759 tree base
= TREE_OPERAND (op0
, 0);
7760 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7761 &mode
, &unsignedp
, &volatilep
, false);
7762 /* If the reference was to a (constant) zero offset, we can use
7763 the address of the base if it has the same base type
7764 as the result type and the pointer type is unqualified. */
7765 if (! offset
&& bitpos
== 0
7766 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7767 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7768 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7769 return fold_convert_loc (loc
, type
,
7770 build_fold_addr_expr_loc (loc
, base
));
7773 if (TREE_CODE (op0
) == MODIFY_EXPR
7774 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7775 /* Detect assigning a bitfield. */
7776 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7778 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7780 /* Don't leave an assignment inside a conversion
7781 unless assigning a bitfield. */
7782 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7783 /* First do the assignment, then return converted constant. */
7784 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7785 TREE_NO_WARNING (tem
) = 1;
7786 TREE_USED (tem
) = 1;
7790 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7791 constants (if x has signed type, the sign bit cannot be set
7792 in c). This folds extension into the BIT_AND_EXPR.
7793 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7794 very likely don't have maximal range for their precision and this
7795 transformation effectively doesn't preserve non-maximal ranges. */
7796 if (TREE_CODE (type
) == INTEGER_TYPE
7797 && TREE_CODE (op0
) == BIT_AND_EXPR
7798 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7800 tree and_expr
= op0
;
7801 tree and0
= TREE_OPERAND (and_expr
, 0);
7802 tree and1
= TREE_OPERAND (and_expr
, 1);
7805 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7806 || (TYPE_PRECISION (type
)
7807 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7809 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7810 <= HOST_BITS_PER_WIDE_INT
7811 && tree_fits_uhwi_p (and1
))
7813 unsigned HOST_WIDE_INT cst
;
7815 cst
= tree_to_uhwi (and1
);
7816 cst
&= HOST_WIDE_INT_M1U
7817 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7818 change
= (cst
== 0);
7820 && !flag_syntax_only
7821 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7824 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7825 and0
= fold_convert_loc (loc
, uns
, and0
);
7826 and1
= fold_convert_loc (loc
, uns
, and1
);
7831 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
7832 TREE_OVERFLOW (and1
));
7833 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7834 fold_convert_loc (loc
, type
, and0
), tem
);
7838 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7839 when one of the new casts will fold away. Conservatively we assume
7840 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7841 if (POINTER_TYPE_P (type
)
7842 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7843 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7844 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7845 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7846 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7848 tree arg00
= TREE_OPERAND (arg0
, 0);
7849 tree arg01
= TREE_OPERAND (arg0
, 1);
7851 return fold_build_pointer_plus_loc
7852 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7855 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7856 of the same precision, and X is an integer type not narrower than
7857 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7858 if (INTEGRAL_TYPE_P (type
)
7859 && TREE_CODE (op0
) == BIT_NOT_EXPR
7860 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7861 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7862 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7864 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7865 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7866 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7867 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7868 fold_convert_loc (loc
, type
, tem
));
7871 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7872 type of X and Y (integer types only). */
7873 if (INTEGRAL_TYPE_P (type
)
7874 && TREE_CODE (op0
) == MULT_EXPR
7875 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7876 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7878 /* Be careful not to introduce new overflows. */
7880 if (TYPE_OVERFLOW_WRAPS (type
))
7883 mult_type
= unsigned_type_for (type
);
7885 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7887 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7888 fold_convert_loc (loc
, mult_type
,
7889 TREE_OPERAND (op0
, 0)),
7890 fold_convert_loc (loc
, mult_type
,
7891 TREE_OPERAND (op0
, 1)));
7892 return fold_convert_loc (loc
, type
, tem
);
7898 case VIEW_CONVERT_EXPR
:
7899 if (TREE_CODE (op0
) == MEM_REF
)
7900 return fold_build2_loc (loc
, MEM_REF
, type
,
7901 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
7906 tem
= fold_negate_expr (loc
, arg0
);
7908 return fold_convert_loc (loc
, type
, tem
);
7912 /* Convert fabs((double)float) into (double)fabsf(float). */
7913 if (TREE_CODE (arg0
) == NOP_EXPR
7914 && TREE_CODE (type
) == REAL_TYPE
)
7916 tree targ0
= strip_float_extensions (arg0
);
7918 return fold_convert_loc (loc
, type
,
7919 fold_build1_loc (loc
, ABS_EXPR
,
7924 /* Strip sign ops from argument. */
7925 if (TREE_CODE (type
) == REAL_TYPE
)
7927 tem
= fold_strip_sign_ops (arg0
);
7929 return fold_build1_loc (loc
, ABS_EXPR
, type
,
7930 fold_convert_loc (loc
, type
, tem
));
7935 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7936 return fold_convert_loc (loc
, type
, arg0
);
7937 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
7939 tree itype
= TREE_TYPE (type
);
7940 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
7941 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
7942 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
7943 negate_expr (ipart
));
7945 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7946 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
7950 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7951 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7952 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7953 fold_convert_loc (loc
, type
,
7954 TREE_OPERAND (arg0
, 0)))))
7955 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
7956 fold_convert_loc (loc
, type
,
7957 TREE_OPERAND (arg0
, 1)));
7958 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7959 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7960 fold_convert_loc (loc
, type
,
7961 TREE_OPERAND (arg0
, 1)))))
7962 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
7963 fold_convert_loc (loc
, type
,
7964 TREE_OPERAND (arg0
, 0)), tem
);
7968 case TRUTH_NOT_EXPR
:
7969 /* Note that the operand of this must be an int
7970 and its values must be 0 or 1.
7971 ("true" is a fixed value perhaps depending on the language,
7972 but we don't handle values other than 1 correctly yet.) */
7973 tem
= fold_truth_not_expr (loc
, arg0
);
7976 return fold_convert_loc (loc
, type
, tem
);
7979 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7980 return fold_convert_loc (loc
, type
, arg0
);
7981 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
7983 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7984 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
7985 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
7986 TREE_OPERAND (arg0
, 0)),
7987 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
7988 TREE_OPERAND (arg0
, 1)));
7989 return fold_convert_loc (loc
, type
, tem
);
7991 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7993 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7994 tem
= fold_build1_loc (loc
, REALPART_EXPR
, itype
,
7995 TREE_OPERAND (arg0
, 0));
7996 return fold_convert_loc (loc
, type
, tem
);
7998 if (TREE_CODE (arg0
) == CALL_EXPR
)
8000 tree fn
= get_callee_fndecl (arg0
);
8001 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8002 switch (DECL_FUNCTION_CODE (fn
))
8004 CASE_FLT_FN (BUILT_IN_CEXPI
):
8005 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
8007 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8017 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8018 return build_zero_cst (type
);
8019 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8021 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8022 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8023 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8024 TREE_OPERAND (arg0
, 0)),
8025 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8026 TREE_OPERAND (arg0
, 1)));
8027 return fold_convert_loc (loc
, type
, tem
);
8029 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8031 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8032 tem
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
8033 return fold_convert_loc (loc
, type
, negate_expr (tem
));
8035 if (TREE_CODE (arg0
) == CALL_EXPR
)
8037 tree fn
= get_callee_fndecl (arg0
);
8038 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8039 switch (DECL_FUNCTION_CODE (fn
))
8041 CASE_FLT_FN (BUILT_IN_CEXPI
):
8042 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
8044 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8054 /* Fold *&X to X if X is an lvalue. */
8055 if (TREE_CODE (op0
) == ADDR_EXPR
)
8057 tree op00
= TREE_OPERAND (op0
, 0);
8058 if ((TREE_CODE (op00
) == VAR_DECL
8059 || TREE_CODE (op00
) == PARM_DECL
8060 || TREE_CODE (op00
) == RESULT_DECL
)
8061 && !TREE_READONLY (op00
))
8068 } /* switch (code) */
8072 /* If the operation was a conversion do _not_ mark a resulting constant
8073 with TREE_OVERFLOW if the original constant was not. These conversions
8074 have implementation defined behavior and retaining the TREE_OVERFLOW
8075 flag here would confuse later passes such as VRP. */
8077 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
8078 tree type
, tree op0
)
8080 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
8082 && TREE_CODE (res
) == INTEGER_CST
8083 && TREE_CODE (op0
) == INTEGER_CST
8084 && CONVERT_EXPR_CODE_P (code
))
8085 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
8090 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8091 operands OP0 and OP1. LOC is the location of the resulting expression.
8092 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8093 Return the folded expression if folding is successful. Otherwise,
8094 return NULL_TREE. */
8096 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
8097 tree arg0
, tree arg1
, tree op0
, tree op1
)
8101 /* We only do these simplifications if we are optimizing. */
8105 /* Check for things like (A || B) && (A || C). We can convert this
8106 to A || (B && C). Note that either operator can be any of the four
8107 truth and/or operations and the transformation will still be
8108 valid. Also note that we only care about order for the
8109 ANDIF and ORIF operators. If B contains side effects, this
8110 might change the truth-value of A. */
8111 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8112 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8113 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8114 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8115 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8116 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8118 tree a00
= TREE_OPERAND (arg0
, 0);
8119 tree a01
= TREE_OPERAND (arg0
, 1);
8120 tree a10
= TREE_OPERAND (arg1
, 0);
8121 tree a11
= TREE_OPERAND (arg1
, 1);
8122 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8123 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8124 && (code
== TRUTH_AND_EXPR
8125 || code
== TRUTH_OR_EXPR
));
8127 if (operand_equal_p (a00
, a10
, 0))
8128 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8129 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8130 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8131 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8132 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8133 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8134 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8135 fold_build2_loc (loc
, code
, type
, a00
, a11
));
8137 /* This case if tricky because we must either have commutative
8138 operators or else A10 must not have side-effects. */
8140 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8141 && operand_equal_p (a01
, a11
, 0))
8142 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8143 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8147 /* See if we can build a range comparison. */
8148 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
8151 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8152 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8154 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8156 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8159 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8160 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8162 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8164 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
8167 /* Check for the possibility of merging component references. If our
8168 lhs is another similar operation, try to merge its rhs with our
8169 rhs. Then try to merge our lhs and rhs. */
8170 if (TREE_CODE (arg0
) == code
8171 && 0 != (tem
= fold_truth_andor_1 (loc
, code
, type
,
8172 TREE_OPERAND (arg0
, 1), arg1
)))
8173 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8175 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
8178 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8179 && (code
== TRUTH_AND_EXPR
8180 || code
== TRUTH_ANDIF_EXPR
8181 || code
== TRUTH_OR_EXPR
8182 || code
== TRUTH_ORIF_EXPR
))
8184 enum tree_code ncode
, icode
;
8186 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
8187 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
8188 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
8190 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8191 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8192 We don't want to pack more than two leafs to a non-IF AND/OR
8194 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8195 equal to IF-CODE, then we don't want to add right-hand operand.
8196 If the inner right-hand side of left-hand operand has
8197 side-effects, or isn't simple, then we can't add to it,
8198 as otherwise we might destroy if-sequence. */
8199 if (TREE_CODE (arg0
) == icode
8200 && simple_operand_p_2 (arg1
)
8201 /* Needed for sequence points to handle trappings, and
8203 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
8205 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
8207 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
8210 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8211 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8212 else if (TREE_CODE (arg1
) == icode
8213 && simple_operand_p_2 (arg0
)
8214 /* Needed for sequence points to handle trappings, and
8216 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
8218 tem
= fold_build2_loc (loc
, ncode
, type
,
8219 arg0
, TREE_OPERAND (arg1
, 0));
8220 return fold_build2_loc (loc
, icode
, type
, tem
,
8221 TREE_OPERAND (arg1
, 1));
8223 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8225 For sequence point consistancy, we need to check for trapping,
8226 and side-effects. */
8227 else if (code
== icode
&& simple_operand_p_2 (arg0
)
8228 && simple_operand_p_2 (arg1
))
8229 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
8235 /* Fold a binary expression of code CODE and type TYPE with operands
8236 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8237 Return the folded expression if folding is successful. Otherwise,
8238 return NULL_TREE. */
8241 fold_minmax (location_t loc
, enum tree_code code
, tree type
, tree op0
, tree op1
)
8243 enum tree_code compl_code
;
8245 if (code
== MIN_EXPR
)
8246 compl_code
= MAX_EXPR
;
8247 else if (code
== MAX_EXPR
)
8248 compl_code
= MIN_EXPR
;
8252 /* MIN (MAX (a, b), b) == b. */
8253 if (TREE_CODE (op0
) == compl_code
8254 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
8255 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 0));
8257 /* MIN (MAX (b, a), b) == b. */
8258 if (TREE_CODE (op0
) == compl_code
8259 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
8260 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
8261 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 1));
8263 /* MIN (a, MAX (a, b)) == a. */
8264 if (TREE_CODE (op1
) == compl_code
8265 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
8266 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
8267 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 1));
8269 /* MIN (a, MAX (b, a)) == a. */
8270 if (TREE_CODE (op1
) == compl_code
8271 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
8272 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
8273 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 0));
8278 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8279 by changing CODE to reduce the magnitude of constants involved in
8280 ARG0 of the comparison.
8281 Returns a canonicalized comparison tree if a simplification was
8282 possible, otherwise returns NULL_TREE.
8283 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8284 valid if signed overflow is undefined. */
8287 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8288 tree arg0
, tree arg1
,
8289 bool *strict_overflow_p
)
8291 enum tree_code code0
= TREE_CODE (arg0
);
8292 tree t
, cst0
= NULL_TREE
;
8296 /* Match A +- CST code arg1 and CST code arg1. We can change the
8297 first form only if overflow is undefined. */
8298 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8299 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
8300 /* In principle pointers also have undefined overflow behavior,
8301 but that causes problems elsewhere. */
8302 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8303 && (code0
== MINUS_EXPR
8304 || code0
== PLUS_EXPR
)
8305 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8306 || code0
== INTEGER_CST
))
8309 /* Identify the constant in arg0 and its sign. */
8310 if (code0
== INTEGER_CST
)
8313 cst0
= TREE_OPERAND (arg0
, 1);
8314 sgn0
= tree_int_cst_sgn (cst0
);
8316 /* Overflowed constants and zero will cause problems. */
8317 if (integer_zerop (cst0
)
8318 || TREE_OVERFLOW (cst0
))
8321 /* See if we can reduce the magnitude of the constant in
8322 arg0 by changing the comparison code. */
8323 if (code0
== INTEGER_CST
)
8325 /* CST <= arg1 -> CST-1 < arg1. */
8326 if (code
== LE_EXPR
&& sgn0
== 1)
8328 /* -CST < arg1 -> -CST-1 <= arg1. */
8329 else if (code
== LT_EXPR
&& sgn0
== -1)
8331 /* CST > arg1 -> CST-1 >= arg1. */
8332 else if (code
== GT_EXPR
&& sgn0
== 1)
8334 /* -CST >= arg1 -> -CST-1 > arg1. */
8335 else if (code
== GE_EXPR
&& sgn0
== -1)
8339 /* arg1 code' CST' might be more canonical. */
8344 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8346 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8348 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8349 else if (code
== GT_EXPR
8350 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8352 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8353 else if (code
== LE_EXPR
8354 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8356 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8357 else if (code
== GE_EXPR
8358 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8362 *strict_overflow_p
= true;
8365 /* Now build the constant reduced in magnitude. But not if that
8366 would produce one outside of its types range. */
8367 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8369 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8370 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8372 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8373 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8374 /* We cannot swap the comparison here as that would cause us to
8375 endlessly recurse. */
8378 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8379 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8380 if (code0
!= INTEGER_CST
)
8381 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8382 t
= fold_convert (TREE_TYPE (arg1
), t
);
8384 /* If swapping might yield to a more canonical form, do so. */
8386 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, arg1
, t
);
8388 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8391 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8392 overflow further. Try to decrease the magnitude of constants involved
8393 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8394 and put sole constants at the second argument position.
8395 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8398 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8399 tree arg0
, tree arg1
)
8402 bool strict_overflow_p
;
8403 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8404 "when reducing constant in comparison");
8406 /* Try canonicalization by simplifying arg0. */
8407 strict_overflow_p
= false;
8408 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8409 &strict_overflow_p
);
8412 if (strict_overflow_p
)
8413 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8417 /* Try canonicalization by simplifying arg1 using the swapped
8419 code
= swap_tree_comparison (code
);
8420 strict_overflow_p
= false;
8421 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8422 &strict_overflow_p
);
8423 if (t
&& strict_overflow_p
)
8424 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8428 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8429 space. This is used to avoid issuing overflow warnings for
8430 expressions like &p->x which can not wrap. */
8433 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8435 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8442 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
8443 if (offset
== NULL_TREE
)
8444 wi_offset
= wi::zero (precision
);
8445 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8451 wide_int units
= wi::shwi (bitpos
/ BITS_PER_UNIT
, precision
);
8452 wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
8456 if (!wi::fits_uhwi_p (total
))
8459 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8463 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8465 if (TREE_CODE (base
) == ADDR_EXPR
)
8467 HOST_WIDE_INT base_size
;
8469 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8470 if (base_size
> 0 && size
< base_size
)
8474 return total
.to_uhwi () > (unsigned HOST_WIDE_INT
) size
;
8477 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8478 kind INTEGER_CST. This makes sure to properly sign-extend the
8481 static HOST_WIDE_INT
8482 size_low_cst (const_tree t
)
8484 HOST_WIDE_INT w
= TREE_INT_CST_ELT (t
, 0);
8485 int prec
= TYPE_PRECISION (TREE_TYPE (t
));
8486 if (prec
< HOST_BITS_PER_WIDE_INT
)
8487 return sext_hwi (w
, prec
);
8491 /* Subroutine of fold_binary. This routine performs all of the
8492 transformations that are common to the equality/inequality
8493 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8494 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8495 fold_binary should call fold_binary. Fold a comparison with
8496 tree code CODE and type TYPE with operands OP0 and OP1. Return
8497 the folded comparison or NULL_TREE. */
8500 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8503 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
8504 tree arg0
, arg1
, tem
;
8509 STRIP_SIGN_NOPS (arg0
);
8510 STRIP_SIGN_NOPS (arg1
);
8512 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8513 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8515 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8516 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
8517 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8518 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8519 && TREE_CODE (arg1
) == INTEGER_CST
8520 && !TREE_OVERFLOW (arg1
))
8522 const enum tree_code
8523 reverse_op
= TREE_CODE (arg0
) == PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
;
8524 tree const1
= TREE_OPERAND (arg0
, 1);
8525 tree const2
= fold_convert_loc (loc
, TREE_TYPE (const1
), arg1
);
8526 tree variable
= TREE_OPERAND (arg0
, 0);
8527 tree new_const
= int_const_binop (reverse_op
, const2
, const1
);
8529 /* If the constant operation overflowed this can be
8530 simplified as a comparison against INT_MAX/INT_MIN. */
8531 if (TREE_OVERFLOW (new_const
)
8532 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
8534 int const1_sgn
= tree_int_cst_sgn (const1
);
8535 enum tree_code code2
= code
;
8537 /* Get the sign of the constant on the lhs if the
8538 operation were VARIABLE + CONST1. */
8539 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8540 const1_sgn
= -const1_sgn
;
8542 /* The sign of the constant determines if we overflowed
8543 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8544 Canonicalize to the INT_MIN overflow by swapping the comparison
8546 if (const1_sgn
== -1)
8547 code2
= swap_tree_comparison (code
);
8549 /* We now can look at the canonicalized case
8550 VARIABLE + 1 CODE2 INT_MIN
8551 and decide on the result. */
8558 omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8564 omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8573 fold_overflow_warning ("assuming signed overflow does not occur "
8574 "when changing X +- C1 cmp C2 to "
8576 WARN_STRICT_OVERFLOW_COMPARISON
);
8577 return fold_build2_loc (loc
, code
, type
, variable
, new_const
);
8581 /* For comparisons of pointers we can decompose it to a compile time
8582 comparison of the base objects and the offsets into the object.
8583 This requires at least one operand being an ADDR_EXPR or a
8584 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8585 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8586 && (TREE_CODE (arg0
) == ADDR_EXPR
8587 || TREE_CODE (arg1
) == ADDR_EXPR
8588 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8589 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8591 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8592 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8594 int volatilep
, unsignedp
;
8595 bool indirect_base0
= false, indirect_base1
= false;
8597 /* Get base and offset for the access. Strip ADDR_EXPR for
8598 get_inner_reference, but put it back by stripping INDIRECT_REF
8599 off the base object if possible. indirect_baseN will be true
8600 if baseN is not an address but refers to the object itself. */
8602 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8604 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8605 &bitsize
, &bitpos0
, &offset0
, &mode
,
8606 &unsignedp
, &volatilep
, false);
8607 if (TREE_CODE (base0
) == INDIRECT_REF
)
8608 base0
= TREE_OPERAND (base0
, 0);
8610 indirect_base0
= true;
8612 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8614 base0
= TREE_OPERAND (arg0
, 0);
8615 STRIP_SIGN_NOPS (base0
);
8616 if (TREE_CODE (base0
) == ADDR_EXPR
)
8618 base0
= TREE_OPERAND (base0
, 0);
8619 indirect_base0
= true;
8621 offset0
= TREE_OPERAND (arg0
, 1);
8622 if (tree_fits_shwi_p (offset0
))
8624 HOST_WIDE_INT off
= size_low_cst (offset0
);
8625 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8627 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8629 bitpos0
= off
* BITS_PER_UNIT
;
8630 offset0
= NULL_TREE
;
8636 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8638 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8639 &bitsize
, &bitpos1
, &offset1
, &mode
,
8640 &unsignedp
, &volatilep
, false);
8641 if (TREE_CODE (base1
) == INDIRECT_REF
)
8642 base1
= TREE_OPERAND (base1
, 0);
8644 indirect_base1
= true;
8646 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8648 base1
= TREE_OPERAND (arg1
, 0);
8649 STRIP_SIGN_NOPS (base1
);
8650 if (TREE_CODE (base1
) == ADDR_EXPR
)
8652 base1
= TREE_OPERAND (base1
, 0);
8653 indirect_base1
= true;
8655 offset1
= TREE_OPERAND (arg1
, 1);
8656 if (tree_fits_shwi_p (offset1
))
8658 HOST_WIDE_INT off
= size_low_cst (offset1
);
8659 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8661 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8663 bitpos1
= off
* BITS_PER_UNIT
;
8664 offset1
= NULL_TREE
;
8669 /* A local variable can never be pointed to by
8670 the default SSA name of an incoming parameter. */
8671 if ((TREE_CODE (arg0
) == ADDR_EXPR
8673 && TREE_CODE (base0
) == VAR_DECL
8674 && auto_var_in_fn_p (base0
, current_function_decl
)
8676 && TREE_CODE (base1
) == SSA_NAME
8677 && SSA_NAME_IS_DEFAULT_DEF (base1
)
8678 && TREE_CODE (SSA_NAME_VAR (base1
)) == PARM_DECL
)
8679 || (TREE_CODE (arg1
) == ADDR_EXPR
8681 && TREE_CODE (base1
) == VAR_DECL
8682 && auto_var_in_fn_p (base1
, current_function_decl
)
8684 && TREE_CODE (base0
) == SSA_NAME
8685 && SSA_NAME_IS_DEFAULT_DEF (base0
)
8686 && TREE_CODE (SSA_NAME_VAR (base0
)) == PARM_DECL
))
8688 if (code
== NE_EXPR
)
8689 return constant_boolean_node (1, type
);
8690 else if (code
== EQ_EXPR
)
8691 return constant_boolean_node (0, type
);
8693 /* If we have equivalent bases we might be able to simplify. */
8694 else if (indirect_base0
== indirect_base1
8695 && operand_equal_p (base0
, base1
, 0))
8697 /* We can fold this expression to a constant if the non-constant
8698 offset parts are equal. */
8699 if ((offset0
== offset1
8700 || (offset0
&& offset1
8701 && operand_equal_p (offset0
, offset1
, 0)))
8704 || (indirect_base0
&& DECL_P (base0
))
8705 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8709 && bitpos0
!= bitpos1
8710 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8711 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8712 fold_overflow_warning (("assuming pointer wraparound does not "
8713 "occur when comparing P +- C1 with "
8715 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8720 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8722 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8724 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8726 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8728 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8730 return constant_boolean_node (bitpos0
> bitpos1
, type
);
8734 /* We can simplify the comparison to a comparison of the variable
8735 offset parts if the constant offset parts are equal.
8736 Be careful to use signed sizetype here because otherwise we
8737 mess with array offsets in the wrong way. This is possible
8738 because pointer arithmetic is restricted to retain within an
8739 object and overflow on pointer differences is undefined as of
8740 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8741 else if (bitpos0
== bitpos1
8743 || (indirect_base0
&& DECL_P (base0
))
8744 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8746 /* By converting to signed sizetype we cover middle-end pointer
8747 arithmetic which operates on unsigned pointer types of size
8748 type size and ARRAY_REF offsets which are properly sign or
8749 zero extended from their type in case it is narrower than
8751 if (offset0
== NULL_TREE
)
8752 offset0
= build_int_cst (ssizetype
, 0);
8754 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
8755 if (offset1
== NULL_TREE
)
8756 offset1
= build_int_cst (ssizetype
, 0);
8758 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
8761 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8762 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8763 fold_overflow_warning (("assuming pointer wraparound does not "
8764 "occur when comparing P +- C1 with "
8766 WARN_STRICT_OVERFLOW_COMPARISON
);
8768 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
8771 /* For non-equal bases we can simplify if they are addresses
8772 declarations with different addresses. */
8773 else if (indirect_base0
&& indirect_base1
8774 /* We know that !operand_equal_p (base0, base1, 0)
8775 because the if condition was false. But make
8776 sure two decls are not the same. */
8778 && TREE_CODE (arg0
) == ADDR_EXPR
8779 && TREE_CODE (arg1
) == ADDR_EXPR
8782 /* Watch for aliases. */
8783 && (!decl_in_symtab_p (base0
)
8784 || !decl_in_symtab_p (base1
)
8785 || !symtab_node::get_create (base0
)->equal_address_to
8786 (symtab_node::get_create (base1
))))
8788 if (code
== EQ_EXPR
)
8789 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
8791 else if (code
== NE_EXPR
)
8792 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
8795 /* For equal offsets we can simplify to a comparison of the
8797 else if (bitpos0
== bitpos1
8799 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
8801 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
8802 && ((offset0
== offset1
)
8803 || (offset0
&& offset1
8804 && operand_equal_p (offset0
, offset1
, 0))))
8807 base0
= build_fold_addr_expr_loc (loc
, base0
);
8809 base1
= build_fold_addr_expr_loc (loc
, base1
);
8810 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
8814 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8815 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8816 the resulting offset is smaller in absolute value than the
8817 original one and has the same sign. */
8818 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8819 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8820 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8821 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8822 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8823 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
8824 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8825 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
8827 tree const1
= TREE_OPERAND (arg0
, 1);
8828 tree const2
= TREE_OPERAND (arg1
, 1);
8829 tree variable1
= TREE_OPERAND (arg0
, 0);
8830 tree variable2
= TREE_OPERAND (arg1
, 0);
8832 const char * const warnmsg
= G_("assuming signed overflow does not "
8833 "occur when combining constants around "
8836 /* Put the constant on the side where it doesn't overflow and is
8837 of lower absolute value and of same sign than before. */
8838 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8839 ? MINUS_EXPR
: PLUS_EXPR
,
8841 if (!TREE_OVERFLOW (cst
)
8842 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
8843 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
8845 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8846 return fold_build2_loc (loc
, code
, type
,
8848 fold_build2_loc (loc
, TREE_CODE (arg1
),
8853 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8854 ? MINUS_EXPR
: PLUS_EXPR
,
8856 if (!TREE_OVERFLOW (cst
)
8857 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
8858 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
8860 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8861 return fold_build2_loc (loc
, code
, type
,
8862 fold_build2_loc (loc
, TREE_CODE (arg0
),
8869 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
8873 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
8874 && CONVERT_EXPR_P (arg0
))
8876 /* If we are widening one operand of an integer comparison,
8877 see if the other operand is similarly being widened. Perhaps we
8878 can do the comparison in the narrower type. */
8879 tem
= fold_widened_comparison (loc
, code
, type
, arg0
, arg1
);
8883 /* Or if we are changing signedness. */
8884 tem
= fold_sign_changed_comparison (loc
, code
, type
, arg0
, arg1
);
8889 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8890 constant, we can simplify it. */
8891 if (TREE_CODE (arg1
) == INTEGER_CST
8892 && (TREE_CODE (arg0
) == MIN_EXPR
8893 || TREE_CODE (arg0
) == MAX_EXPR
)
8894 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8896 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
8901 /* If we are comparing an expression that just has comparisons
8902 of two integer values, arithmetic expressions of those comparisons,
8903 and constants, we can simplify it. There are only three cases
8904 to check: the two values can either be equal, the first can be
8905 greater, or the second can be greater. Fold the expression for
8906 those three values. Since each value must be 0 or 1, we have
8907 eight possibilities, each of which corresponds to the constant 0
8908 or 1 or one of the six possible comparisons.
8910 This handles common cases like (a > b) == 0 but also handles
8911 expressions like ((x > y) - (y > x)) > 0, which supposedly
8912 occur in macroized code. */
8914 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
8916 tree cval1
= 0, cval2
= 0;
8919 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
8920 /* Don't handle degenerate cases here; they should already
8921 have been handled anyway. */
8922 && cval1
!= 0 && cval2
!= 0
8923 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
8924 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
8925 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
8926 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
8927 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
8928 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
8929 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
8931 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
8932 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
8934 /* We can't just pass T to eval_subst in case cval1 or cval2
8935 was the same as ARG1. */
8938 = fold_build2_loc (loc
, code
, type
,
8939 eval_subst (loc
, arg0
, cval1
, maxval
,
8943 = fold_build2_loc (loc
, code
, type
,
8944 eval_subst (loc
, arg0
, cval1
, maxval
,
8948 = fold_build2_loc (loc
, code
, type
,
8949 eval_subst (loc
, arg0
, cval1
, minval
,
8953 /* All three of these results should be 0 or 1. Confirm they are.
8954 Then use those values to select the proper code to use. */
8956 if (TREE_CODE (high_result
) == INTEGER_CST
8957 && TREE_CODE (equal_result
) == INTEGER_CST
8958 && TREE_CODE (low_result
) == INTEGER_CST
)
8960 /* Make a 3-bit mask with the high-order bit being the
8961 value for `>', the next for '=', and the low for '<'. */
8962 switch ((integer_onep (high_result
) * 4)
8963 + (integer_onep (equal_result
) * 2)
8964 + integer_onep (low_result
))
8968 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
8989 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
8994 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
8995 SET_EXPR_LOCATION (tem
, loc
);
8998 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
9003 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9004 into a single range test. */
9005 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
9006 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
9007 && TREE_CODE (arg1
) == INTEGER_CST
9008 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9009 && !integer_zerop (TREE_OPERAND (arg0
, 1))
9010 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
9011 && !TREE_OVERFLOW (arg1
))
9013 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
9014 if (tem
!= NULL_TREE
)
9022 /* Subroutine of fold_binary. Optimize complex multiplications of the
9023 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9024 argument EXPR represents the expression "z" of type TYPE. */
9027 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
9029 tree itype
= TREE_TYPE (type
);
9030 tree rpart
, ipart
, tem
;
9032 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
9034 rpart
= TREE_OPERAND (expr
, 0);
9035 ipart
= TREE_OPERAND (expr
, 1);
9037 else if (TREE_CODE (expr
) == COMPLEX_CST
)
9039 rpart
= TREE_REALPART (expr
);
9040 ipart
= TREE_IMAGPART (expr
);
9044 expr
= save_expr (expr
);
9045 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
9046 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
9049 rpart
= save_expr (rpart
);
9050 ipart
= save_expr (ipart
);
9051 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
9052 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
9053 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
9054 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
9055 build_zero_cst (itype
));
9059 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9060 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9063 vec_cst_ctor_to_array (tree arg
, tree
*elts
)
9065 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)), i
;
9067 if (TREE_CODE (arg
) == VECTOR_CST
)
9069 for (i
= 0; i
< VECTOR_CST_NELTS (arg
); ++i
)
9070 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
9072 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
9074 constructor_elt
*elt
;
9076 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg
), i
, elt
)
9077 if (i
>= nelts
|| TREE_CODE (TREE_TYPE (elt
->value
)) == VECTOR_TYPE
)
9080 elts
[i
] = elt
->value
;
9084 for (; i
< nelts
; i
++)
9086 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
9090 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9091 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9092 NULL_TREE otherwise. */
9095 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const unsigned char *sel
)
9097 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
9099 bool need_ctor
= false;
9101 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
9102 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
);
9103 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
9104 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
9107 elts
= XALLOCAVEC (tree
, nelts
* 3);
9108 if (!vec_cst_ctor_to_array (arg0
, elts
)
9109 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
))
9112 for (i
= 0; i
< nelts
; i
++)
9114 if (!CONSTANT_CLASS_P (elts
[sel
[i
]]))
9116 elts
[i
+ 2 * nelts
] = unshare_expr (elts
[sel
[i
]]);
9121 vec
<constructor_elt
, va_gc
> *v
;
9122 vec_alloc (v
, nelts
);
9123 for (i
= 0; i
< nelts
; i
++)
9124 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, elts
[2 * nelts
+ i
]);
9125 return build_constructor (type
, v
);
9128 return build_vector (type
, &elts
[2 * nelts
]);
9131 /* Try to fold a pointer difference of type TYPE two address expressions of
9132 array references AREF0 and AREF1 using location LOC. Return a
9133 simplified expression for the difference or NULL_TREE. */
9136 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
9137 tree aref0
, tree aref1
)
9139 tree base0
= TREE_OPERAND (aref0
, 0);
9140 tree base1
= TREE_OPERAND (aref1
, 0);
9141 tree base_offset
= build_int_cst (type
, 0);
9143 /* If the bases are array references as well, recurse. If the bases
9144 are pointer indirections compute the difference of the pointers.
9145 If the bases are equal, we are set. */
9146 if ((TREE_CODE (base0
) == ARRAY_REF
9147 && TREE_CODE (base1
) == ARRAY_REF
9149 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
)))
9150 || (INDIRECT_REF_P (base0
)
9151 && INDIRECT_REF_P (base1
)
9152 && (base_offset
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
9153 TREE_OPERAND (base0
, 0),
9154 TREE_OPERAND (base1
, 0))))
9155 || operand_equal_p (base0
, base1
, 0))
9157 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
9158 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
9159 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
9160 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
9161 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9163 fold_build2_loc (loc
, MULT_EXPR
, type
,
9169 /* If the real or vector real constant CST of type TYPE has an exact
9170 inverse, return it, else return NULL. */
9173 exact_inverse (tree type
, tree cst
)
9176 tree unit_type
, *elts
;
9178 unsigned vec_nelts
, i
;
9180 switch (TREE_CODE (cst
))
9183 r
= TREE_REAL_CST (cst
);
9185 if (exact_real_inverse (TYPE_MODE (type
), &r
))
9186 return build_real (type
, r
);
9191 vec_nelts
= VECTOR_CST_NELTS (cst
);
9192 elts
= XALLOCAVEC (tree
, vec_nelts
);
9193 unit_type
= TREE_TYPE (type
);
9194 mode
= TYPE_MODE (unit_type
);
9196 for (i
= 0; i
< vec_nelts
; i
++)
9198 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
9199 if (!exact_real_inverse (mode
, &r
))
9201 elts
[i
] = build_real (unit_type
, r
);
9204 return build_vector (type
, elts
);
9211 /* Mask out the tz least significant bits of X of type TYPE where
9212 tz is the number of trailing zeroes in Y. */
9214 mask_with_tz (tree type
, const wide_int
&x
, const wide_int
&y
)
9216 int tz
= wi::ctz (y
);
9218 return wi::mask (tz
, true, TYPE_PRECISION (type
)) & x
;
9222 /* Return true when T is an address and is known to be nonzero.
9223 For floating point we further ensure that T is not denormal.
9224 Similar logic is present in nonzero_address in rtlanal.h.
9226 If the return value is based on the assumption that signed overflow
9227 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9228 change *STRICT_OVERFLOW_P. */
9231 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
9233 tree type
= TREE_TYPE (t
);
9234 enum tree_code code
;
9236 /* Doing something useful for floating point would need more work. */
9237 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
9240 code
= TREE_CODE (t
);
9241 switch (TREE_CODE_CLASS (code
))
9244 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9247 case tcc_comparison
:
9248 return tree_binary_nonzero_warnv_p (code
, type
,
9249 TREE_OPERAND (t
, 0),
9250 TREE_OPERAND (t
, 1),
9253 case tcc_declaration
:
9255 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
9263 case TRUTH_NOT_EXPR
:
9264 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9267 case TRUTH_AND_EXPR
:
9269 case TRUTH_XOR_EXPR
:
9270 return tree_binary_nonzero_warnv_p (code
, type
,
9271 TREE_OPERAND (t
, 0),
9272 TREE_OPERAND (t
, 1),
9280 case WITH_SIZE_EXPR
:
9282 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
9287 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
9291 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
9296 tree fndecl
= get_callee_fndecl (t
);
9297 if (!fndecl
) return false;
9298 if (flag_delete_null_pointer_checks
&& !flag_check_new
9299 && DECL_IS_OPERATOR_NEW (fndecl
)
9300 && !TREE_NOTHROW (fndecl
))
9302 if (flag_delete_null_pointer_checks
9303 && lookup_attribute ("returns_nonnull",
9304 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
9306 return alloca_call_p (t
);
9315 /* Return true when T is an address and is known to be nonzero.
9316 Handle warnings about undefined signed overflow. */
9319 tree_expr_nonzero_p (tree t
)
9321 bool ret
, strict_overflow_p
;
9323 strict_overflow_p
= false;
9324 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
9325 if (strict_overflow_p
)
9326 fold_overflow_warning (("assuming signed overflow does not occur when "
9327 "determining that expression is always "
9329 WARN_STRICT_OVERFLOW_MISC
);
9333 /* Fold a binary expression of code CODE and type TYPE with operands
9334 OP0 and OP1. LOC is the location of the resulting expression.
9335 Return the folded expression if folding is successful. Otherwise,
9336 return NULL_TREE. */
9339 fold_binary_loc (location_t loc
,
9340 enum tree_code code
, tree type
, tree op0
, tree op1
)
9342 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9343 tree arg0
, arg1
, tem
;
9344 tree t1
= NULL_TREE
;
9345 bool strict_overflow_p
;
9348 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9349 && TREE_CODE_LENGTH (code
) == 2
9351 && op1
!= NULL_TREE
);
9356 /* Strip any conversions that don't change the mode. This is
9357 safe for every expression, except for a comparison expression
9358 because its signedness is derived from its operands. So, in
9359 the latter case, only strip conversions that don't change the
9360 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9363 Note that this is done as an internal manipulation within the
9364 constant folder, in order to find the simplest representation
9365 of the arguments so that their form can be studied. In any
9366 cases, the appropriate type conversions should be put back in
9367 the tree that will get out of the constant folder. */
9369 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9371 STRIP_SIGN_NOPS (arg0
);
9372 STRIP_SIGN_NOPS (arg1
);
9380 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9381 constant but we can't do arithmetic on them. */
9382 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
9384 tem
= const_binop (code
, type
, arg0
, arg1
);
9385 if (tem
!= NULL_TREE
)
9387 if (TREE_TYPE (tem
) != type
)
9388 tem
= fold_convert_loc (loc
, type
, tem
);
9393 /* If this is a commutative operation, and ARG0 is a constant, move it
9394 to ARG1 to reduce the number of tests below. */
9395 if (commutative_tree_code (code
)
9396 && tree_swap_operands_p (arg0
, arg1
, true))
9397 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9399 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9400 to ARG1 to reduce the number of tests below. */
9401 if (kind
== tcc_comparison
9402 && tree_swap_operands_p (arg0
, arg1
, true))
9403 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9405 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9409 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9411 First check for cases where an arithmetic operation is applied to a
9412 compound, conditional, or comparison operation. Push the arithmetic
9413 operation inside the compound or conditional to see if any folding
9414 can then be done. Convert comparison to conditional for this purpose.
9415 The also optimizes non-constant cases that used to be done in
9418 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9419 one of the operands is a comparison and the other is a comparison, a
9420 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9421 code below would make the expression more complex. Change it to a
9422 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9423 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9425 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9426 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9427 && TREE_CODE (type
) != VECTOR_TYPE
9428 && ((truth_value_p (TREE_CODE (arg0
))
9429 && (truth_value_p (TREE_CODE (arg1
))
9430 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9431 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9432 || (truth_value_p (TREE_CODE (arg1
))
9433 && (truth_value_p (TREE_CODE (arg0
))
9434 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9435 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9437 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9438 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9441 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9442 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9444 if (code
== EQ_EXPR
)
9445 tem
= invert_truthvalue_loc (loc
, tem
);
9447 return fold_convert_loc (loc
, type
, tem
);
9450 if (TREE_CODE_CLASS (code
) == tcc_binary
9451 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9453 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9455 tem
= fold_build2_loc (loc
, code
, type
,
9456 fold_convert_loc (loc
, TREE_TYPE (op0
),
9457 TREE_OPERAND (arg0
, 1)), op1
);
9458 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9461 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9462 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9464 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9465 fold_convert_loc (loc
, TREE_TYPE (op1
),
9466 TREE_OPERAND (arg1
, 1)));
9467 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9471 if (TREE_CODE (arg0
) == COND_EXPR
9472 || TREE_CODE (arg0
) == VEC_COND_EXPR
9473 || COMPARISON_CLASS_P (arg0
))
9475 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9477 /*cond_first_p=*/1);
9478 if (tem
!= NULL_TREE
)
9482 if (TREE_CODE (arg1
) == COND_EXPR
9483 || TREE_CODE (arg1
) == VEC_COND_EXPR
9484 || COMPARISON_CLASS_P (arg1
))
9486 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9488 /*cond_first_p=*/0);
9489 if (tem
!= NULL_TREE
)
9497 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9498 if (TREE_CODE (arg0
) == ADDR_EXPR
9499 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9501 tree iref
= TREE_OPERAND (arg0
, 0);
9502 return fold_build2 (MEM_REF
, type
,
9503 TREE_OPERAND (iref
, 0),
9504 int_const_binop (PLUS_EXPR
, arg1
,
9505 TREE_OPERAND (iref
, 1)));
9508 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9509 if (TREE_CODE (arg0
) == ADDR_EXPR
9510 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9513 HOST_WIDE_INT coffset
;
9514 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9518 return fold_build2 (MEM_REF
, type
,
9519 build_fold_addr_expr (base
),
9520 int_const_binop (PLUS_EXPR
, arg1
,
9521 size_int (coffset
)));
9526 case POINTER_PLUS_EXPR
:
9527 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9528 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9529 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9530 return fold_convert_loc (loc
, type
,
9531 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9532 fold_convert_loc (loc
, sizetype
,
9534 fold_convert_loc (loc
, sizetype
,
9540 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
9542 /* X + (X / CST) * -CST is X % CST. */
9543 if (TREE_CODE (arg1
) == MULT_EXPR
9544 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
9545 && operand_equal_p (arg0
,
9546 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
9548 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
9549 tree cst1
= TREE_OPERAND (arg1
, 1);
9550 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
9552 if (sum
&& integer_zerop (sum
))
9553 return fold_convert_loc (loc
, type
,
9554 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
9555 TREE_TYPE (arg0
), arg0
,
9560 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9561 one. Make sure the type is not saturating and has the signedness of
9562 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9563 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9564 if ((TREE_CODE (arg0
) == MULT_EXPR
9565 || TREE_CODE (arg1
) == MULT_EXPR
)
9566 && !TYPE_SATURATING (type
)
9567 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9568 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9569 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9571 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9576 if (! FLOAT_TYPE_P (type
))
9578 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9579 with a constant, and the two constants have no bits in common,
9580 we should treat this as a BIT_IOR_EXPR since this may produce more
9582 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9583 && TREE_CODE (arg1
) == BIT_AND_EXPR
9584 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9585 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9586 && wi::bit_and (TREE_OPERAND (arg0
, 1),
9587 TREE_OPERAND (arg1
, 1)) == 0)
9589 code
= BIT_IOR_EXPR
;
9593 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9594 (plus (plus (mult) (mult)) (foo)) so that we can
9595 take advantage of the factoring cases below. */
9596 if (ANY_INTEGRAL_TYPE_P (type
)
9597 && TYPE_OVERFLOW_WRAPS (type
)
9598 && (((TREE_CODE (arg0
) == PLUS_EXPR
9599 || TREE_CODE (arg0
) == MINUS_EXPR
)
9600 && TREE_CODE (arg1
) == MULT_EXPR
)
9601 || ((TREE_CODE (arg1
) == PLUS_EXPR
9602 || TREE_CODE (arg1
) == MINUS_EXPR
)
9603 && TREE_CODE (arg0
) == MULT_EXPR
)))
9605 tree parg0
, parg1
, parg
, marg
;
9606 enum tree_code pcode
;
9608 if (TREE_CODE (arg1
) == MULT_EXPR
)
9609 parg
= arg0
, marg
= arg1
;
9611 parg
= arg1
, marg
= arg0
;
9612 pcode
= TREE_CODE (parg
);
9613 parg0
= TREE_OPERAND (parg
, 0);
9614 parg1
= TREE_OPERAND (parg
, 1);
9618 if (TREE_CODE (parg0
) == MULT_EXPR
9619 && TREE_CODE (parg1
) != MULT_EXPR
)
9620 return fold_build2_loc (loc
, pcode
, type
,
9621 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9622 fold_convert_loc (loc
, type
,
9624 fold_convert_loc (loc
, type
,
9626 fold_convert_loc (loc
, type
, parg1
));
9627 if (TREE_CODE (parg0
) != MULT_EXPR
9628 && TREE_CODE (parg1
) == MULT_EXPR
)
9630 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9631 fold_convert_loc (loc
, type
, parg0
),
9632 fold_build2_loc (loc
, pcode
, type
,
9633 fold_convert_loc (loc
, type
, marg
),
9634 fold_convert_loc (loc
, type
,
9640 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9641 to __complex__ ( x, y ). This is not the same for SNaNs or
9642 if signed zeros are involved. */
9643 if (!HONOR_SNANS (element_mode (arg0
))
9644 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9645 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9647 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9648 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9649 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9650 bool arg0rz
= false, arg0iz
= false;
9651 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9652 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9654 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9655 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9656 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9658 tree rp
= arg1r
? arg1r
9659 : build1 (REALPART_EXPR
, rtype
, arg1
);
9660 tree ip
= arg0i
? arg0i
9661 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9662 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9664 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9666 tree rp
= arg0r
? arg0r
9667 : build1 (REALPART_EXPR
, rtype
, arg0
);
9668 tree ip
= arg1i
? arg1i
9669 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
9670 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9675 if (flag_unsafe_math_optimizations
9676 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9677 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9678 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
9681 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9682 We associate floats only if the user has specified
9683 -fassociative-math. */
9684 if (flag_associative_math
9685 && TREE_CODE (arg1
) == PLUS_EXPR
9686 && TREE_CODE (arg0
) != MULT_EXPR
)
9688 tree tree10
= TREE_OPERAND (arg1
, 0);
9689 tree tree11
= TREE_OPERAND (arg1
, 1);
9690 if (TREE_CODE (tree11
) == MULT_EXPR
9691 && TREE_CODE (tree10
) == MULT_EXPR
)
9694 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
9695 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
9698 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9699 We associate floats only if the user has specified
9700 -fassociative-math. */
9701 if (flag_associative_math
9702 && TREE_CODE (arg0
) == PLUS_EXPR
9703 && TREE_CODE (arg1
) != MULT_EXPR
)
9705 tree tree00
= TREE_OPERAND (arg0
, 0);
9706 tree tree01
= TREE_OPERAND (arg0
, 1);
9707 if (TREE_CODE (tree01
) == MULT_EXPR
9708 && TREE_CODE (tree00
) == MULT_EXPR
)
9711 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
9712 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
9718 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9719 is a rotate of A by C1 bits. */
9720 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9721 is a rotate of A by B bits. */
9723 enum tree_code code0
, code1
;
9725 code0
= TREE_CODE (arg0
);
9726 code1
= TREE_CODE (arg1
);
9727 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
9728 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
9729 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9730 TREE_OPERAND (arg1
, 0), 0)
9731 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9732 TYPE_UNSIGNED (rtype
))
9733 /* Only create rotates in complete modes. Other cases are not
9734 expanded properly. */
9735 && (element_precision (rtype
)
9736 == element_precision (TYPE_MODE (rtype
))))
9738 tree tree01
, tree11
;
9739 enum tree_code code01
, code11
;
9741 tree01
= TREE_OPERAND (arg0
, 1);
9742 tree11
= TREE_OPERAND (arg1
, 1);
9743 STRIP_NOPS (tree01
);
9744 STRIP_NOPS (tree11
);
9745 code01
= TREE_CODE (tree01
);
9746 code11
= TREE_CODE (tree11
);
9747 if (code01
== INTEGER_CST
9748 && code11
== INTEGER_CST
9749 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
9750 == element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
9752 tem
= build2_loc (loc
, LROTATE_EXPR
,
9753 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9754 TREE_OPERAND (arg0
, 0),
9755 code0
== LSHIFT_EXPR
9756 ? TREE_OPERAND (arg0
, 1)
9757 : TREE_OPERAND (arg1
, 1));
9758 return fold_convert_loc (loc
, type
, tem
);
9760 else if (code11
== MINUS_EXPR
)
9762 tree tree110
, tree111
;
9763 tree110
= TREE_OPERAND (tree11
, 0);
9764 tree111
= TREE_OPERAND (tree11
, 1);
9765 STRIP_NOPS (tree110
);
9766 STRIP_NOPS (tree111
);
9767 if (TREE_CODE (tree110
) == INTEGER_CST
9768 && 0 == compare_tree_int (tree110
,
9770 (TREE_TYPE (TREE_OPERAND
9772 && operand_equal_p (tree01
, tree111
, 0))
9774 fold_convert_loc (loc
, type
,
9775 build2 ((code0
== LSHIFT_EXPR
9778 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9779 TREE_OPERAND (arg0
, 0),
9780 TREE_OPERAND (arg0
, 1)));
9782 else if (code01
== MINUS_EXPR
)
9784 tree tree010
, tree011
;
9785 tree010
= TREE_OPERAND (tree01
, 0);
9786 tree011
= TREE_OPERAND (tree01
, 1);
9787 STRIP_NOPS (tree010
);
9788 STRIP_NOPS (tree011
);
9789 if (TREE_CODE (tree010
) == INTEGER_CST
9790 && 0 == compare_tree_int (tree010
,
9792 (TREE_TYPE (TREE_OPERAND
9794 && operand_equal_p (tree11
, tree011
, 0))
9795 return fold_convert_loc
9797 build2 ((code0
!= LSHIFT_EXPR
9800 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9801 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1)));
9807 /* In most languages, can't associate operations on floats through
9808 parentheses. Rather than remember where the parentheses were, we
9809 don't associate floats at all, unless the user has specified
9811 And, we need to make sure type is not saturating. */
9813 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
9814 && !TYPE_SATURATING (type
))
9816 tree var0
, con0
, lit0
, minus_lit0
;
9817 tree var1
, con1
, lit1
, minus_lit1
;
9821 /* Split both trees into variables, constants, and literals. Then
9822 associate each group together, the constants with literals,
9823 then the result with variables. This increases the chances of
9824 literals being recombined later and of generating relocatable
9825 expressions for the sum of a constant and literal. */
9826 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
9827 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
9828 code
== MINUS_EXPR
);
9830 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9831 if (code
== MINUS_EXPR
)
9834 /* With undefined overflow prefer doing association in a type
9835 which wraps on overflow, if that is one of the operand types. */
9836 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
9837 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
9839 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9840 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
9841 atype
= TREE_TYPE (arg0
);
9842 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9843 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
9844 atype
= TREE_TYPE (arg1
);
9845 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
9848 /* With undefined overflow we can only associate constants with one
9849 variable, and constants whose association doesn't overflow. */
9850 if ((POINTER_TYPE_P (atype
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
9851 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
9858 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
9859 tmp0
= TREE_OPERAND (tmp0
, 0);
9860 if (CONVERT_EXPR_P (tmp0
)
9861 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9862 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9863 <= TYPE_PRECISION (atype
)))
9864 tmp0
= TREE_OPERAND (tmp0
, 0);
9865 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
9866 tmp1
= TREE_OPERAND (tmp1
, 0);
9867 if (CONVERT_EXPR_P (tmp1
)
9868 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9869 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9870 <= TYPE_PRECISION (atype
)))
9871 tmp1
= TREE_OPERAND (tmp1
, 0);
9872 /* The only case we can still associate with two variables
9873 is if they are the same, modulo negation and bit-pattern
9874 preserving conversions. */
9875 if (!operand_equal_p (tmp0
, tmp1
, 0))
9880 /* Only do something if we found more than two objects. Otherwise,
9881 nothing has changed and we risk infinite recursion. */
9883 && (2 < ((var0
!= 0) + (var1
!= 0)
9884 + (con0
!= 0) + (con1
!= 0)
9885 + (lit0
!= 0) + (lit1
!= 0)
9886 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
9888 bool any_overflows
= false;
9889 if (lit0
) any_overflows
|= TREE_OVERFLOW (lit0
);
9890 if (lit1
) any_overflows
|= TREE_OVERFLOW (lit1
);
9891 if (minus_lit0
) any_overflows
|= TREE_OVERFLOW (minus_lit0
);
9892 if (minus_lit1
) any_overflows
|= TREE_OVERFLOW (minus_lit1
);
9893 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
9894 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
9895 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
9896 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
9899 /* Preserve the MINUS_EXPR if the negative part of the literal is
9900 greater than the positive part. Otherwise, the multiplicative
9901 folding code (i.e extract_muldiv) may be fooled in case
9902 unsigned constants are subtracted, like in the following
9903 example: ((X*2 + 4) - 8U)/2. */
9904 if (minus_lit0
&& lit0
)
9906 if (TREE_CODE (lit0
) == INTEGER_CST
9907 && TREE_CODE (minus_lit0
) == INTEGER_CST
9908 && tree_int_cst_lt (lit0
, minus_lit0
))
9910 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
9916 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
9922 /* Don't introduce overflows through reassociation. */
9924 && ((lit0
&& TREE_OVERFLOW_P (lit0
))
9925 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
))))
9932 fold_convert_loc (loc
, type
,
9933 associate_trees (loc
, var0
, minus_lit0
,
9934 MINUS_EXPR
, atype
));
9937 con0
= associate_trees (loc
, con0
, minus_lit0
,
9940 fold_convert_loc (loc
, type
,
9941 associate_trees (loc
, var0
, con0
,
9946 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
9948 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
9956 /* Pointer simplifications for subtraction, simple reassociations. */
9957 if (POINTER_TYPE_P (TREE_TYPE (arg1
)) && POINTER_TYPE_P (TREE_TYPE (arg0
)))
9959 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9960 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
9961 && TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
9963 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
9964 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
9965 tree arg10
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
9966 tree arg11
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
9967 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9968 fold_build2_loc (loc
, MINUS_EXPR
, type
,
9970 fold_build2_loc (loc
, MINUS_EXPR
, type
,
9973 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9974 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
9976 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
9977 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
9978 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
, arg00
,
9979 fold_convert_loc (loc
, type
, arg1
));
9981 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tmp
, arg01
);
9983 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
9985 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
9987 tree arg10
= fold_convert_loc (loc
, type
,
9988 TREE_OPERAND (arg1
, 0));
9989 tree arg11
= fold_convert_loc (loc
, type
,
9990 TREE_OPERAND (arg1
, 1));
9991 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
9992 fold_convert_loc (loc
, type
, arg0
),
9995 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tmp
, arg11
);
9998 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9999 if (TREE_CODE (arg0
) == NEGATE_EXPR
10000 && negate_expr_p (arg1
)
10001 && reorder_operands_p (arg0
, arg1
))
10002 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10003 fold_convert_loc (loc
, type
,
10004 negate_expr (arg1
)),
10005 fold_convert_loc (loc
, type
,
10006 TREE_OPERAND (arg0
, 0)));
10008 if (! FLOAT_TYPE_P (type
))
10010 /* Fold A - (A & B) into ~B & A. */
10011 if (!TREE_SIDE_EFFECTS (arg0
)
10012 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10014 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
10016 tree arg10
= fold_convert_loc (loc
, type
,
10017 TREE_OPERAND (arg1
, 0));
10018 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10019 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10021 fold_convert_loc (loc
, type
, arg0
));
10023 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10025 tree arg11
= fold_convert_loc (loc
,
10026 type
, TREE_OPERAND (arg1
, 1));
10027 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10028 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10030 fold_convert_loc (loc
, type
, arg0
));
10034 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10035 any power of 2 minus 1. */
10036 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10037 && TREE_CODE (arg1
) == BIT_AND_EXPR
10038 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10039 TREE_OPERAND (arg1
, 0), 0))
10041 tree mask0
= TREE_OPERAND (arg0
, 1);
10042 tree mask1
= TREE_OPERAND (arg1
, 1);
10043 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
10045 if (operand_equal_p (tem
, mask1
, 0))
10047 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
10048 TREE_OPERAND (arg0
, 0), mask1
);
10049 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
10054 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10055 __complex__ ( x, -y ). This is not the same for SNaNs or if
10056 signed zeros are involved. */
10057 if (!HONOR_SNANS (element_mode (arg0
))
10058 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10059 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10061 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10062 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10063 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10064 bool arg0rz
= false, arg0iz
= false;
10065 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10066 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10068 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10069 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10070 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10072 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10074 : build1 (REALPART_EXPR
, rtype
, arg1
));
10075 tree ip
= arg0i
? arg0i
10076 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10077 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10079 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10081 tree rp
= arg0r
? arg0r
10082 : build1 (REALPART_EXPR
, rtype
, arg0
);
10083 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10085 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10086 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10091 /* A - B -> A + (-B) if B is easily negatable. */
10092 if (negate_expr_p (arg1
)
10093 && !TYPE_OVERFLOW_SANITIZED (type
)
10094 && ((FLOAT_TYPE_P (type
)
10095 /* Avoid this transformation if B is a positive REAL_CST. */
10096 && (TREE_CODE (arg1
) != REAL_CST
10097 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
10098 || INTEGRAL_TYPE_P (type
)))
10099 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10100 fold_convert_loc (loc
, type
, arg0
),
10101 fold_convert_loc (loc
, type
,
10102 negate_expr (arg1
)));
10104 /* Fold &a[i] - &a[j] to i-j. */
10105 if (TREE_CODE (arg0
) == ADDR_EXPR
10106 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10107 && TREE_CODE (arg1
) == ADDR_EXPR
10108 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10110 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
10111 TREE_OPERAND (arg0
, 0),
10112 TREE_OPERAND (arg1
, 0));
10117 if (FLOAT_TYPE_P (type
)
10118 && flag_unsafe_math_optimizations
10119 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10120 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10121 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10124 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10125 one. Make sure the type is not saturating and has the signedness of
10126 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10127 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10128 if ((TREE_CODE (arg0
) == MULT_EXPR
10129 || TREE_CODE (arg1
) == MULT_EXPR
)
10130 && !TYPE_SATURATING (type
)
10131 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10132 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10133 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10135 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10143 /* (-A) * (-B) -> A * B */
10144 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10145 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10146 fold_convert_loc (loc
, type
,
10147 TREE_OPERAND (arg0
, 0)),
10148 fold_convert_loc (loc
, type
,
10149 negate_expr (arg1
)));
10150 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10151 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10152 fold_convert_loc (loc
, type
,
10153 negate_expr (arg0
)),
10154 fold_convert_loc (loc
, type
,
10155 TREE_OPERAND (arg1
, 0)));
10157 if (! FLOAT_TYPE_P (type
))
10159 /* Transform x * -C into -x * C if x is easily negatable. */
10160 if (TREE_CODE (arg1
) == INTEGER_CST
10161 && tree_int_cst_sgn (arg1
) == -1
10162 && negate_expr_p (arg0
)
10163 && (tem
= negate_expr (arg1
)) != arg1
10164 && !TREE_OVERFLOW (tem
))
10165 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10166 fold_convert_loc (loc
, type
,
10167 negate_expr (arg0
)),
10170 /* (a * (1 << b)) is (a << b) */
10171 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10172 && integer_onep (TREE_OPERAND (arg1
, 0)))
10173 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op0
,
10174 TREE_OPERAND (arg1
, 1));
10175 if (TREE_CODE (arg0
) == LSHIFT_EXPR
10176 && integer_onep (TREE_OPERAND (arg0
, 0)))
10177 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op1
,
10178 TREE_OPERAND (arg0
, 1));
10180 /* (A + A) * C -> A * 2 * C */
10181 if (TREE_CODE (arg0
) == PLUS_EXPR
10182 && TREE_CODE (arg1
) == INTEGER_CST
10183 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10184 TREE_OPERAND (arg0
, 1), 0))
10185 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10186 omit_one_operand_loc (loc
, type
,
10187 TREE_OPERAND (arg0
, 0),
10188 TREE_OPERAND (arg0
, 1)),
10189 fold_build2_loc (loc
, MULT_EXPR
, type
,
10190 build_int_cst (type
, 2) , arg1
));
10192 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10193 sign-changing only. */
10194 if (TREE_CODE (arg1
) == INTEGER_CST
10195 && TREE_CODE (arg0
) == EXACT_DIV_EXPR
10196 && operand_equal_p (arg1
, TREE_OPERAND (arg0
, 1), 0))
10197 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10199 strict_overflow_p
= false;
10200 if (TREE_CODE (arg1
) == INTEGER_CST
10201 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10202 &strict_overflow_p
)))
10204 if (strict_overflow_p
)
10205 fold_overflow_warning (("assuming signed overflow does not "
10206 "occur when simplifying "
10208 WARN_STRICT_OVERFLOW_MISC
);
10209 return fold_convert_loc (loc
, type
, tem
);
10212 /* Optimize z * conj(z) for integer complex numbers. */
10213 if (TREE_CODE (arg0
) == CONJ_EXPR
10214 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10215 return fold_mult_zconjz (loc
, type
, arg1
);
10216 if (TREE_CODE (arg1
) == CONJ_EXPR
10217 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10218 return fold_mult_zconjz (loc
, type
, arg0
);
10222 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10223 the result for floating point types due to rounding so it is applied
10224 only if -fassociative-math was specify. */
10225 if (flag_associative_math
10226 && TREE_CODE (arg0
) == RDIV_EXPR
10227 && TREE_CODE (arg1
) == REAL_CST
10228 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
10230 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
10233 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10234 TREE_OPERAND (arg0
, 1));
10237 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10238 if (operand_equal_p (arg0
, arg1
, 0))
10240 tree tem
= fold_strip_sign_ops (arg0
);
10241 if (tem
!= NULL_TREE
)
10243 tem
= fold_convert_loc (loc
, type
, tem
);
10244 return fold_build2_loc (loc
, MULT_EXPR
, type
, tem
, tem
);
10248 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10249 This is not the same for NaNs or if signed zeros are
10251 if (!HONOR_NANS (arg0
)
10252 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10253 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10254 && TREE_CODE (arg1
) == COMPLEX_CST
10255 && real_zerop (TREE_REALPART (arg1
)))
10257 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10258 if (real_onep (TREE_IMAGPART (arg1
)))
10260 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10261 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
10263 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
10264 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
10266 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10267 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
10268 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
10272 /* Optimize z * conj(z) for floating point complex numbers.
10273 Guarded by flag_unsafe_math_optimizations as non-finite
10274 imaginary components don't produce scalar results. */
10275 if (flag_unsafe_math_optimizations
10276 && TREE_CODE (arg0
) == CONJ_EXPR
10277 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10278 return fold_mult_zconjz (loc
, type
, arg1
);
10279 if (flag_unsafe_math_optimizations
10280 && TREE_CODE (arg1
) == CONJ_EXPR
10281 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10282 return fold_mult_zconjz (loc
, type
, arg0
);
10284 if (flag_unsafe_math_optimizations
)
10286 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10287 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10289 /* Optimizations of root(...)*root(...). */
10290 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
10293 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10294 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10296 /* Optimize sqrt(x)*sqrt(x) as x. */
10297 if (BUILTIN_SQRT_P (fcode0
)
10298 && operand_equal_p (arg00
, arg10
, 0)
10299 && ! HONOR_SNANS (element_mode (type
)))
10302 /* Optimize root(x)*root(y) as root(x*y). */
10303 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10304 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg00
, arg10
);
10305 return build_call_expr_loc (loc
, rootfn
, 1, arg
);
10308 /* Optimize expN(x)*expN(y) as expN(x+y). */
10309 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
10311 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10312 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10313 CALL_EXPR_ARG (arg0
, 0),
10314 CALL_EXPR_ARG (arg1
, 0));
10315 return build_call_expr_loc (loc
, expfn
, 1, arg
);
10318 /* Optimizations of pow(...)*pow(...). */
10319 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
10320 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
10321 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
10323 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10324 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10325 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10326 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10328 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10329 if (operand_equal_p (arg01
, arg11
, 0))
10331 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10332 tree arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
10334 return build_call_expr_loc (loc
, powfn
, 2, arg
, arg01
);
10337 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10338 if (operand_equal_p (arg00
, arg10
, 0))
10340 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10341 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10343 return build_call_expr_loc (loc
, powfn
, 2, arg00
, arg
);
10347 /* Optimize tan(x)*cos(x) as sin(x). */
10348 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
10349 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
10350 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
10351 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
10352 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
10353 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
10354 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10355 CALL_EXPR_ARG (arg1
, 0), 0))
10357 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
10359 if (sinfn
!= NULL_TREE
)
10360 return build_call_expr_loc (loc
, sinfn
, 1,
10361 CALL_EXPR_ARG (arg0
, 0));
10364 /* Optimize x*pow(x,c) as pow(x,c+1). */
10365 if (fcode1
== BUILT_IN_POW
10366 || fcode1
== BUILT_IN_POWF
10367 || fcode1
== BUILT_IN_POWL
)
10369 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10370 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10371 if (TREE_CODE (arg11
) == REAL_CST
10372 && !TREE_OVERFLOW (arg11
)
10373 && operand_equal_p (arg0
, arg10
, 0))
10375 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10379 c
= TREE_REAL_CST (arg11
);
10380 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10381 arg
= build_real (type
, c
);
10382 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10386 /* Optimize pow(x,c)*x as pow(x,c+1). */
10387 if (fcode0
== BUILT_IN_POW
10388 || fcode0
== BUILT_IN_POWF
10389 || fcode0
== BUILT_IN_POWL
)
10391 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10392 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10393 if (TREE_CODE (arg01
) == REAL_CST
10394 && !TREE_OVERFLOW (arg01
)
10395 && operand_equal_p (arg1
, arg00
, 0))
10397 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10401 c
= TREE_REAL_CST (arg01
);
10402 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10403 arg
= build_real (type
, c
);
10404 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
10408 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10409 if (!in_gimple_form
10411 && operand_equal_p (arg0
, arg1
, 0))
10413 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
10417 tree arg
= build_real (type
, dconst2
);
10418 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10427 /* Canonicalize (X & C1) | C2. */
10428 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10429 && TREE_CODE (arg1
) == INTEGER_CST
10430 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10432 int width
= TYPE_PRECISION (type
), w
;
10433 wide_int c1
= TREE_OPERAND (arg0
, 1);
10434 wide_int c2
= arg1
;
10436 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10437 if ((c1
& c2
) == c1
)
10438 return omit_one_operand_loc (loc
, type
, arg1
,
10439 TREE_OPERAND (arg0
, 0));
10441 wide_int msk
= wi::mask (width
, false,
10442 TYPE_PRECISION (TREE_TYPE (arg1
)));
10444 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10445 if (msk
.and_not (c1
| c2
) == 0)
10446 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
10447 TREE_OPERAND (arg0
, 0), arg1
);
10449 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10450 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10451 mode which allows further optimizations. */
10454 wide_int c3
= c1
.and_not (c2
);
10455 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
10457 wide_int mask
= wi::mask (w
, false,
10458 TYPE_PRECISION (type
));
10459 if (((c1
| c2
) & mask
) == mask
&& c1
.and_not (mask
) == 0)
10467 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
10468 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10469 TREE_OPERAND (arg0
, 0),
10470 wide_int_to_tree (type
,
10475 /* (X & ~Y) | (~X & Y) is X ^ Y */
10476 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10477 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10479 tree a0
, a1
, l0
, l1
, n0
, n1
;
10481 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10482 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10484 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10485 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10487 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
10488 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
10490 if ((operand_equal_p (n0
, a0
, 0)
10491 && operand_equal_p (n1
, a1
, 0))
10492 || (operand_equal_p (n0
, a1
, 0)
10493 && operand_equal_p (n1
, a0
, 0)))
10494 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
10497 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
10498 if (t1
!= NULL_TREE
)
10501 /* See if this can be simplified into a rotate first. If that
10502 is unsuccessful continue in the association code. */
10506 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10507 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10508 && INTEGRAL_TYPE_P (type
)
10509 && integer_onep (TREE_OPERAND (arg0
, 1))
10510 && integer_onep (arg1
))
10511 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
10512 build_zero_cst (TREE_TYPE (arg0
)));
10514 /* See if this can be simplified into a rotate first. If that
10515 is unsuccessful continue in the association code. */
10519 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
10520 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
10521 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10522 || (TREE_CODE (arg0
) == EQ_EXPR
10523 && integer_zerop (TREE_OPERAND (arg0
, 1))))
10524 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10525 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
10527 /* X & ~X , X & (X == 0), and X & !X are always zero. */
10528 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
10529 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10530 || (TREE_CODE (arg1
) == EQ_EXPR
10531 && integer_zerop (TREE_OPERAND (arg1
, 1))))
10532 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10533 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
10535 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10536 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10537 && INTEGRAL_TYPE_P (type
)
10538 && integer_onep (TREE_OPERAND (arg0
, 1))
10539 && integer_onep (arg1
))
10542 tem
= TREE_OPERAND (arg0
, 0);
10543 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
10544 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
10546 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
10547 build_zero_cst (TREE_TYPE (tem
)));
10549 /* Fold ~X & 1 as (X & 1) == 0. */
10550 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10551 && INTEGRAL_TYPE_P (type
)
10552 && integer_onep (arg1
))
10555 tem
= TREE_OPERAND (arg0
, 0);
10556 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
10557 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
10559 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
10560 build_zero_cst (TREE_TYPE (tem
)));
10562 /* Fold !X & 1 as X == 0. */
10563 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10564 && integer_onep (arg1
))
10566 tem
= TREE_OPERAND (arg0
, 0);
10567 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
10568 build_zero_cst (TREE_TYPE (tem
)));
10571 /* Fold (X ^ Y) & Y as ~X & Y. */
10572 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10573 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
10575 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10576 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10577 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
10578 fold_convert_loc (loc
, type
, arg1
));
10580 /* Fold (X ^ Y) & X as ~Y & X. */
10581 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10582 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
10583 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
10585 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10586 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10587 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
10588 fold_convert_loc (loc
, type
, arg1
));
10590 /* Fold X & (X ^ Y) as X & ~Y. */
10591 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
10592 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10594 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10595 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10596 fold_convert_loc (loc
, type
, arg0
),
10597 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
10599 /* Fold X & (Y ^ X) as ~Y & X. */
10600 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
10601 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
10602 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
10604 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10605 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10606 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
10607 fold_convert_loc (loc
, type
, arg0
));
10610 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10611 multiple of 1 << CST. */
10612 if (TREE_CODE (arg1
) == INTEGER_CST
)
10614 wide_int cst1
= arg1
;
10615 wide_int ncst1
= -cst1
;
10616 if ((cst1
& ncst1
) == ncst1
10617 && multiple_of_p (type
, arg0
,
10618 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
10619 return fold_convert_loc (loc
, type
, arg0
);
10622 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10624 if (TREE_CODE (arg1
) == INTEGER_CST
10625 && TREE_CODE (arg0
) == MULT_EXPR
10626 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10628 wide_int warg1
= arg1
;
10629 wide_int masked
= mask_with_tz (type
, warg1
, TREE_OPERAND (arg0
, 1));
10632 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
10634 else if (masked
!= warg1
)
10636 /* Avoid the transform if arg1 is a mask of some
10637 mode which allows further optimizations. */
10638 int pop
= wi::popcount (warg1
);
10639 if (!(pop
>= BITS_PER_UNIT
10640 && exact_log2 (pop
) != -1
10641 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
10642 return fold_build2_loc (loc
, code
, type
, op0
,
10643 wide_int_to_tree (type
, masked
));
10647 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10648 ((A & N) + B) & M -> (A + B) & M
10649 Similarly if (N & M) == 0,
10650 ((A | N) + B) & M -> (A + B) & M
10651 and for - instead of + (or unary - instead of +)
10652 and/or ^ instead of |.
10653 If B is constant and (B & M) == 0, fold into A & M. */
10654 if (TREE_CODE (arg1
) == INTEGER_CST
)
10656 wide_int cst1
= arg1
;
10657 if ((~cst1
!= 0) && (cst1
& (cst1
+ 1)) == 0
10658 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10659 && (TREE_CODE (arg0
) == PLUS_EXPR
10660 || TREE_CODE (arg0
) == MINUS_EXPR
10661 || TREE_CODE (arg0
) == NEGATE_EXPR
)
10662 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
10663 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
10669 /* Now we know that arg0 is (C + D) or (C - D) or
10670 -C and arg1 (M) is == (1LL << cst) - 1.
10671 Store C into PMOP[0] and D into PMOP[1]. */
10672 pmop
[0] = TREE_OPERAND (arg0
, 0);
10674 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
10676 pmop
[1] = TREE_OPERAND (arg0
, 1);
10680 if ((wi::max_value (TREE_TYPE (arg0
)) & cst1
) != cst1
)
10683 for (; which
>= 0; which
--)
10684 switch (TREE_CODE (pmop
[which
]))
10689 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
10692 cst0
= TREE_OPERAND (pmop
[which
], 1);
10694 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
10699 else if (cst0
!= 0)
10701 /* If C or D is of the form (A & N) where
10702 (N & M) == M, or of the form (A | N) or
10703 (A ^ N) where (N & M) == 0, replace it with A. */
10704 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
10707 /* If C or D is a N where (N & M) == 0, it can be
10708 omitted (assumed 0). */
10709 if ((TREE_CODE (arg0
) == PLUS_EXPR
10710 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
10711 && (cst1
& pmop
[which
]) == 0)
10712 pmop
[which
] = NULL
;
10718 /* Only build anything new if we optimized one or both arguments
10720 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
10721 || (TREE_CODE (arg0
) != NEGATE_EXPR
10722 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
10724 tree utype
= TREE_TYPE (arg0
);
10725 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10727 /* Perform the operations in a type that has defined
10728 overflow behavior. */
10729 utype
= unsigned_type_for (TREE_TYPE (arg0
));
10730 if (pmop
[0] != NULL
)
10731 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
10732 if (pmop
[1] != NULL
)
10733 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
10736 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
10737 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
10738 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
10740 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
10741 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
10743 else if (pmop
[0] != NULL
)
10745 else if (pmop
[1] != NULL
)
10748 return build_int_cst (type
, 0);
10750 else if (pmop
[0] == NULL
)
10751 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
10753 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
10755 /* TEM is now the new binary +, - or unary - replacement. */
10756 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
10757 fold_convert_loc (loc
, utype
, arg1
));
10758 return fold_convert_loc (loc
, type
, tem
);
10763 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
10764 if (t1
!= NULL_TREE
)
10766 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10767 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
10768 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
10770 prec
= element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
10772 wide_int mask
= wide_int::from (arg1
, prec
, UNSIGNED
);
10775 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10781 /* Don't touch a floating-point divide by zero unless the mode
10782 of the constant can represent infinity. */
10783 if (TREE_CODE (arg1
) == REAL_CST
10784 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
10785 && real_zerop (arg1
))
10788 /* (-A) / (-B) -> A / B */
10789 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10790 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
10791 TREE_OPERAND (arg0
, 0),
10792 negate_expr (arg1
));
10793 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10794 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
10795 negate_expr (arg0
),
10796 TREE_OPERAND (arg1
, 0));
10798 /* Convert A/B/C to A/(B*C). */
10799 if (flag_reciprocal_math
10800 && TREE_CODE (arg0
) == RDIV_EXPR
)
10801 return fold_build2_loc (loc
, RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
10802 fold_build2_loc (loc
, MULT_EXPR
, type
,
10803 TREE_OPERAND (arg0
, 1), arg1
));
10805 /* Convert A/(B/C) to (A/B)*C. */
10806 if (flag_reciprocal_math
10807 && TREE_CODE (arg1
) == RDIV_EXPR
)
10808 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10809 fold_build2_loc (loc
, RDIV_EXPR
, type
, arg0
,
10810 TREE_OPERAND (arg1
, 0)),
10811 TREE_OPERAND (arg1
, 1));
10813 /* Convert C1/(X*C2) into (C1/C2)/X. */
10814 if (flag_reciprocal_math
10815 && TREE_CODE (arg1
) == MULT_EXPR
10816 && TREE_CODE (arg0
) == REAL_CST
10817 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
10819 tree tem
= const_binop (RDIV_EXPR
, arg0
,
10820 TREE_OPERAND (arg1
, 1));
10822 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10823 TREE_OPERAND (arg1
, 0));
10826 if (flag_unsafe_math_optimizations
)
10828 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10829 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10831 /* Optimize sin(x)/cos(x) as tan(x). */
10832 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
10833 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
10834 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
10835 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10836 CALL_EXPR_ARG (arg1
, 0), 0))
10838 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
10840 if (tanfn
!= NULL_TREE
)
10841 return build_call_expr_loc (loc
, tanfn
, 1, CALL_EXPR_ARG (arg0
, 0));
10844 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10845 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
10846 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
10847 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
10848 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10849 CALL_EXPR_ARG (arg1
, 0), 0))
10851 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
10853 if (tanfn
!= NULL_TREE
)
10855 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
10856 CALL_EXPR_ARG (arg0
, 0));
10857 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
10858 build_real (type
, dconst1
), tmp
);
10862 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10863 NaNs or Infinities. */
10864 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
10865 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
10866 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
10868 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10869 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
10871 if (! HONOR_NANS (arg00
)
10872 && ! HONOR_INFINITIES (element_mode (arg00
))
10873 && operand_equal_p (arg00
, arg01
, 0))
10875 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
10877 if (cosfn
!= NULL_TREE
)
10878 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
10882 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10883 NaNs or Infinities. */
10884 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
10885 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
10886 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
10888 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10889 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
10891 if (! HONOR_NANS (arg00
)
10892 && ! HONOR_INFINITIES (element_mode (arg00
))
10893 && operand_equal_p (arg00
, arg01
, 0))
10895 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
10897 if (cosfn
!= NULL_TREE
)
10899 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
10900 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
10901 build_real (type
, dconst1
),
10907 /* Optimize pow(x,c)/x as pow(x,c-1). */
10908 if (fcode0
== BUILT_IN_POW
10909 || fcode0
== BUILT_IN_POWF
10910 || fcode0
== BUILT_IN_POWL
)
10912 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10913 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10914 if (TREE_CODE (arg01
) == REAL_CST
10915 && !TREE_OVERFLOW (arg01
)
10916 && operand_equal_p (arg1
, arg00
, 0))
10918 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10922 c
= TREE_REAL_CST (arg01
);
10923 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
10924 arg
= build_real (type
, c
);
10925 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
10929 /* Optimize a/root(b/c) into a*root(c/b). */
10930 if (BUILTIN_ROOT_P (fcode1
))
10932 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
10934 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
10936 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10937 tree b
= TREE_OPERAND (rootarg
, 0);
10938 tree c
= TREE_OPERAND (rootarg
, 1);
10940 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
10942 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
10943 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
10947 /* Optimize x/expN(y) into x*expN(-y). */
10948 if (BUILTIN_EXPONENT_P (fcode1
))
10950 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10951 tree arg
= negate_expr (CALL_EXPR_ARG (arg1
, 0));
10952 arg1
= build_call_expr_loc (loc
,
10954 fold_convert_loc (loc
, type
, arg
));
10955 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
10958 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10959 if (fcode1
== BUILT_IN_POW
10960 || fcode1
== BUILT_IN_POWF
10961 || fcode1
== BUILT_IN_POWL
)
10963 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10964 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10965 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10966 tree neg11
= fold_convert_loc (loc
, type
,
10967 negate_expr (arg11
));
10968 arg1
= build_call_expr_loc (loc
, powfn
, 2, arg10
, neg11
);
10969 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
10974 case TRUNC_DIV_EXPR
:
10975 /* Optimize (X & (-A)) / A where A is a power of 2,
10977 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10978 && !TYPE_UNSIGNED (type
) && TREE_CODE (arg1
) == INTEGER_CST
10979 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) > 0)
10981 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (arg1
),
10982 arg1
, TREE_OPERAND (arg0
, 1));
10983 if (sum
&& integer_zerop (sum
)) {
10984 tree pow2
= build_int_cst (integer_type_node
,
10985 wi::exact_log2 (arg1
));
10986 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
10987 TREE_OPERAND (arg0
, 0), pow2
);
10993 case FLOOR_DIV_EXPR
:
10994 /* Simplify A / (B << N) where A and B are positive and B is
10995 a power of 2, to A >> (N + log2(B)). */
10996 strict_overflow_p
= false;
10997 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10998 && (TYPE_UNSIGNED (type
)
10999 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11001 tree sval
= TREE_OPERAND (arg1
, 0);
11002 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
11004 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
11005 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
11006 wi::exact_log2 (sval
));
11008 if (strict_overflow_p
)
11009 fold_overflow_warning (("assuming signed overflow does not "
11010 "occur when simplifying A / (B << N)"),
11011 WARN_STRICT_OVERFLOW_MISC
);
11013 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
11015 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11016 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
11022 case ROUND_DIV_EXPR
:
11023 case CEIL_DIV_EXPR
:
11024 case EXACT_DIV_EXPR
:
11025 if (integer_zerop (arg1
))
11028 /* Convert -A / -B to A / B when the type is signed and overflow is
11030 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11031 && TREE_CODE (arg0
) == NEGATE_EXPR
11032 && negate_expr_p (arg1
))
11034 if (INTEGRAL_TYPE_P (type
))
11035 fold_overflow_warning (("assuming signed overflow does not occur "
11036 "when distributing negation across "
11038 WARN_STRICT_OVERFLOW_MISC
);
11039 return fold_build2_loc (loc
, code
, type
,
11040 fold_convert_loc (loc
, type
,
11041 TREE_OPERAND (arg0
, 0)),
11042 fold_convert_loc (loc
, type
,
11043 negate_expr (arg1
)));
11045 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11046 && TREE_CODE (arg1
) == NEGATE_EXPR
11047 && negate_expr_p (arg0
))
11049 if (INTEGRAL_TYPE_P (type
))
11050 fold_overflow_warning (("assuming signed overflow does not occur "
11051 "when distributing negation across "
11053 WARN_STRICT_OVERFLOW_MISC
);
11054 return fold_build2_loc (loc
, code
, type
,
11055 fold_convert_loc (loc
, type
,
11056 negate_expr (arg0
)),
11057 fold_convert_loc (loc
, type
,
11058 TREE_OPERAND (arg1
, 0)));
11061 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11062 operation, EXACT_DIV_EXPR.
11064 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11065 At one time others generated faster code, it's not clear if they do
11066 after the last round to changes to the DIV code in expmed.c. */
11067 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
11068 && multiple_of_p (type
, arg0
, arg1
))
11069 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
, arg0
, arg1
);
11071 strict_overflow_p
= false;
11072 if (TREE_CODE (arg1
) == INTEGER_CST
11073 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11074 &strict_overflow_p
)))
11076 if (strict_overflow_p
)
11077 fold_overflow_warning (("assuming signed overflow does not occur "
11078 "when simplifying division"),
11079 WARN_STRICT_OVERFLOW_MISC
);
11080 return fold_convert_loc (loc
, type
, tem
);
11085 case CEIL_MOD_EXPR
:
11086 case FLOOR_MOD_EXPR
:
11087 case ROUND_MOD_EXPR
:
11088 case TRUNC_MOD_EXPR
:
11089 strict_overflow_p
= false;
11090 if (TREE_CODE (arg1
) == INTEGER_CST
11091 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11092 &strict_overflow_p
)))
11094 if (strict_overflow_p
)
11095 fold_overflow_warning (("assuming signed overflow does not occur "
11096 "when simplifying modulus"),
11097 WARN_STRICT_OVERFLOW_MISC
);
11098 return fold_convert_loc (loc
, type
, tem
);
11107 /* Since negative shift count is not well-defined,
11108 don't try to compute it in the compiler. */
11109 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
11112 prec
= element_precision (type
);
11114 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11115 if (TREE_CODE (op0
) == code
&& tree_fits_uhwi_p (arg1
)
11116 && tree_to_uhwi (arg1
) < prec
11117 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11118 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
11120 unsigned int low
= (tree_to_uhwi (TREE_OPERAND (arg0
, 1))
11121 + tree_to_uhwi (arg1
));
11123 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11124 being well defined. */
11127 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
11129 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
11130 return omit_one_operand_loc (loc
, type
, build_zero_cst (type
),
11131 TREE_OPERAND (arg0
, 0));
11136 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
11137 build_int_cst (TREE_TYPE (arg1
), low
));
11140 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11141 into x & ((unsigned)-1 >> c) for unsigned types. */
11142 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
11143 || (TYPE_UNSIGNED (type
)
11144 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
11145 && tree_fits_uhwi_p (arg1
)
11146 && tree_to_uhwi (arg1
) < prec
11147 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11148 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
11150 HOST_WIDE_INT low0
= tree_to_uhwi (TREE_OPERAND (arg0
, 1));
11151 HOST_WIDE_INT low1
= tree_to_uhwi (arg1
);
11157 arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11159 lshift
= build_minus_one_cst (type
);
11160 lshift
= const_binop (code
, lshift
, arg1
);
11162 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, arg00
, lshift
);
11166 /* If we have a rotate of a bit operation with the rotate count and
11167 the second operand of the bit operation both constant,
11168 permute the two operations. */
11169 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11170 && (TREE_CODE (arg0
) == BIT_AND_EXPR
11171 || TREE_CODE (arg0
) == BIT_IOR_EXPR
11172 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
11173 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11174 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
11175 fold_build2_loc (loc
, code
, type
,
11176 TREE_OPERAND (arg0
, 0), arg1
),
11177 fold_build2_loc (loc
, code
, type
,
11178 TREE_OPERAND (arg0
, 1), arg1
));
11180 /* Two consecutive rotates adding up to the some integer
11181 multiple of the precision of the type can be ignored. */
11182 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11183 && TREE_CODE (arg0
) == RROTATE_EXPR
11184 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11185 && wi::umod_trunc (wi::add (arg1
, TREE_OPERAND (arg0
, 1)),
11187 return TREE_OPERAND (arg0
, 0);
11192 tem
= fold_minmax (loc
, MIN_EXPR
, type
, arg0
, arg1
);
11198 tem
= fold_minmax (loc
, MAX_EXPR
, type
, arg0
, arg1
);
11203 case TRUTH_ANDIF_EXPR
:
11204 /* Note that the operands of this must be ints
11205 and their values must be 0 or 1.
11206 ("true" is a fixed value perhaps depending on the language.) */
11207 /* If first arg is constant zero, return it. */
11208 if (integer_zerop (arg0
))
11209 return fold_convert_loc (loc
, type
, arg0
);
11210 case TRUTH_AND_EXPR
:
11211 /* If either arg is constant true, drop it. */
11212 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11213 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
11214 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
11215 /* Preserve sequence points. */
11216 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
11217 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11218 /* If second arg is constant zero, result is zero, but first arg
11219 must be evaluated. */
11220 if (integer_zerop (arg1
))
11221 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11222 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11223 case will be handled here. */
11224 if (integer_zerop (arg0
))
11225 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11227 /* !X && X is always false. */
11228 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11229 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11230 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11231 /* X && !X is always false. */
11232 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11233 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11234 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11236 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11237 means A >= Y && A != MAX, but in this case we know that
11240 if (!TREE_SIDE_EFFECTS (arg0
)
11241 && !TREE_SIDE_EFFECTS (arg1
))
11243 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
11244 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
11245 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
11247 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
11248 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
11249 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
11252 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
11258 case TRUTH_ORIF_EXPR
:
11259 /* Note that the operands of this must be ints
11260 and their values must be 0 or true.
11261 ("true" is a fixed value perhaps depending on the language.) */
11262 /* If first arg is constant true, return it. */
11263 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11264 return fold_convert_loc (loc
, type
, arg0
);
11265 case TRUTH_OR_EXPR
:
11266 /* If either arg is constant zero, drop it. */
11267 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
11268 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
11269 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
11270 /* Preserve sequence points. */
11271 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
11272 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11273 /* If second arg is constant true, result is true, but we must
11274 evaluate first arg. */
11275 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
11276 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11277 /* Likewise for first arg, but note this only occurs here for
11279 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11280 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11282 /* !X || X is always true. */
11283 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11284 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11285 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
11286 /* X || !X is always true. */
11287 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11288 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11289 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
11291 /* (X && !Y) || (!X && Y) is X ^ Y */
11292 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
11293 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
11295 tree a0
, a1
, l0
, l1
, n0
, n1
;
11297 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11298 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11300 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11301 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11303 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
11304 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
11306 if ((operand_equal_p (n0
, a0
, 0)
11307 && operand_equal_p (n1
, a1
, 0))
11308 || (operand_equal_p (n0
, a1
, 0)
11309 && operand_equal_p (n1
, a0
, 0)))
11310 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
11313 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
11319 case TRUTH_XOR_EXPR
:
11320 /* If the second arg is constant zero, drop it. */
11321 if (integer_zerop (arg1
))
11322 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11323 /* If the second arg is constant true, this is a logical inversion. */
11324 if (integer_onep (arg1
))
11326 tem
= invert_truthvalue_loc (loc
, arg0
);
11327 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
11329 /* Identical arguments cancel to zero. */
11330 if (operand_equal_p (arg0
, arg1
, 0))
11331 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11333 /* !X ^ X is always true. */
11334 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11335 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11336 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
11338 /* X ^ !X is always true. */
11339 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11340 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11341 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
11350 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
11351 if (tem
!= NULL_TREE
)
11354 /* bool_var != 0 becomes bool_var. */
11355 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
11356 && code
== NE_EXPR
)
11357 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11359 /* bool_var == 1 becomes bool_var. */
11360 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
11361 && code
== EQ_EXPR
)
11362 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11364 /* bool_var != 1 becomes !bool_var. */
11365 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
11366 && code
== NE_EXPR
)
11367 return fold_convert_loc (loc
, type
,
11368 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
11369 TREE_TYPE (arg0
), arg0
));
11371 /* bool_var == 0 becomes !bool_var. */
11372 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
11373 && code
== EQ_EXPR
)
11374 return fold_convert_loc (loc
, type
,
11375 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
11376 TREE_TYPE (arg0
), arg0
));
11378 /* !exp != 0 becomes !exp */
11379 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
11380 && code
== NE_EXPR
)
11381 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11383 /* If this is an equality comparison of the address of two non-weak,
11384 unaliased symbols neither of which are extern (since we do not
11385 have access to attributes for externs), then we know the result. */
11386 if (TREE_CODE (arg0
) == ADDR_EXPR
11387 && DECL_P (TREE_OPERAND (arg0
, 0))
11388 && TREE_CODE (arg1
) == ADDR_EXPR
11389 && DECL_P (TREE_OPERAND (arg1
, 0)))
11393 if (decl_in_symtab_p (TREE_OPERAND (arg0
, 0))
11394 && decl_in_symtab_p (TREE_OPERAND (arg1
, 0)))
11395 equal
= symtab_node::get_create (TREE_OPERAND (arg0
, 0))
11396 ->equal_address_to (symtab_node::get_create
11397 (TREE_OPERAND (arg1
, 0)));
11399 equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
11401 return constant_boolean_node (equal
11402 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
11406 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11407 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11408 && TREE_CODE (arg1
) == INTEGER_CST
11409 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11410 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
11411 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
11412 fold_convert_loc (loc
,
11415 TREE_OPERAND (arg0
, 1)));
11417 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
11418 if ((TREE_CODE (arg0
) == PLUS_EXPR
11419 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
11420 || TREE_CODE (arg0
) == MINUS_EXPR
)
11421 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
11424 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11425 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
11427 tree val
= TREE_OPERAND (arg0
, 1);
11428 return omit_two_operands_loc (loc
, type
,
11429 fold_build2_loc (loc
, code
, type
,
11431 build_int_cst (TREE_TYPE (val
),
11433 TREE_OPERAND (arg0
, 0), arg1
);
11436 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
11437 if (TREE_CODE (arg0
) == MINUS_EXPR
11438 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
11439 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
11442 && wi::extract_uhwi (TREE_OPERAND (arg0
, 0), 0, 1) == 1)
11444 return omit_two_operands_loc (loc
, type
,
11446 ? boolean_true_node
: boolean_false_node
,
11447 TREE_OPERAND (arg0
, 1), arg1
);
11450 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11451 if (TREE_CODE (arg0
) == ABS_EXPR
11452 && (integer_zerop (arg1
) || real_zerop (arg1
)))
11453 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
11455 /* If this is an EQ or NE comparison with zero and ARG0 is
11456 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11457 two operations, but the latter can be done in one less insn
11458 on machines that have only two-operand insns or on which a
11459 constant cannot be the first operand. */
11460 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11461 && integer_zerop (arg1
))
11463 tree arg00
= TREE_OPERAND (arg0
, 0);
11464 tree arg01
= TREE_OPERAND (arg0
, 1);
11465 if (TREE_CODE (arg00
) == LSHIFT_EXPR
11466 && integer_onep (TREE_OPERAND (arg00
, 0)))
11468 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
11469 arg01
, TREE_OPERAND (arg00
, 1));
11470 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
11471 build_int_cst (TREE_TYPE (arg0
), 1));
11472 return fold_build2_loc (loc
, code
, type
,
11473 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
11476 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
11477 && integer_onep (TREE_OPERAND (arg01
, 0)))
11479 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
11480 arg00
, TREE_OPERAND (arg01
, 1));
11481 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
11482 build_int_cst (TREE_TYPE (arg0
), 1));
11483 return fold_build2_loc (loc
, code
, type
,
11484 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
11489 /* If this is an NE or EQ comparison of zero against the result of a
11490 signed MOD operation whose second operand is a power of 2, make
11491 the MOD operation unsigned since it is simpler and equivalent. */
11492 if (integer_zerop (arg1
)
11493 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
11494 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
11495 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
11496 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
11497 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
11498 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
11500 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
11501 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
11502 fold_convert_loc (loc
, newtype
,
11503 TREE_OPERAND (arg0
, 0)),
11504 fold_convert_loc (loc
, newtype
,
11505 TREE_OPERAND (arg0
, 1)));
11507 return fold_build2_loc (loc
, code
, type
, newmod
,
11508 fold_convert_loc (loc
, newtype
, arg1
));
11511 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11512 C1 is a valid shift constant, and C2 is a power of two, i.e.
11514 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11515 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
11516 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
11518 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11519 && integer_zerop (arg1
))
11521 tree itype
= TREE_TYPE (arg0
);
11522 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
11523 prec
= TYPE_PRECISION (itype
);
11525 /* Check for a valid shift count. */
11526 if (wi::ltu_p (arg001
, prec
))
11528 tree arg01
= TREE_OPERAND (arg0
, 1);
11529 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
11530 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
11531 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11532 can be rewritten as (X & (C2 << C1)) != 0. */
11533 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
11535 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
11536 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
11537 return fold_build2_loc (loc
, code
, type
, tem
,
11538 fold_convert_loc (loc
, itype
, arg1
));
11540 /* Otherwise, for signed (arithmetic) shifts,
11541 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11542 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11543 else if (!TYPE_UNSIGNED (itype
))
11544 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
11545 arg000
, build_int_cst (itype
, 0));
11546 /* Otherwise, of unsigned (logical) shifts,
11547 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11548 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11550 return omit_one_operand_loc (loc
, type
,
11551 code
== EQ_EXPR
? integer_one_node
11552 : integer_zero_node
,
11557 /* If we have (A & C) == C where C is a power of 2, convert this into
11558 (A & C) != 0. Similarly for NE_EXPR. */
11559 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11560 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11561 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11562 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11563 arg0
, fold_convert_loc (loc
, TREE_TYPE (arg0
),
11564 integer_zero_node
));
11566 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11567 bit, then fold the expression into A < 0 or A >= 0. */
11568 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
, type
);
11572 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11573 Similarly for NE_EXPR. */
11574 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11575 && TREE_CODE (arg1
) == INTEGER_CST
11576 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11578 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
11579 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
11580 TREE_OPERAND (arg0
, 1));
11582 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
11583 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
11585 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
11586 if (integer_nonzerop (dandnotc
))
11587 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
11590 /* If this is a comparison of a field, we may be able to simplify it. */
11591 if ((TREE_CODE (arg0
) == COMPONENT_REF
11592 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
11593 /* Handle the constant case even without -O
11594 to make sure the warnings are given. */
11595 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
11597 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
11602 /* Optimize comparisons of strlen vs zero to a compare of the
11603 first character of the string vs zero. To wit,
11604 strlen(ptr) == 0 => *ptr == 0
11605 strlen(ptr) != 0 => *ptr != 0
11606 Other cases should reduce to one of these two (or a constant)
11607 due to the return value of strlen being unsigned. */
11608 if (TREE_CODE (arg0
) == CALL_EXPR
11609 && integer_zerop (arg1
))
11611 tree fndecl
= get_callee_fndecl (arg0
);
11614 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
11615 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
11616 && call_expr_nargs (arg0
) == 1
11617 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
11619 tree iref
= build_fold_indirect_ref_loc (loc
,
11620 CALL_EXPR_ARG (arg0
, 0));
11621 return fold_build2_loc (loc
, code
, type
, iref
,
11622 build_int_cst (TREE_TYPE (iref
), 0));
11626 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11627 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11628 if (TREE_CODE (arg0
) == RSHIFT_EXPR
11629 && integer_zerop (arg1
)
11630 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11632 tree arg00
= TREE_OPERAND (arg0
, 0);
11633 tree arg01
= TREE_OPERAND (arg0
, 1);
11634 tree itype
= TREE_TYPE (arg00
);
11635 if (wi::eq_p (arg01
, element_precision (itype
) - 1))
11637 if (TYPE_UNSIGNED (itype
))
11639 itype
= signed_type_for (itype
);
11640 arg00
= fold_convert_loc (loc
, itype
, arg00
);
11642 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
11643 type
, arg00
, build_zero_cst (itype
));
11647 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11648 (X & C) == 0 when C is a single bit. */
11649 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11650 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
11651 && integer_zerop (arg1
)
11652 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
11654 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
11655 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
11656 TREE_OPERAND (arg0
, 1));
11657 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
11659 fold_convert_loc (loc
, TREE_TYPE (arg0
),
11663 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11664 constant C is a power of two, i.e. a single bit. */
11665 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11666 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
11667 && integer_zerop (arg1
)
11668 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11669 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11670 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
11672 tree arg00
= TREE_OPERAND (arg0
, 0);
11673 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11674 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
11677 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11678 when is C is a power of two, i.e. a single bit. */
11679 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11680 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
11681 && integer_zerop (arg1
)
11682 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11683 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11684 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
11686 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
11687 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
11688 arg000
, TREE_OPERAND (arg0
, 1));
11689 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11690 tem
, build_int_cst (TREE_TYPE (tem
), 0));
11693 if (integer_zerop (arg1
)
11694 && tree_expr_nonzero_p (arg0
))
11696 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
11697 return omit_one_operand_loc (loc
, type
, res
, arg0
);
11700 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11701 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11702 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
11704 tree arg00
= TREE_OPERAND (arg0
, 0);
11705 tree arg01
= TREE_OPERAND (arg0
, 1);
11706 tree arg10
= TREE_OPERAND (arg1
, 0);
11707 tree arg11
= TREE_OPERAND (arg1
, 1);
11708 tree itype
= TREE_TYPE (arg0
);
11710 if (operand_equal_p (arg01
, arg11
, 0))
11711 return fold_build2_loc (loc
, code
, type
,
11712 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
11713 fold_build2_loc (loc
,
11714 BIT_XOR_EXPR
, itype
,
11717 build_zero_cst (itype
));
11719 if (operand_equal_p (arg01
, arg10
, 0))
11720 return fold_build2_loc (loc
, code
, type
,
11721 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
11722 fold_build2_loc (loc
,
11723 BIT_XOR_EXPR
, itype
,
11726 build_zero_cst (itype
));
11728 if (operand_equal_p (arg00
, arg11
, 0))
11729 return fold_build2_loc (loc
, code
, type
,
11730 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
11731 fold_build2_loc (loc
,
11732 BIT_XOR_EXPR
, itype
,
11735 build_zero_cst (itype
));
11737 if (operand_equal_p (arg00
, arg10
, 0))
11738 return fold_build2_loc (loc
, code
, type
,
11739 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
11740 fold_build2_loc (loc
,
11741 BIT_XOR_EXPR
, itype
,
11744 build_zero_cst (itype
));
11747 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11748 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
11750 tree arg00
= TREE_OPERAND (arg0
, 0);
11751 tree arg01
= TREE_OPERAND (arg0
, 1);
11752 tree arg10
= TREE_OPERAND (arg1
, 0);
11753 tree arg11
= TREE_OPERAND (arg1
, 1);
11754 tree itype
= TREE_TYPE (arg0
);
11756 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11757 operand_equal_p guarantees no side-effects so we don't need
11758 to use omit_one_operand on Z. */
11759 if (operand_equal_p (arg01
, arg11
, 0))
11760 return fold_build2_loc (loc
, code
, type
, arg00
,
11761 fold_convert_loc (loc
, TREE_TYPE (arg00
),
11763 if (operand_equal_p (arg01
, arg10
, 0))
11764 return fold_build2_loc (loc
, code
, type
, arg00
,
11765 fold_convert_loc (loc
, TREE_TYPE (arg00
),
11767 if (operand_equal_p (arg00
, arg11
, 0))
11768 return fold_build2_loc (loc
, code
, type
, arg01
,
11769 fold_convert_loc (loc
, TREE_TYPE (arg01
),
11771 if (operand_equal_p (arg00
, arg10
, 0))
11772 return fold_build2_loc (loc
, code
, type
, arg01
,
11773 fold_convert_loc (loc
, TREE_TYPE (arg01
),
11776 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11777 if (TREE_CODE (arg01
) == INTEGER_CST
11778 && TREE_CODE (arg11
) == INTEGER_CST
)
11780 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
11781 fold_convert_loc (loc
, itype
, arg11
));
11782 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
11783 return fold_build2_loc (loc
, code
, type
, tem
,
11784 fold_convert_loc (loc
, itype
, arg10
));
11788 /* Attempt to simplify equality/inequality comparisons of complex
11789 values. Only lower the comparison if the result is known or
11790 can be simplified to a single scalar comparison. */
11791 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
11792 || TREE_CODE (arg0
) == COMPLEX_CST
)
11793 && (TREE_CODE (arg1
) == COMPLEX_EXPR
11794 || TREE_CODE (arg1
) == COMPLEX_CST
))
11796 tree real0
, imag0
, real1
, imag1
;
11799 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
11801 real0
= TREE_OPERAND (arg0
, 0);
11802 imag0
= TREE_OPERAND (arg0
, 1);
11806 real0
= TREE_REALPART (arg0
);
11807 imag0
= TREE_IMAGPART (arg0
);
11810 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
11812 real1
= TREE_OPERAND (arg1
, 0);
11813 imag1
= TREE_OPERAND (arg1
, 1);
11817 real1
= TREE_REALPART (arg1
);
11818 imag1
= TREE_IMAGPART (arg1
);
11821 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
11822 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
11824 if (integer_zerop (rcond
))
11826 if (code
== EQ_EXPR
)
11827 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
11829 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
11833 if (code
== NE_EXPR
)
11834 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
11836 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
11840 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
11841 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
11843 if (integer_zerop (icond
))
11845 if (code
== EQ_EXPR
)
11846 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
11848 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
11852 if (code
== NE_EXPR
)
11853 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
11855 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
11866 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
11867 if (tem
!= NULL_TREE
)
11870 /* Transform comparisons of the form X +- C CMP X. */
11871 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
11872 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11873 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
11874 && !HONOR_SNANS (arg0
))
11875 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11876 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
11878 tree arg01
= TREE_OPERAND (arg0
, 1);
11879 enum tree_code code0
= TREE_CODE (arg0
);
11882 if (TREE_CODE (arg01
) == REAL_CST
)
11883 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
11885 is_positive
= tree_int_cst_sgn (arg01
);
11887 /* (X - c) > X becomes false. */
11888 if (code
== GT_EXPR
11889 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
11890 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
11892 if (TREE_CODE (arg01
) == INTEGER_CST
11893 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11894 fold_overflow_warning (("assuming signed overflow does not "
11895 "occur when assuming that (X - c) > X "
11896 "is always false"),
11897 WARN_STRICT_OVERFLOW_ALL
);
11898 return constant_boolean_node (0, type
);
11901 /* Likewise (X + c) < X becomes false. */
11902 if (code
== LT_EXPR
11903 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11904 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11906 if (TREE_CODE (arg01
) == INTEGER_CST
11907 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11908 fold_overflow_warning (("assuming signed overflow does not "
11909 "occur when assuming that "
11910 "(X + c) < X is always false"),
11911 WARN_STRICT_OVERFLOW_ALL
);
11912 return constant_boolean_node (0, type
);
11915 /* Convert (X - c) <= X to true. */
11916 if (!HONOR_NANS (arg1
)
11918 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
11919 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
11921 if (TREE_CODE (arg01
) == INTEGER_CST
11922 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11923 fold_overflow_warning (("assuming signed overflow does not "
11924 "occur when assuming that "
11925 "(X - c) <= X is always true"),
11926 WARN_STRICT_OVERFLOW_ALL
);
11927 return constant_boolean_node (1, type
);
11930 /* Convert (X + c) >= X to true. */
11931 if (!HONOR_NANS (arg1
)
11933 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11934 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11936 if (TREE_CODE (arg01
) == INTEGER_CST
11937 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11938 fold_overflow_warning (("assuming signed overflow does not "
11939 "occur when assuming that "
11940 "(X + c) >= X is always true"),
11941 WARN_STRICT_OVERFLOW_ALL
);
11942 return constant_boolean_node (1, type
);
11945 if (TREE_CODE (arg01
) == INTEGER_CST
)
11947 /* Convert X + c > X and X - c < X to true for integers. */
11948 if (code
== GT_EXPR
11949 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
11950 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
11952 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11953 fold_overflow_warning (("assuming signed overflow does "
11954 "not occur when assuming that "
11955 "(X + c) > X is always true"),
11956 WARN_STRICT_OVERFLOW_ALL
);
11957 return constant_boolean_node (1, type
);
11960 if (code
== LT_EXPR
11961 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
11962 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
11964 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11965 fold_overflow_warning (("assuming signed overflow does "
11966 "not occur when assuming that "
11967 "(X - c) < X is always true"),
11968 WARN_STRICT_OVERFLOW_ALL
);
11969 return constant_boolean_node (1, type
);
11972 /* Convert X + c <= X and X - c >= X to false for integers. */
11973 if (code
== LE_EXPR
11974 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
11975 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
11977 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11978 fold_overflow_warning (("assuming signed overflow does "
11979 "not occur when assuming that "
11980 "(X + c) <= X is always false"),
11981 WARN_STRICT_OVERFLOW_ALL
);
11982 return constant_boolean_node (0, type
);
11985 if (code
== GE_EXPR
11986 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
11987 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
11989 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
11990 fold_overflow_warning (("assuming signed overflow does "
11991 "not occur when assuming that "
11992 "(X - c) >= X is always false"),
11993 WARN_STRICT_OVERFLOW_ALL
);
11994 return constant_boolean_node (0, type
);
11999 /* Comparisons with the highest or lowest possible integer of
12000 the specified precision will have known values. */
12002 tree arg1_type
= TREE_TYPE (arg1
);
12003 unsigned int prec
= TYPE_PRECISION (arg1_type
);
12005 if (TREE_CODE (arg1
) == INTEGER_CST
12006 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
12008 wide_int max
= wi::max_value (arg1_type
);
12009 wide_int signed_max
= wi::max_value (prec
, SIGNED
);
12010 wide_int min
= wi::min_value (arg1_type
);
12012 if (wi::eq_p (arg1
, max
))
12016 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12019 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
12022 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12025 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
12027 /* The GE_EXPR and LT_EXPR cases above are not normally
12028 reached because of previous transformations. */
12033 else if (wi::eq_p (arg1
, max
- 1))
12037 arg1
= const_binop (PLUS_EXPR
, arg1
,
12038 build_int_cst (TREE_TYPE (arg1
), 1));
12039 return fold_build2_loc (loc
, EQ_EXPR
, type
,
12040 fold_convert_loc (loc
,
12041 TREE_TYPE (arg1
), arg0
),
12044 arg1
= const_binop (PLUS_EXPR
, arg1
,
12045 build_int_cst (TREE_TYPE (arg1
), 1));
12046 return fold_build2_loc (loc
, NE_EXPR
, type
,
12047 fold_convert_loc (loc
, TREE_TYPE (arg1
),
12053 else if (wi::eq_p (arg1
, min
))
12057 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12060 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
12063 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12066 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
12071 else if (wi::eq_p (arg1
, min
+ 1))
12075 arg1
= const_binop (MINUS_EXPR
, arg1
,
12076 build_int_cst (TREE_TYPE (arg1
), 1));
12077 return fold_build2_loc (loc
, NE_EXPR
, type
,
12078 fold_convert_loc (loc
,
12079 TREE_TYPE (arg1
), arg0
),
12082 arg1
= const_binop (MINUS_EXPR
, arg1
,
12083 build_int_cst (TREE_TYPE (arg1
), 1));
12084 return fold_build2_loc (loc
, EQ_EXPR
, type
,
12085 fold_convert_loc (loc
, TREE_TYPE (arg1
),
12092 else if (wi::eq_p (arg1
, signed_max
)
12093 && TYPE_UNSIGNED (arg1_type
)
12094 /* We will flip the signedness of the comparison operator
12095 associated with the mode of arg1, so the sign bit is
12096 specified by this mode. Check that arg1 is the signed
12097 max associated with this sign bit. */
12098 && prec
== GET_MODE_PRECISION (TYPE_MODE (arg1_type
))
12099 /* signed_type does not work on pointer types. */
12100 && INTEGRAL_TYPE_P (arg1_type
))
12102 /* The following case also applies to X < signed_max+1
12103 and X >= signed_max+1 because previous transformations. */
12104 if (code
== LE_EXPR
|| code
== GT_EXPR
)
12106 tree st
= signed_type_for (arg1_type
);
12107 return fold_build2_loc (loc
,
12108 code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
12109 type
, fold_convert_loc (loc
, st
, arg0
),
12110 build_int_cst (st
, 0));
12116 /* If we are comparing an ABS_EXPR with a constant, we can
12117 convert all the cases into explicit comparisons, but they may
12118 well not be faster than doing the ABS and one comparison.
12119 But ABS (X) <= C is a range comparison, which becomes a subtraction
12120 and a comparison, and is probably faster. */
12121 if (code
== LE_EXPR
12122 && TREE_CODE (arg1
) == INTEGER_CST
12123 && TREE_CODE (arg0
) == ABS_EXPR
12124 && ! TREE_SIDE_EFFECTS (arg0
)
12125 && (0 != (tem
= negate_expr (arg1
)))
12126 && TREE_CODE (tem
) == INTEGER_CST
12127 && !TREE_OVERFLOW (tem
))
12128 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
12129 build2 (GE_EXPR
, type
,
12130 TREE_OPERAND (arg0
, 0), tem
),
12131 build2 (LE_EXPR
, type
,
12132 TREE_OPERAND (arg0
, 0), arg1
));
12134 /* Convert ABS_EXPR<x> >= 0 to true. */
12135 strict_overflow_p
= false;
12136 if (code
== GE_EXPR
12137 && (integer_zerop (arg1
)
12138 || (! HONOR_NANS (arg0
)
12139 && real_zerop (arg1
)))
12140 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
12142 if (strict_overflow_p
)
12143 fold_overflow_warning (("assuming signed overflow does not occur "
12144 "when simplifying comparison of "
12145 "absolute value and zero"),
12146 WARN_STRICT_OVERFLOW_CONDITIONAL
);
12147 return omit_one_operand_loc (loc
, type
,
12148 constant_boolean_node (true, type
),
12152 /* Convert ABS_EXPR<x> < 0 to false. */
12153 strict_overflow_p
= false;
12154 if (code
== LT_EXPR
12155 && (integer_zerop (arg1
) || real_zerop (arg1
))
12156 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
12158 if (strict_overflow_p
)
12159 fold_overflow_warning (("assuming signed overflow does not occur "
12160 "when simplifying comparison of "
12161 "absolute value and zero"),
12162 WARN_STRICT_OVERFLOW_CONDITIONAL
);
12163 return omit_one_operand_loc (loc
, type
,
12164 constant_boolean_node (false, type
),
12168 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12169 and similarly for >= into !=. */
12170 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
12171 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
12172 && TREE_CODE (arg1
) == LSHIFT_EXPR
12173 && integer_onep (TREE_OPERAND (arg1
, 0)))
12174 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
12175 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
12176 TREE_OPERAND (arg1
, 1)),
12177 build_zero_cst (TREE_TYPE (arg0
)));
12179 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12180 otherwise Y might be >= # of bits in X's type and thus e.g.
12181 (unsigned char) (1 << Y) for Y 15 might be 0.
12182 If the cast is widening, then 1 << Y should have unsigned type,
12183 otherwise if Y is number of bits in the signed shift type minus 1,
12184 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12185 31 might be 0xffffffff80000000. */
12186 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
12187 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
12188 && CONVERT_EXPR_P (arg1
)
12189 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
12190 && (element_precision (TREE_TYPE (arg1
))
12191 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
12192 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
12193 || (element_precision (TREE_TYPE (arg1
))
12194 == element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
12195 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
12197 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
12198 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
12199 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
12200 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
12201 build_zero_cst (TREE_TYPE (arg0
)));
12206 case UNORDERED_EXPR
:
12214 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
12216 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
12217 if (t1
!= NULL_TREE
)
12221 /* If the first operand is NaN, the result is constant. */
12222 if (TREE_CODE (arg0
) == REAL_CST
12223 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
12224 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
12226 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
12227 ? integer_zero_node
12228 : integer_one_node
;
12229 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
12232 /* If the second operand is NaN, the result is constant. */
12233 if (TREE_CODE (arg1
) == REAL_CST
12234 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
12235 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
12237 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
12238 ? integer_zero_node
12239 : integer_one_node
;
12240 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
12243 /* Simplify unordered comparison of something with itself. */
12244 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
12245 && operand_equal_p (arg0
, arg1
, 0))
12246 return constant_boolean_node (1, type
);
12248 if (code
== LTGT_EXPR
12249 && !flag_trapping_math
12250 && operand_equal_p (arg0
, arg1
, 0))
12251 return constant_boolean_node (0, type
);
12253 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12255 tree targ0
= strip_float_extensions (arg0
);
12256 tree targ1
= strip_float_extensions (arg1
);
12257 tree newtype
= TREE_TYPE (targ0
);
12259 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
12260 newtype
= TREE_TYPE (targ1
);
12262 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
12263 return fold_build2_loc (loc
, code
, type
,
12264 fold_convert_loc (loc
, newtype
, targ0
),
12265 fold_convert_loc (loc
, newtype
, targ1
));
12270 case COMPOUND_EXPR
:
12271 /* When pedantic, a compound expression can be neither an lvalue
12272 nor an integer constant expression. */
12273 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
12275 /* Don't let (0, 0) be null pointer constant. */
12276 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
12277 : fold_convert_loc (loc
, type
, arg1
);
12278 return pedantic_non_lvalue_loc (loc
, tem
);
12281 /* An ASSERT_EXPR should never be passed to fold_binary. */
12282 gcc_unreachable ();
12286 } /* switch (code) */
12289 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12290 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
12294 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
12296 switch (TREE_CODE (*tp
))
12302 *walk_subtrees
= 0;
12304 /* ... fall through ... */
12311 /* Return whether the sub-tree ST contains a label which is accessible from
12312 outside the sub-tree. */
12315 contains_label_p (tree st
)
12318 (walk_tree_without_duplicates (&st
, contains_label_1
, NULL
) != NULL_TREE
);
12321 /* Fold a ternary expression of code CODE and type TYPE with operands
12322 OP0, OP1, and OP2. Return the folded expression if folding is
12323 successful. Otherwise, return NULL_TREE. */
12326 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
12327 tree op0
, tree op1
, tree op2
)
12330 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
12331 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
12333 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
12334 && TREE_CODE_LENGTH (code
) == 3);
12336 /* If this is a commutative operation, and OP0 is a constant, move it
12337 to OP1 to reduce the number of tests below. */
12338 if (commutative_ternary_tree_code (code
)
12339 && tree_swap_operands_p (op0
, op1
, true))
12340 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
12342 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
12346 /* Strip any conversions that don't change the mode. This is safe
12347 for every expression, except for a comparison expression because
12348 its signedness is derived from its operands. So, in the latter
12349 case, only strip conversions that don't change the signedness.
12351 Note that this is done as an internal manipulation within the
12352 constant folder, in order to find the simplest representation of
12353 the arguments so that their form can be studied. In any cases,
12354 the appropriate type conversions should be put back in the tree
12355 that will get out of the constant folder. */
12376 case COMPONENT_REF
:
12377 if (TREE_CODE (arg0
) == CONSTRUCTOR
12378 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
12380 unsigned HOST_WIDE_INT idx
;
12382 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
12389 case VEC_COND_EXPR
:
12390 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12391 so all simple results must be passed through pedantic_non_lvalue. */
12392 if (TREE_CODE (arg0
) == INTEGER_CST
)
12394 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
12395 tem
= integer_zerop (arg0
) ? op2
: op1
;
12396 /* Only optimize constant conditions when the selected branch
12397 has the same type as the COND_EXPR. This avoids optimizing
12398 away "c ? x : throw", where the throw has a void type.
12399 Avoid throwing away that operand which contains label. */
12400 if ((!TREE_SIDE_EFFECTS (unused_op
)
12401 || !contains_label_p (unused_op
))
12402 && (! VOID_TYPE_P (TREE_TYPE (tem
))
12403 || VOID_TYPE_P (type
)))
12404 return pedantic_non_lvalue_loc (loc
, tem
);
12407 else if (TREE_CODE (arg0
) == VECTOR_CST
)
12409 if ((TREE_CODE (arg1
) == VECTOR_CST
12410 || TREE_CODE (arg1
) == CONSTRUCTOR
)
12411 && (TREE_CODE (arg2
) == VECTOR_CST
12412 || TREE_CODE (arg2
) == CONSTRUCTOR
))
12414 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
12415 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
12416 gcc_assert (nelts
== VECTOR_CST_NELTS (arg0
));
12417 for (i
= 0; i
< nelts
; i
++)
12419 tree val
= VECTOR_CST_ELT (arg0
, i
);
12420 if (integer_all_onesp (val
))
12422 else if (integer_zerop (val
))
12423 sel
[i
] = nelts
+ i
;
12424 else /* Currently unreachable. */
12427 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
12428 if (t
!= NULL_TREE
)
12433 /* If we have A op B ? A : C, we may be able to convert this to a
12434 simpler expression, depending on the operation and the values
12435 of B and C. Signed zeros prevent all of these transformations,
12436 for reasons given above each one.
12438 Also try swapping the arguments and inverting the conditional. */
12439 if (COMPARISON_CLASS_P (arg0
)
12440 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
12441 arg1
, TREE_OPERAND (arg0
, 1))
12442 && !HONOR_SIGNED_ZEROS (element_mode (arg1
)))
12444 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
12449 if (COMPARISON_CLASS_P (arg0
)
12450 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
12452 TREE_OPERAND (arg0
, 1))
12453 && !HONOR_SIGNED_ZEROS (element_mode (op2
)))
12455 location_t loc0
= expr_location_or (arg0
, loc
);
12456 tem
= fold_invert_truthvalue (loc0
, arg0
);
12457 if (tem
&& COMPARISON_CLASS_P (tem
))
12459 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
12465 /* If the second operand is simpler than the third, swap them
12466 since that produces better jump optimization results. */
12467 if (truth_value_p (TREE_CODE (arg0
))
12468 && tree_swap_operands_p (op1
, op2
, false))
12470 location_t loc0
= expr_location_or (arg0
, loc
);
12471 /* See if this can be inverted. If it can't, possibly because
12472 it was a floating-point inequality comparison, don't do
12474 tem
= fold_invert_truthvalue (loc0
, arg0
);
12476 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
12479 /* Convert A ? 1 : 0 to simply A. */
12480 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
12481 : (integer_onep (op1
)
12482 && !VECTOR_TYPE_P (type
)))
12483 && integer_zerop (op2
)
12484 /* If we try to convert OP0 to our type, the
12485 call to fold will try to move the conversion inside
12486 a COND, which will recurse. In that case, the COND_EXPR
12487 is probably the best choice, so leave it alone. */
12488 && type
== TREE_TYPE (arg0
))
12489 return pedantic_non_lvalue_loc (loc
, arg0
);
12491 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12492 over COND_EXPR in cases such as floating point comparisons. */
12493 if (integer_zerop (op1
)
12494 && (code
== VEC_COND_EXPR
? integer_all_onesp (op2
)
12495 : (integer_onep (op2
)
12496 && !VECTOR_TYPE_P (type
)))
12497 && truth_value_p (TREE_CODE (arg0
)))
12498 return pedantic_non_lvalue_loc (loc
,
12499 fold_convert_loc (loc
, type
,
12500 invert_truthvalue_loc (loc
,
12503 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12504 if (TREE_CODE (arg0
) == LT_EXPR
12505 && integer_zerop (TREE_OPERAND (arg0
, 1))
12506 && integer_zerop (op2
)
12507 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
12509 /* sign_bit_p looks through both zero and sign extensions,
12510 but for this optimization only sign extensions are
12512 tree tem2
= TREE_OPERAND (arg0
, 0);
12513 while (tem
!= tem2
)
12515 if (TREE_CODE (tem2
) != NOP_EXPR
12516 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
12521 tem2
= TREE_OPERAND (tem2
, 0);
12523 /* sign_bit_p only checks ARG1 bits within A's precision.
12524 If <sign bit of A> has wider type than A, bits outside
12525 of A's precision in <sign bit of A> need to be checked.
12526 If they are all 0, this optimization needs to be done
12527 in unsigned A's type, if they are all 1 in signed A's type,
12528 otherwise this can't be done. */
12530 && TYPE_PRECISION (TREE_TYPE (tem
))
12531 < TYPE_PRECISION (TREE_TYPE (arg1
))
12532 && TYPE_PRECISION (TREE_TYPE (tem
))
12533 < TYPE_PRECISION (type
))
12535 int inner_width
, outer_width
;
12538 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
12539 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
12540 if (outer_width
> TYPE_PRECISION (type
))
12541 outer_width
= TYPE_PRECISION (type
);
12543 wide_int mask
= wi::shifted_mask
12544 (inner_width
, outer_width
- inner_width
, false,
12545 TYPE_PRECISION (TREE_TYPE (arg1
)));
12547 wide_int common
= mask
& arg1
;
12548 if (common
== mask
)
12550 tem_type
= signed_type_for (TREE_TYPE (tem
));
12551 tem
= fold_convert_loc (loc
, tem_type
, tem
);
12553 else if (common
== 0)
12555 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
12556 tem
= fold_convert_loc (loc
, tem_type
, tem
);
12564 fold_convert_loc (loc
, type
,
12565 fold_build2_loc (loc
, BIT_AND_EXPR
,
12566 TREE_TYPE (tem
), tem
,
12567 fold_convert_loc (loc
,
12572 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12573 already handled above. */
12574 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12575 && integer_onep (TREE_OPERAND (arg0
, 1))
12576 && integer_zerop (op2
)
12577 && integer_pow2p (arg1
))
12579 tree tem
= TREE_OPERAND (arg0
, 0);
12581 if (TREE_CODE (tem
) == RSHIFT_EXPR
12582 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
12583 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
12584 tree_to_uhwi (TREE_OPERAND (tem
, 1)))
12585 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
12586 TREE_OPERAND (tem
, 0), arg1
);
12589 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12590 is probably obsolete because the first operand should be a
12591 truth value (that's why we have the two cases above), but let's
12592 leave it in until we can confirm this for all front-ends. */
12593 if (integer_zerop (op2
)
12594 && TREE_CODE (arg0
) == NE_EXPR
12595 && integer_zerop (TREE_OPERAND (arg0
, 1))
12596 && integer_pow2p (arg1
)
12597 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12598 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12599 arg1
, OEP_ONLY_CONST
))
12600 return pedantic_non_lvalue_loc (loc
,
12601 fold_convert_loc (loc
, type
,
12602 TREE_OPERAND (arg0
, 0)));
12604 /* Disable the transformations below for vectors, since
12605 fold_binary_op_with_conditional_arg may undo them immediately,
12606 yielding an infinite loop. */
12607 if (code
== VEC_COND_EXPR
)
12610 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12611 if (integer_zerop (op2
)
12612 && truth_value_p (TREE_CODE (arg0
))
12613 && truth_value_p (TREE_CODE (arg1
))
12614 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12615 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
12616 : TRUTH_ANDIF_EXPR
,
12617 type
, fold_convert_loc (loc
, type
, arg0
), arg1
);
12619 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12620 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
12621 && truth_value_p (TREE_CODE (arg0
))
12622 && truth_value_p (TREE_CODE (arg1
))
12623 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12625 location_t loc0
= expr_location_or (arg0
, loc
);
12626 /* Only perform transformation if ARG0 is easily inverted. */
12627 tem
= fold_invert_truthvalue (loc0
, arg0
);
12629 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
12632 type
, fold_convert_loc (loc
, type
, tem
),
12636 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12637 if (integer_zerop (arg1
)
12638 && truth_value_p (TREE_CODE (arg0
))
12639 && truth_value_p (TREE_CODE (op2
))
12640 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12642 location_t loc0
= expr_location_or (arg0
, loc
);
12643 /* Only perform transformation if ARG0 is easily inverted. */
12644 tem
= fold_invert_truthvalue (loc0
, arg0
);
12646 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
12647 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
12648 type
, fold_convert_loc (loc
, type
, tem
),
12652 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12653 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
12654 && truth_value_p (TREE_CODE (arg0
))
12655 && truth_value_p (TREE_CODE (op2
))
12656 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12657 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
12658 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
12659 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
12664 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12665 of fold_ternary on them. */
12666 gcc_unreachable ();
12668 case BIT_FIELD_REF
:
12669 if ((TREE_CODE (arg0
) == VECTOR_CST
12670 || (TREE_CODE (arg0
) == CONSTRUCTOR
12671 && TREE_CODE (TREE_TYPE (arg0
)) == VECTOR_TYPE
))
12672 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
12673 || (TREE_CODE (type
) == VECTOR_TYPE
12674 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
12676 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
12677 unsigned HOST_WIDE_INT width
= tree_to_uhwi (TYPE_SIZE (eltype
));
12678 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
12679 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
12682 && (idx
% width
) == 0
12683 && (n
% width
) == 0
12684 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
12689 if (TREE_CODE (arg0
) == VECTOR_CST
)
12692 return VECTOR_CST_ELT (arg0
, idx
);
12694 tree
*vals
= XALLOCAVEC (tree
, n
);
12695 for (unsigned i
= 0; i
< n
; ++i
)
12696 vals
[i
] = VECTOR_CST_ELT (arg0
, idx
+ i
);
12697 return build_vector (type
, vals
);
12700 /* Constructor elements can be subvectors. */
12701 unsigned HOST_WIDE_INT k
= 1;
12702 if (CONSTRUCTOR_NELTS (arg0
) != 0)
12704 tree cons_elem
= TREE_TYPE (CONSTRUCTOR_ELT (arg0
, 0)->value
);
12705 if (TREE_CODE (cons_elem
) == VECTOR_TYPE
)
12706 k
= TYPE_VECTOR_SUBPARTS (cons_elem
);
12709 /* We keep an exact subset of the constructor elements. */
12710 if ((idx
% k
) == 0 && (n
% k
) == 0)
12712 if (CONSTRUCTOR_NELTS (arg0
) == 0)
12713 return build_constructor (type
, NULL
);
12718 if (idx
< CONSTRUCTOR_NELTS (arg0
))
12719 return CONSTRUCTOR_ELT (arg0
, idx
)->value
;
12720 return build_zero_cst (type
);
12723 vec
<constructor_elt
, va_gc
> *vals
;
12724 vec_alloc (vals
, n
);
12725 for (unsigned i
= 0;
12726 i
< n
&& idx
+ i
< CONSTRUCTOR_NELTS (arg0
);
12728 CONSTRUCTOR_APPEND_ELT (vals
, NULL_TREE
,
12730 (arg0
, idx
+ i
)->value
);
12731 return build_constructor (type
, vals
);
12733 /* The bitfield references a single constructor element. */
12734 else if (idx
+ n
<= (idx
/ k
+ 1) * k
)
12736 if (CONSTRUCTOR_NELTS (arg0
) <= idx
/ k
)
12737 return build_zero_cst (type
);
12739 return CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
;
12741 return fold_build3_loc (loc
, code
, type
,
12742 CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
, op1
,
12743 build_int_cst (TREE_TYPE (op2
), (idx
% k
) * width
));
12748 /* A bit-field-ref that referenced the full argument can be stripped. */
12749 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
12750 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_to_uhwi (arg1
)
12751 && integer_zerop (op2
))
12752 return fold_convert_loc (loc
, type
, arg0
);
12754 /* On constants we can use native encode/interpret to constant
12755 fold (nearly) all BIT_FIELD_REFs. */
12756 if (CONSTANT_CLASS_P (arg0
)
12757 && can_native_interpret_type_p (type
)
12758 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)))
12759 /* This limitation should not be necessary, we just need to
12760 round this up to mode size. */
12761 && tree_to_uhwi (op1
) % BITS_PER_UNIT
== 0
12762 /* Need bit-shifting of the buffer to relax the following. */
12763 && tree_to_uhwi (op2
) % BITS_PER_UNIT
== 0)
12765 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
12766 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
12767 unsigned HOST_WIDE_INT clen
;
12768 clen
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)));
12769 /* ??? We cannot tell native_encode_expr to start at
12770 some random byte only. So limit us to a reasonable amount
12774 unsigned char *b
= XALLOCAVEC (unsigned char, clen
);
12775 unsigned HOST_WIDE_INT len
= native_encode_expr (arg0
, b
, clen
);
12777 && len
* BITS_PER_UNIT
>= bitpos
+ bitsize
)
12779 tree v
= native_interpret_expr (type
,
12780 b
+ bitpos
/ BITS_PER_UNIT
,
12781 bitsize
/ BITS_PER_UNIT
);
12791 /* For integers we can decompose the FMA if possible. */
12792 if (TREE_CODE (arg0
) == INTEGER_CST
12793 && TREE_CODE (arg1
) == INTEGER_CST
)
12794 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
12795 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
12796 if (integer_zerop (arg2
))
12797 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
12799 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
12801 case VEC_PERM_EXPR
:
12802 if (TREE_CODE (arg2
) == VECTOR_CST
)
12804 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
, mask
, mask2
;
12805 unsigned char *sel
= XALLOCAVEC (unsigned char, 2 * nelts
);
12806 unsigned char *sel2
= sel
+ nelts
;
12807 bool need_mask_canon
= false;
12808 bool need_mask_canon2
= false;
12809 bool all_in_vec0
= true;
12810 bool all_in_vec1
= true;
12811 bool maybe_identity
= true;
12812 bool single_arg
= (op0
== op1
);
12813 bool changed
= false;
12815 mask2
= 2 * nelts
- 1;
12816 mask
= single_arg
? (nelts
- 1) : mask2
;
12817 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
12818 for (i
= 0; i
< nelts
; i
++)
12820 tree val
= VECTOR_CST_ELT (arg2
, i
);
12821 if (TREE_CODE (val
) != INTEGER_CST
)
12824 /* Make sure that the perm value is in an acceptable
12827 need_mask_canon
|= wi::gtu_p (t
, mask
);
12828 need_mask_canon2
|= wi::gtu_p (t
, mask2
);
12829 sel
[i
] = t
.to_uhwi () & mask
;
12830 sel2
[i
] = t
.to_uhwi () & mask2
;
12832 if (sel
[i
] < nelts
)
12833 all_in_vec1
= false;
12835 all_in_vec0
= false;
12837 if ((sel
[i
] & (nelts
-1)) != i
)
12838 maybe_identity
= false;
12841 if (maybe_identity
)
12851 else if (all_in_vec1
)
12854 for (i
= 0; i
< nelts
; i
++)
12856 need_mask_canon
= true;
12859 if ((TREE_CODE (op0
) == VECTOR_CST
12860 || TREE_CODE (op0
) == CONSTRUCTOR
)
12861 && (TREE_CODE (op1
) == VECTOR_CST
12862 || TREE_CODE (op1
) == CONSTRUCTOR
))
12864 tree t
= fold_vec_perm (type
, op0
, op1
, sel
);
12865 if (t
!= NULL_TREE
)
12869 if (op0
== op1
&& !single_arg
)
12872 /* Some targets are deficient and fail to expand a single
12873 argument permutation while still allowing an equivalent
12874 2-argument version. */
12875 if (need_mask_canon
&& arg2
== op2
12876 && !can_vec_perm_p (TYPE_MODE (type
), false, sel
)
12877 && can_vec_perm_p (TYPE_MODE (type
), false, sel2
))
12879 need_mask_canon
= need_mask_canon2
;
12883 if (need_mask_canon
&& arg2
== op2
)
12885 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
12886 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
12887 for (i
= 0; i
< nelts
; i
++)
12888 tsel
[i
] = build_int_cst (eltype
, sel
[i
]);
12889 op2
= build_vector (TREE_TYPE (arg2
), tsel
);
12894 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
12900 } /* switch (code) */
12903 /* Perform constant folding and related simplification of EXPR.
12904 The related simplifications include x*1 => x, x*0 => 0, etc.,
12905 and application of the associative law.
12906 NOP_EXPR conversions may be removed freely (as long as we
12907 are careful not to change the type of the overall expression).
12908 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12909 but we can constant-fold them if they have constant operands. */
12911 #ifdef ENABLE_FOLD_CHECKING
12912 # define fold(x) fold_1 (x)
12913 static tree
fold_1 (tree
);
12919 const tree t
= expr
;
12920 enum tree_code code
= TREE_CODE (t
);
12921 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
12923 location_t loc
= EXPR_LOCATION (expr
);
12925 /* Return right away if a constant. */
12926 if (kind
== tcc_constant
)
12929 /* CALL_EXPR-like objects with variable numbers of operands are
12930 treated specially. */
12931 if (kind
== tcc_vl_exp
)
12933 if (code
== CALL_EXPR
)
12935 tem
= fold_call_expr (loc
, expr
, false);
12936 return tem
? tem
: expr
;
12941 if (IS_EXPR_CODE_CLASS (kind
))
12943 tree type
= TREE_TYPE (t
);
12944 tree op0
, op1
, op2
;
12946 switch (TREE_CODE_LENGTH (code
))
12949 op0
= TREE_OPERAND (t
, 0);
12950 tem
= fold_unary_loc (loc
, code
, type
, op0
);
12951 return tem
? tem
: expr
;
12953 op0
= TREE_OPERAND (t
, 0);
12954 op1
= TREE_OPERAND (t
, 1);
12955 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
12956 return tem
? tem
: expr
;
12958 op0
= TREE_OPERAND (t
, 0);
12959 op1
= TREE_OPERAND (t
, 1);
12960 op2
= TREE_OPERAND (t
, 2);
12961 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
12962 return tem
? tem
: expr
;
12972 tree op0
= TREE_OPERAND (t
, 0);
12973 tree op1
= TREE_OPERAND (t
, 1);
12975 if (TREE_CODE (op1
) == INTEGER_CST
12976 && TREE_CODE (op0
) == CONSTRUCTOR
12977 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
12979 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (op0
);
12980 unsigned HOST_WIDE_INT end
= vec_safe_length (elts
);
12981 unsigned HOST_WIDE_INT begin
= 0;
12983 /* Find a matching index by means of a binary search. */
12984 while (begin
!= end
)
12986 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
12987 tree index
= (*elts
)[middle
].index
;
12989 if (TREE_CODE (index
) == INTEGER_CST
12990 && tree_int_cst_lt (index
, op1
))
12991 begin
= middle
+ 1;
12992 else if (TREE_CODE (index
) == INTEGER_CST
12993 && tree_int_cst_lt (op1
, index
))
12995 else if (TREE_CODE (index
) == RANGE_EXPR
12996 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
12997 begin
= middle
+ 1;
12998 else if (TREE_CODE (index
) == RANGE_EXPR
12999 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
13002 return (*elts
)[middle
].value
;
13009 /* Return a VECTOR_CST if possible. */
13012 tree type
= TREE_TYPE (t
);
13013 if (TREE_CODE (type
) != VECTOR_TYPE
)
13016 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
13017 unsigned HOST_WIDE_INT idx
, pos
= 0;
13020 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), idx
, value
)
13022 if (!CONSTANT_CLASS_P (value
))
13024 if (TREE_CODE (value
) == VECTOR_CST
)
13026 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
13027 vec
[pos
++] = VECTOR_CST_ELT (value
, i
);
13030 vec
[pos
++] = value
;
13032 for (; pos
< TYPE_VECTOR_SUBPARTS (type
); ++pos
)
13033 vec
[pos
] = build_zero_cst (TREE_TYPE (type
));
13035 return build_vector (type
, vec
);
13039 return fold (DECL_INITIAL (t
));
13043 } /* switch (code) */
13046 #ifdef ENABLE_FOLD_CHECKING
13049 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
13050 hash_table
<nofree_ptr_hash
<const tree_node
> > *);
13051 static void fold_check_failed (const_tree
, const_tree
);
13052 void print_fold_checksum (const_tree
);
13054 /* When --enable-checking=fold, compute a digest of expr before
13055 and after actual fold call to see if fold did not accidentally
13056 change original expr. */
13062 struct md5_ctx ctx
;
13063 unsigned char checksum_before
[16], checksum_after
[16];
13064 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13066 md5_init_ctx (&ctx
);
13067 fold_checksum_tree (expr
, &ctx
, &ht
);
13068 md5_finish_ctx (&ctx
, checksum_before
);
13071 ret
= fold_1 (expr
);
13073 md5_init_ctx (&ctx
);
13074 fold_checksum_tree (expr
, &ctx
, &ht
);
13075 md5_finish_ctx (&ctx
, checksum_after
);
13077 if (memcmp (checksum_before
, checksum_after
, 16))
13078 fold_check_failed (expr
, ret
);
13084 print_fold_checksum (const_tree expr
)
13086 struct md5_ctx ctx
;
13087 unsigned char checksum
[16], cnt
;
13088 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13090 md5_init_ctx (&ctx
);
13091 fold_checksum_tree (expr
, &ctx
, &ht
);
13092 md5_finish_ctx (&ctx
, checksum
);
13093 for (cnt
= 0; cnt
< 16; ++cnt
)
13094 fprintf (stderr
, "%02x", checksum
[cnt
]);
13095 putc ('\n', stderr
);
13099 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
13101 internal_error ("fold check: original tree changed by fold");
13105 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
13106 hash_table
<nofree_ptr_hash
<const tree_node
> > *ht
)
13108 const tree_node
**slot
;
13109 enum tree_code code
;
13110 union tree_node buf
;
13116 slot
= ht
->find_slot (expr
, INSERT
);
13120 code
= TREE_CODE (expr
);
13121 if (TREE_CODE_CLASS (code
) == tcc_declaration
13122 && HAS_DECL_ASSEMBLER_NAME_P (expr
))
13124 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13125 memcpy ((char *) &buf
, expr
, tree_size (expr
));
13126 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
13127 buf
.decl_with_vis
.symtab_node
= NULL
;
13128 expr
= (tree
) &buf
;
13130 else if (TREE_CODE_CLASS (code
) == tcc_type
13131 && (TYPE_POINTER_TO (expr
)
13132 || TYPE_REFERENCE_TO (expr
)
13133 || TYPE_CACHED_VALUES_P (expr
)
13134 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
13135 || TYPE_NEXT_VARIANT (expr
)))
13137 /* Allow these fields to be modified. */
13139 memcpy ((char *) &buf
, expr
, tree_size (expr
));
13140 expr
= tmp
= (tree
) &buf
;
13141 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
13142 TYPE_POINTER_TO (tmp
) = NULL
;
13143 TYPE_REFERENCE_TO (tmp
) = NULL
;
13144 TYPE_NEXT_VARIANT (tmp
) = NULL
;
13145 if (TYPE_CACHED_VALUES_P (tmp
))
13147 TYPE_CACHED_VALUES_P (tmp
) = 0;
13148 TYPE_CACHED_VALUES (tmp
) = NULL
;
13151 md5_process_bytes (expr
, tree_size (expr
), ctx
);
13152 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
13153 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
13154 if (TREE_CODE_CLASS (code
) != tcc_type
13155 && TREE_CODE_CLASS (code
) != tcc_declaration
13156 && code
!= TREE_LIST
13157 && code
!= SSA_NAME
13158 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
13159 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
13160 switch (TREE_CODE_CLASS (code
))
13166 md5_process_bytes (TREE_STRING_POINTER (expr
),
13167 TREE_STRING_LENGTH (expr
), ctx
);
13170 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
13171 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
13174 for (i
= 0; i
< (int) VECTOR_CST_NELTS (expr
); ++i
)
13175 fold_checksum_tree (VECTOR_CST_ELT (expr
, i
), ctx
, ht
);
13181 case tcc_exceptional
:
13185 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
13186 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
13187 expr
= TREE_CHAIN (expr
);
13188 goto recursive_label
;
13191 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
13192 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
13198 case tcc_expression
:
13199 case tcc_reference
:
13200 case tcc_comparison
:
13203 case tcc_statement
:
13205 len
= TREE_OPERAND_LENGTH (expr
);
13206 for (i
= 0; i
< len
; ++i
)
13207 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
13209 case tcc_declaration
:
13210 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
13211 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
13212 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
13214 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
13215 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
13216 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
13217 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
13218 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
13221 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
13223 if (TREE_CODE (expr
) == FUNCTION_DECL
)
13225 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
13226 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
13228 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
13232 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
13233 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
13234 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
13235 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
13236 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
13237 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
13238 if (INTEGRAL_TYPE_P (expr
)
13239 || SCALAR_FLOAT_TYPE_P (expr
))
13241 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
13242 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
13244 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
13245 if (TREE_CODE (expr
) == RECORD_TYPE
13246 || TREE_CODE (expr
) == UNION_TYPE
13247 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
13248 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
13249 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
13256 /* Helper function for outputting the checksum of a tree T. When
13257 debugging with gdb, you can "define mynext" to be "next" followed
13258 by "call debug_fold_checksum (op0)", then just trace down till the
13261 DEBUG_FUNCTION
void
13262 debug_fold_checksum (const_tree t
)
13265 unsigned char checksum
[16];
13266 struct md5_ctx ctx
;
13267 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13269 md5_init_ctx (&ctx
);
13270 fold_checksum_tree (t
, &ctx
, &ht
);
13271 md5_finish_ctx (&ctx
, checksum
);
13274 for (i
= 0; i
< 16; i
++)
13275 fprintf (stderr
, "%d ", checksum
[i
]);
13277 fprintf (stderr
, "\n");
13282 /* Fold a unary tree expression with code CODE of type TYPE with an
13283 operand OP0. LOC is the location of the resulting expression.
13284 Return a folded expression if successful. Otherwise, return a tree
13285 expression with code CODE of type TYPE with an operand OP0. */
13288 fold_build1_stat_loc (location_t loc
,
13289 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
13292 #ifdef ENABLE_FOLD_CHECKING
13293 unsigned char checksum_before
[16], checksum_after
[16];
13294 struct md5_ctx ctx
;
13295 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13297 md5_init_ctx (&ctx
);
13298 fold_checksum_tree (op0
, &ctx
, &ht
);
13299 md5_finish_ctx (&ctx
, checksum_before
);
13303 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13305 tem
= build1_stat_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
13307 #ifdef ENABLE_FOLD_CHECKING
13308 md5_init_ctx (&ctx
);
13309 fold_checksum_tree (op0
, &ctx
, &ht
);
13310 md5_finish_ctx (&ctx
, checksum_after
);
13312 if (memcmp (checksum_before
, checksum_after
, 16))
13313 fold_check_failed (op0
, tem
);
13318 /* Fold a binary tree expression with code CODE of type TYPE with
13319 operands OP0 and OP1. LOC is the location of the resulting
13320 expression. Return a folded expression if successful. Otherwise,
13321 return a tree expression with code CODE of type TYPE with operands
13325 fold_build2_stat_loc (location_t loc
,
13326 enum tree_code code
, tree type
, tree op0
, tree op1
13330 #ifdef ENABLE_FOLD_CHECKING
13331 unsigned char checksum_before_op0
[16],
13332 checksum_before_op1
[16],
13333 checksum_after_op0
[16],
13334 checksum_after_op1
[16];
13335 struct md5_ctx ctx
;
13336 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13338 md5_init_ctx (&ctx
);
13339 fold_checksum_tree (op0
, &ctx
, &ht
);
13340 md5_finish_ctx (&ctx
, checksum_before_op0
);
13343 md5_init_ctx (&ctx
);
13344 fold_checksum_tree (op1
, &ctx
, &ht
);
13345 md5_finish_ctx (&ctx
, checksum_before_op1
);
13349 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13351 tem
= build2_stat_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
13353 #ifdef ENABLE_FOLD_CHECKING
13354 md5_init_ctx (&ctx
);
13355 fold_checksum_tree (op0
, &ctx
, &ht
);
13356 md5_finish_ctx (&ctx
, checksum_after_op0
);
13359 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
13360 fold_check_failed (op0
, tem
);
13362 md5_init_ctx (&ctx
);
13363 fold_checksum_tree (op1
, &ctx
, &ht
);
13364 md5_finish_ctx (&ctx
, checksum_after_op1
);
13366 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
13367 fold_check_failed (op1
, tem
);
13372 /* Fold a ternary tree expression with code CODE of type TYPE with
13373 operands OP0, OP1, and OP2. Return a folded expression if
13374 successful. Otherwise, return a tree expression with code CODE of
13375 type TYPE with operands OP0, OP1, and OP2. */
13378 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
13379 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
13382 #ifdef ENABLE_FOLD_CHECKING
13383 unsigned char checksum_before_op0
[16],
13384 checksum_before_op1
[16],
13385 checksum_before_op2
[16],
13386 checksum_after_op0
[16],
13387 checksum_after_op1
[16],
13388 checksum_after_op2
[16];
13389 struct md5_ctx ctx
;
13390 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13392 md5_init_ctx (&ctx
);
13393 fold_checksum_tree (op0
, &ctx
, &ht
);
13394 md5_finish_ctx (&ctx
, checksum_before_op0
);
13397 md5_init_ctx (&ctx
);
13398 fold_checksum_tree (op1
, &ctx
, &ht
);
13399 md5_finish_ctx (&ctx
, checksum_before_op1
);
13402 md5_init_ctx (&ctx
);
13403 fold_checksum_tree (op2
, &ctx
, &ht
);
13404 md5_finish_ctx (&ctx
, checksum_before_op2
);
13408 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
13409 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13411 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
13413 #ifdef ENABLE_FOLD_CHECKING
13414 md5_init_ctx (&ctx
);
13415 fold_checksum_tree (op0
, &ctx
, &ht
);
13416 md5_finish_ctx (&ctx
, checksum_after_op0
);
13419 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
13420 fold_check_failed (op0
, tem
);
13422 md5_init_ctx (&ctx
);
13423 fold_checksum_tree (op1
, &ctx
, &ht
);
13424 md5_finish_ctx (&ctx
, checksum_after_op1
);
13427 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
13428 fold_check_failed (op1
, tem
);
13430 md5_init_ctx (&ctx
);
13431 fold_checksum_tree (op2
, &ctx
, &ht
);
13432 md5_finish_ctx (&ctx
, checksum_after_op2
);
13434 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
13435 fold_check_failed (op2
, tem
);
13440 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13441 arguments in ARGARRAY, and a null static chain.
13442 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13443 of type TYPE from the given operands as constructed by build_call_array. */
13446 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
13447 int nargs
, tree
*argarray
)
13450 #ifdef ENABLE_FOLD_CHECKING
13451 unsigned char checksum_before_fn
[16],
13452 checksum_before_arglist
[16],
13453 checksum_after_fn
[16],
13454 checksum_after_arglist
[16];
13455 struct md5_ctx ctx
;
13456 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13459 md5_init_ctx (&ctx
);
13460 fold_checksum_tree (fn
, &ctx
, &ht
);
13461 md5_finish_ctx (&ctx
, checksum_before_fn
);
13464 md5_init_ctx (&ctx
);
13465 for (i
= 0; i
< nargs
; i
++)
13466 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
13467 md5_finish_ctx (&ctx
, checksum_before_arglist
);
13471 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
13473 tem
= build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
13475 #ifdef ENABLE_FOLD_CHECKING
13476 md5_init_ctx (&ctx
);
13477 fold_checksum_tree (fn
, &ctx
, &ht
);
13478 md5_finish_ctx (&ctx
, checksum_after_fn
);
13481 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
13482 fold_check_failed (fn
, tem
);
13484 md5_init_ctx (&ctx
);
13485 for (i
= 0; i
< nargs
; i
++)
13486 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
13487 md5_finish_ctx (&ctx
, checksum_after_arglist
);
13489 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
13490 fold_check_failed (NULL_TREE
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.

   NOTE(review): the "flag_trapv = 0;" line had been lost in extraction;
   restored — END_FOLD_INIT restores saved_trapv, so START must clear it.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
13519 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
13520 tree type
, tree op
)
13525 result
= fold_build1_loc (loc
, code
, type
, op
);
13532 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
13533 tree type
, tree op0
, tree op1
)
13538 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
13545 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
13546 int nargs
, tree
*argarray
)
13551 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
13557 #undef START_FOLD_INIT
13558 #undef END_FOLD_INIT
13560 /* Determine if first argument is a multiple of second argument. Return 0 if
13561 it is not, or we cannot easily determined it to be.
13563 An example of the sort of thing we care about (at this point; this routine
13564 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13565 fold cases do now) is discovering that
13567 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13573 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13575 This code also handles discovering that
13577 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13579 is a multiple of 8 so we don't have to worry about dealing with a
13580 possible remainder.
13582 Note that we *look* inside a SAVE_EXPR only to determine how it was
13583 calculated; it is not safe for fold to do much of anything else with the
13584 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13585 at run time. For example, the latter example above *cannot* be implemented
13586 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13587 evaluation time of the original SAVE_EXPR is not necessarily the same at
13588 the time the new expression is evaluated. The only optimization of this
13589 sort that would be valid is changing
13591 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13595 SAVE_EXPR (I) * SAVE_EXPR (J)
13597 (where the same SAVE_EXPR (J) is used in the original and the
13598 transformed version). */
13601 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
13603 if (operand_equal_p (top
, bottom
, 0))
13606 if (TREE_CODE (type
) != INTEGER_TYPE
)
13609 switch (TREE_CODE (top
))
13612 /* Bitwise and provides a power of two multiple. If the mask is
13613 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13614 if (!integer_pow2p (bottom
))
13619 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
13620 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
13624 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
13625 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
13628 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
13632 op1
= TREE_OPERAND (top
, 1);
13633 /* const_binop may not detect overflow correctly,
13634 so check for it explicitly here. */
13635 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
13636 && 0 != (t1
= fold_convert (type
,
13637 const_binop (LSHIFT_EXPR
,
13640 && !TREE_OVERFLOW (t1
))
13641 return multiple_of_p (type
, t1
, bottom
);
13646 /* Can't handle conversions from non-integral or wider integral type. */
13647 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
13648 || (TYPE_PRECISION (type
)
13649 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
13652 /* .. fall through ... */
13655 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
13658 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
13659 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
13662 if (TREE_CODE (bottom
) != INTEGER_CST
13663 || integer_zerop (bottom
)
13664 || (TYPE_UNSIGNED (type
)
13665 && (tree_int_cst_sgn (top
) < 0
13666 || tree_int_cst_sgn (bottom
) < 0)))
13668 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
13676 /* Return true if CODE or TYPE is known to be non-negative. */
13679 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
13681 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
13682 && truth_value_p (code
))
13683 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13684 have a signed:1 type (where the value is -1 and 0). */
13689 /* Return true if (CODE OP0) is known to be non-negative. If the return
13690 value is based on the assumption that signed overflow is undefined,
13691 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13692 *STRICT_OVERFLOW_P. */
13695 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
13696 bool *strict_overflow_p
)
13698 if (TYPE_UNSIGNED (type
))
13704 /* We can't return 1 if flag_wrapv is set because
13705 ABS_EXPR<INT_MIN> = INT_MIN. */
13706 if (!ANY_INTEGRAL_TYPE_P (type
))
13708 if (TYPE_OVERFLOW_UNDEFINED (type
))
13710 *strict_overflow_p
= true;
13715 case NON_LVALUE_EXPR
:
13717 case FIX_TRUNC_EXPR
:
13718 return tree_expr_nonnegative_warnv_p (op0
,
13719 strict_overflow_p
);
13723 tree inner_type
= TREE_TYPE (op0
);
13724 tree outer_type
= type
;
13726 if (TREE_CODE (outer_type
) == REAL_TYPE
)
13728 if (TREE_CODE (inner_type
) == REAL_TYPE
)
13729 return tree_expr_nonnegative_warnv_p (op0
,
13730 strict_overflow_p
);
13731 if (INTEGRAL_TYPE_P (inner_type
))
13733 if (TYPE_UNSIGNED (inner_type
))
13735 return tree_expr_nonnegative_warnv_p (op0
,
13736 strict_overflow_p
);
13739 else if (INTEGRAL_TYPE_P (outer_type
))
13741 if (TREE_CODE (inner_type
) == REAL_TYPE
)
13742 return tree_expr_nonnegative_warnv_p (op0
,
13743 strict_overflow_p
);
13744 if (INTEGRAL_TYPE_P (inner_type
))
13745 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
13746 && TYPE_UNSIGNED (inner_type
);
13752 return tree_simple_nonnegative_warnv_p (code
, type
);
13755 /* We don't know sign of `t', so be conservative and return false. */
13759 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13760 value is based on the assumption that signed overflow is undefined,
13761 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13762 *STRICT_OVERFLOW_P. */
13765 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
13766 tree op1
, bool *strict_overflow_p
)
13768 if (TYPE_UNSIGNED (type
))
13773 case POINTER_PLUS_EXPR
:
13775 if (FLOAT_TYPE_P (type
))
13776 return (tree_expr_nonnegative_warnv_p (op0
,
13778 && tree_expr_nonnegative_warnv_p (op1
,
13779 strict_overflow_p
));
13781 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13782 both unsigned and at least 2 bits shorter than the result. */
13783 if (TREE_CODE (type
) == INTEGER_TYPE
13784 && TREE_CODE (op0
) == NOP_EXPR
13785 && TREE_CODE (op1
) == NOP_EXPR
)
13787 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
13788 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
13789 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
13790 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
13792 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
13793 TYPE_PRECISION (inner2
)) + 1;
13794 return prec
< TYPE_PRECISION (type
);
13800 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
13802 /* x * x is always non-negative for floating point x
13803 or without overflow. */
13804 if (operand_equal_p (op0
, op1
, 0)
13805 || (tree_expr_nonnegative_warnv_p (op0
, strict_overflow_p
)
13806 && tree_expr_nonnegative_warnv_p (op1
, strict_overflow_p
)))
13808 if (ANY_INTEGRAL_TYPE_P (type
)
13809 && TYPE_OVERFLOW_UNDEFINED (type
))
13810 *strict_overflow_p
= true;
13815 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13816 both unsigned and their total bits is shorter than the result. */
13817 if (TREE_CODE (type
) == INTEGER_TYPE
13818 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
13819 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
13821 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
13822 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
13824 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
13825 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
13828 bool unsigned0
= TYPE_UNSIGNED (inner0
);
13829 bool unsigned1
= TYPE_UNSIGNED (inner1
);
13831 if (TREE_CODE (op0
) == INTEGER_CST
)
13832 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
13834 if (TREE_CODE (op1
) == INTEGER_CST
)
13835 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
13837 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
13838 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
13840 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
13841 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
13842 : TYPE_PRECISION (inner0
);
13844 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
13845 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
13846 : TYPE_PRECISION (inner1
);
13848 return precision0
+ precision1
< TYPE_PRECISION (type
);
13855 return (tree_expr_nonnegative_warnv_p (op0
,
13857 || tree_expr_nonnegative_warnv_p (op1
,
13858 strict_overflow_p
));
13864 case TRUNC_DIV_EXPR
:
13865 case CEIL_DIV_EXPR
:
13866 case FLOOR_DIV_EXPR
:
13867 case ROUND_DIV_EXPR
:
13868 return (tree_expr_nonnegative_warnv_p (op0
,
13870 && tree_expr_nonnegative_warnv_p (op1
,
13871 strict_overflow_p
));
13873 case TRUNC_MOD_EXPR
:
13874 case CEIL_MOD_EXPR
:
13875 case FLOOR_MOD_EXPR
:
13876 case ROUND_MOD_EXPR
:
13877 return tree_expr_nonnegative_warnv_p (op0
,
13878 strict_overflow_p
);
13880 return tree_simple_nonnegative_warnv_p (code
, type
);
13883 /* We don't know sign of `t', so be conservative and return false. */
13887 /* Return true if T is known to be non-negative. If the return
13888 value is based on the assumption that signed overflow is undefined,
13889 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13890 *STRICT_OVERFLOW_P. */
13893 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
13895 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
13898 switch (TREE_CODE (t
))
13901 return tree_int_cst_sgn (t
) >= 0;
13904 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
13907 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
13910 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
13912 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 2),
13913 strict_overflow_p
));
13915 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
13918 /* We don't know sign of `t', so be conservative and return false. */
13922 /* Return true if T is known to be non-negative. If the return
13923 value is based on the assumption that signed overflow is undefined,
13924 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13925 *STRICT_OVERFLOW_P. */
13928 tree_call_nonnegative_warnv_p (tree type
, tree fndecl
,
13929 tree arg0
, tree arg1
, bool *strict_overflow_p
)
13931 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
13932 switch (DECL_FUNCTION_CODE (fndecl
))
13934 CASE_FLT_FN (BUILT_IN_ACOS
):
13935 CASE_FLT_FN (BUILT_IN_ACOSH
):
13936 CASE_FLT_FN (BUILT_IN_CABS
):
13937 CASE_FLT_FN (BUILT_IN_COSH
):
13938 CASE_FLT_FN (BUILT_IN_ERFC
):
13939 CASE_FLT_FN (BUILT_IN_EXP
):
13940 CASE_FLT_FN (BUILT_IN_EXP10
):
13941 CASE_FLT_FN (BUILT_IN_EXP2
):
13942 CASE_FLT_FN (BUILT_IN_FABS
):
13943 CASE_FLT_FN (BUILT_IN_FDIM
):
13944 CASE_FLT_FN (BUILT_IN_HYPOT
):
13945 CASE_FLT_FN (BUILT_IN_POW10
):
13946 CASE_INT_FN (BUILT_IN_FFS
):
13947 CASE_INT_FN (BUILT_IN_PARITY
):
13948 CASE_INT_FN (BUILT_IN_POPCOUNT
):
13949 CASE_INT_FN (BUILT_IN_CLZ
):
13950 CASE_INT_FN (BUILT_IN_CLRSB
):
13951 case BUILT_IN_BSWAP32
:
13952 case BUILT_IN_BSWAP64
:
13956 CASE_FLT_FN (BUILT_IN_SQRT
):
13957 /* sqrt(-0.0) is -0.0. */
13958 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
13960 return tree_expr_nonnegative_warnv_p (arg0
,
13961 strict_overflow_p
);
13963 CASE_FLT_FN (BUILT_IN_ASINH
):
13964 CASE_FLT_FN (BUILT_IN_ATAN
):
13965 CASE_FLT_FN (BUILT_IN_ATANH
):
13966 CASE_FLT_FN (BUILT_IN_CBRT
):
13967 CASE_FLT_FN (BUILT_IN_CEIL
):
13968 CASE_FLT_FN (BUILT_IN_ERF
):
13969 CASE_FLT_FN (BUILT_IN_EXPM1
):
13970 CASE_FLT_FN (BUILT_IN_FLOOR
):
13971 CASE_FLT_FN (BUILT_IN_FMOD
):
13972 CASE_FLT_FN (BUILT_IN_FREXP
):
13973 CASE_FLT_FN (BUILT_IN_ICEIL
):
13974 CASE_FLT_FN (BUILT_IN_IFLOOR
):
13975 CASE_FLT_FN (BUILT_IN_IRINT
):
13976 CASE_FLT_FN (BUILT_IN_IROUND
):
13977 CASE_FLT_FN (BUILT_IN_LCEIL
):
13978 CASE_FLT_FN (BUILT_IN_LDEXP
):
13979 CASE_FLT_FN (BUILT_IN_LFLOOR
):
13980 CASE_FLT_FN (BUILT_IN_LLCEIL
):
13981 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
13982 CASE_FLT_FN (BUILT_IN_LLRINT
):
13983 CASE_FLT_FN (BUILT_IN_LLROUND
):
13984 CASE_FLT_FN (BUILT_IN_LRINT
):
13985 CASE_FLT_FN (BUILT_IN_LROUND
):
13986 CASE_FLT_FN (BUILT_IN_MODF
):
13987 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
13988 CASE_FLT_FN (BUILT_IN_RINT
):
13989 CASE_FLT_FN (BUILT_IN_ROUND
):
13990 CASE_FLT_FN (BUILT_IN_SCALB
):
13991 CASE_FLT_FN (BUILT_IN_SCALBLN
):
13992 CASE_FLT_FN (BUILT_IN_SCALBN
):
13993 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
13994 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
13995 CASE_FLT_FN (BUILT_IN_SINH
):
13996 CASE_FLT_FN (BUILT_IN_TANH
):
13997 CASE_FLT_FN (BUILT_IN_TRUNC
):
13998 /* True if the 1st argument is nonnegative. */
13999 return tree_expr_nonnegative_warnv_p (arg0
,
14000 strict_overflow_p
);
14002 CASE_FLT_FN (BUILT_IN_FMAX
):
14003 /* True if the 1st OR 2nd arguments are nonnegative. */
14004 return (tree_expr_nonnegative_warnv_p (arg0
,
14006 || (tree_expr_nonnegative_warnv_p (arg1
,
14007 strict_overflow_p
)));
14009 CASE_FLT_FN (BUILT_IN_FMIN
):
14010 /* True if the 1st AND 2nd arguments are nonnegative. */
14011 return (tree_expr_nonnegative_warnv_p (arg0
,
14013 && (tree_expr_nonnegative_warnv_p (arg1
,
14014 strict_overflow_p
)));
14016 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
14017 /* True if the 2nd argument is nonnegative. */
14018 return tree_expr_nonnegative_warnv_p (arg1
,
14019 strict_overflow_p
);
14021 CASE_FLT_FN (BUILT_IN_POWI
):
14022 /* True if the 1st argument is nonnegative or the second
14023 argument is an even integer. */
14024 if (TREE_CODE (arg1
) == INTEGER_CST
14025 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
14027 return tree_expr_nonnegative_warnv_p (arg0
,
14028 strict_overflow_p
);
14030 CASE_FLT_FN (BUILT_IN_POW
):
14031 /* True if the 1st argument is nonnegative or the second
14032 argument is an even integer valued real. */
14033 if (TREE_CODE (arg1
) == REAL_CST
)
14038 c
= TREE_REAL_CST (arg1
);
14039 n
= real_to_integer (&c
);
14042 REAL_VALUE_TYPE cint
;
14043 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
14044 if (real_identical (&c
, &cint
))
14048 return tree_expr_nonnegative_warnv_p (arg0
,
14049 strict_overflow_p
);
14054 return tree_simple_nonnegative_warnv_p (CALL_EXPR
,
14058 /* Return true if T is known to be non-negative. If the return
14059 value is based on the assumption that signed overflow is undefined,
14060 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14061 *STRICT_OVERFLOW_P. */
14064 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14066 enum tree_code code
= TREE_CODE (t
);
14067 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14074 tree temp
= TARGET_EXPR_SLOT (t
);
14075 t
= TARGET_EXPR_INITIAL (t
);
14077 /* If the initializer is non-void, then it's a normal expression
14078 that will be assigned to the slot. */
14079 if (!VOID_TYPE_P (t
))
14080 return tree_expr_nonnegative_warnv_p (t
, strict_overflow_p
);
14082 /* Otherwise, the initializer sets the slot in some way. One common
14083 way is an assignment statement at the end of the initializer. */
14086 if (TREE_CODE (t
) == BIND_EXPR
)
14087 t
= expr_last (BIND_EXPR_BODY (t
));
14088 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
14089 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
14090 t
= expr_last (TREE_OPERAND (t
, 0));
14091 else if (TREE_CODE (t
) == STATEMENT_LIST
)
14096 if (TREE_CODE (t
) == MODIFY_EXPR
14097 && TREE_OPERAND (t
, 0) == temp
)
14098 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
14099 strict_overflow_p
);
14106 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
14107 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
14109 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
14110 get_callee_fndecl (t
),
14113 strict_overflow_p
);
14115 case COMPOUND_EXPR
:
14117 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
14118 strict_overflow_p
);
14120 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t
, 1)),
14121 strict_overflow_p
);
14123 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
14124 strict_overflow_p
);
14127 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
14131 /* We don't know sign of `t', so be conservative and return false. */
14135 /* Return true if T is known to be non-negative. If the return
14136 value is based on the assumption that signed overflow is undefined,
14137 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14138 *STRICT_OVERFLOW_P. */
14141 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14143 enum tree_code code
;
14144 if (t
== error_mark_node
)
14147 code
= TREE_CODE (t
);
14148 switch (TREE_CODE_CLASS (code
))
14151 case tcc_comparison
:
14152 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
14154 TREE_OPERAND (t
, 0),
14155 TREE_OPERAND (t
, 1),
14156 strict_overflow_p
);
14159 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
14161 TREE_OPERAND (t
, 0),
14162 strict_overflow_p
);
14165 case tcc_declaration
:
14166 case tcc_reference
:
14167 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
14175 case TRUTH_AND_EXPR
:
14176 case TRUTH_OR_EXPR
:
14177 case TRUTH_XOR_EXPR
:
14178 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
14180 TREE_OPERAND (t
, 0),
14181 TREE_OPERAND (t
, 1),
14182 strict_overflow_p
);
14183 case TRUTH_NOT_EXPR
:
14184 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
14186 TREE_OPERAND (t
, 0),
14187 strict_overflow_p
);
14194 case WITH_SIZE_EXPR
:
14196 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
14199 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
);
14203 /* Return true if `t' is known to be non-negative. Handle warnings
14204 about undefined signed overflow. */
14207 tree_expr_nonnegative_p (tree t
)
14209 bool ret
, strict_overflow_p
;
14211 strict_overflow_p
= false;
14212 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
14213 if (strict_overflow_p
)
14214 fold_overflow_warning (("assuming signed overflow does not occur when "
14215 "determining that expression is always "
14217 WARN_STRICT_OVERFLOW_MISC
);
14222 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14223 For floating point we further ensure that T is not denormal.
14224 Similar logic is present in nonzero_address in rtlanal.h.
14226 If the return value is based on the assumption that signed overflow
14227 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14228 change *STRICT_OVERFLOW_P. */
14231 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
14232 bool *strict_overflow_p
)
14237 return tree_expr_nonzero_warnv_p (op0
,
14238 strict_overflow_p
);
14242 tree inner_type
= TREE_TYPE (op0
);
14243 tree outer_type
= type
;
14245 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
14246 && tree_expr_nonzero_warnv_p (op0
,
14247 strict_overflow_p
));
14251 case NON_LVALUE_EXPR
:
14252 return tree_expr_nonzero_warnv_p (op0
,
14253 strict_overflow_p
);
14262 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14263 For floating point we further ensure that T is not denormal.
14264 Similar logic is present in nonzero_address in rtlanal.h.
14266 If the return value is based on the assumption that signed overflow
14267 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14268 change *STRICT_OVERFLOW_P. */
14271 tree_binary_nonzero_warnv_p (enum tree_code code
,
14274 tree op1
, bool *strict_overflow_p
)
14276 bool sub_strict_overflow_p
;
14279 case POINTER_PLUS_EXPR
:
14281 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_UNDEFINED (type
))
14283 /* With the presence of negative values it is hard
14284 to say something. */
14285 sub_strict_overflow_p
= false;
14286 if (!tree_expr_nonnegative_warnv_p (op0
,
14287 &sub_strict_overflow_p
)
14288 || !tree_expr_nonnegative_warnv_p (op1
,
14289 &sub_strict_overflow_p
))
14291 /* One of operands must be positive and the other non-negative. */
14292 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14293 overflows, on a twos-complement machine the sum of two
14294 nonnegative numbers can never be zero. */
14295 return (tree_expr_nonzero_warnv_p (op0
,
14297 || tree_expr_nonzero_warnv_p (op1
,
14298 strict_overflow_p
));
14303 if (TYPE_OVERFLOW_UNDEFINED (type
))
14305 if (tree_expr_nonzero_warnv_p (op0
,
14307 && tree_expr_nonzero_warnv_p (op1
,
14308 strict_overflow_p
))
14310 *strict_overflow_p
= true;
14317 sub_strict_overflow_p
= false;
14318 if (tree_expr_nonzero_warnv_p (op0
,
14319 &sub_strict_overflow_p
)
14320 && tree_expr_nonzero_warnv_p (op1
,
14321 &sub_strict_overflow_p
))
14323 if (sub_strict_overflow_p
)
14324 *strict_overflow_p
= true;
14329 sub_strict_overflow_p
= false;
14330 if (tree_expr_nonzero_warnv_p (op0
,
14331 &sub_strict_overflow_p
))
14333 if (sub_strict_overflow_p
)
14334 *strict_overflow_p
= true;
14336 /* When both operands are nonzero, then MAX must be too. */
14337 if (tree_expr_nonzero_warnv_p (op1
,
14338 strict_overflow_p
))
14341 /* MAX where operand 0 is positive is positive. */
14342 return tree_expr_nonnegative_warnv_p (op0
,
14343 strict_overflow_p
);
14345 /* MAX where operand 1 is positive is positive. */
14346 else if (tree_expr_nonzero_warnv_p (op1
,
14347 &sub_strict_overflow_p
)
14348 && tree_expr_nonnegative_warnv_p (op1
,
14349 &sub_strict_overflow_p
))
14351 if (sub_strict_overflow_p
)
14352 *strict_overflow_p
= true;
14358 return (tree_expr_nonzero_warnv_p (op1
,
14360 || tree_expr_nonzero_warnv_p (op0
,
14361 strict_overflow_p
));
14370 /* Return true when T is an address and is known to be nonzero.
14371 For floating point we further ensure that T is not denormal.
14372 Similar logic is present in nonzero_address in rtlanal.h.
14374 If the return value is based on the assumption that signed overflow
14375 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14376 change *STRICT_OVERFLOW_P. */
14379 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
14381 bool sub_strict_overflow_p
;
14382 switch (TREE_CODE (t
))
14385 return !integer_zerop (t
);
14389 tree base
= TREE_OPERAND (t
, 0);
14391 if (!DECL_P (base
))
14392 base
= get_base_address (base
);
14397 /* For objects in symbol table check if we know they are non-zero.
14398 Don't do anything for variables and functions before symtab is built;
14399 it is quite possible that they will be declared weak later. */
14400 if (DECL_P (base
) && decl_in_symtab_p (base
))
14402 struct symtab_node
*symbol
;
14404 symbol
= symtab_node::get_create (base
);
14406 return symbol
->nonzero_address ();
14411 /* Function local objects are never NULL. */
14413 && (DECL_CONTEXT (base
)
14414 && TREE_CODE (DECL_CONTEXT (base
)) == FUNCTION_DECL
14415 && auto_var_in_fn_p (base
, DECL_CONTEXT (base
))))
14418 /* Constants are never weak. */
14419 if (CONSTANT_CLASS_P (base
))
14426 sub_strict_overflow_p
= false;
14427 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
14428 &sub_strict_overflow_p
)
14429 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
14430 &sub_strict_overflow_p
))
14432 if (sub_strict_overflow_p
)
14433 *strict_overflow_p
= true;
14444 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14445 attempt to fold the expression to a constant without modifying TYPE,
14448 If the expression could be simplified to a constant, then return
14449 the constant. If the expression would not be simplified to a
14450 constant, then return NULL_TREE. */
14453 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
14455 tree tem
= fold_binary (code
, type
, op0
, op1
);
14456 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
14459 /* Given the components of a unary expression CODE, TYPE and OP0,
14460 attempt to fold the expression to a constant without modifying
14463 If the expression could be simplified to a constant, then return
14464 the constant. If the expression would not be simplified to a
14465 constant, then return NULL_TREE. */
14468 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
14470 tree tem
= fold_unary (code
, type
, op0
);
14471 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
14474 /* If EXP represents referencing an element in a constant string
14475 (either via pointer arithmetic or array indexing), return the
14476 tree representing the value accessed, otherwise return NULL. */
14479 fold_read_from_constant_string (tree exp
)
14481 if ((TREE_CODE (exp
) == INDIRECT_REF
14482 || TREE_CODE (exp
) == ARRAY_REF
)
14483 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
14485 tree exp1
= TREE_OPERAND (exp
, 0);
14488 location_t loc
= EXPR_LOCATION (exp
);
14490 if (TREE_CODE (exp
) == INDIRECT_REF
)
14491 string
= string_constant (exp1
, &index
);
14494 tree low_bound
= array_ref_low_bound (exp
);
14495 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
14497 /* Optimize the special-case of a zero lower bound.
14499 We convert the low_bound to sizetype to avoid some problems
14500 with constant folding. (E.g. suppose the lower bound is 1,
14501 and its mode is QI. Without the conversion,l (ARRAY
14502 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14503 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14504 if (! integer_zerop (low_bound
))
14505 index
= size_diffop_loc (loc
, index
,
14506 fold_convert_loc (loc
, sizetype
, low_bound
));
14512 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
14513 && TREE_CODE (string
) == STRING_CST
14514 && TREE_CODE (index
) == INTEGER_CST
14515 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
14516 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
14518 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
14519 return build_int_cst_type (TREE_TYPE (exp
),
14520 (TREE_STRING_POINTER (string
)
14521 [TREE_INT_CST_LOW (index
)]));
14526 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14527 an integer constant, real, or fixed-point constant.
14529 TYPE is the type of the result. */
14532 fold_negate_const (tree arg0
, tree type
)
14534 tree t
= NULL_TREE
;
14536 switch (TREE_CODE (arg0
))
14541 wide_int val
= wi::neg (arg0
, &overflow
);
14542 t
= force_fit_type (type
, val
, 1,
14543 (overflow
| TREE_OVERFLOW (arg0
))
14544 && !TYPE_UNSIGNED (type
));
14549 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
14554 FIXED_VALUE_TYPE f
;
14555 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
14556 &(TREE_FIXED_CST (arg0
)), NULL
,
14557 TYPE_SATURATING (type
));
14558 t
= build_fixed (type
, f
);
14559 /* Propagate overflow flags. */
14560 if (overflow_p
| TREE_OVERFLOW (arg0
))
14561 TREE_OVERFLOW (t
) = 1;
14566 gcc_unreachable ();
14572 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14573 an integer constant or real constant.
14575 TYPE is the type of the result. */
14578 fold_abs_const (tree arg0
, tree type
)
14580 tree t
= NULL_TREE
;
14582 switch (TREE_CODE (arg0
))
14586 /* If the value is unsigned or non-negative, then the absolute value
14587 is the same as the ordinary value. */
14588 if (!wi::neg_p (arg0
, TYPE_SIGN (type
)))
14591 /* If the value is negative, then the absolute value is
14596 wide_int val
= wi::neg (arg0
, &overflow
);
14597 t
= force_fit_type (type
, val
, -1,
14598 overflow
| TREE_OVERFLOW (arg0
));
14604 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
14605 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
14611 gcc_unreachable ();
14617 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14618 constant. TYPE is the type of the result. */
14621 fold_not_const (const_tree arg0
, tree type
)
14623 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
14625 return force_fit_type (type
, wi::bit_not (arg0
), 0, TREE_OVERFLOW (arg0
));
14628 /* Given CODE, a relational operator, the target type, TYPE and two
14629 constant operands OP0 and OP1, return the result of the
14630 relational operation. If the result is not a compile time
14631 constant, then return NULL_TREE. */
14634 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
14636 int result
, invert
;
14638 /* From here on, the only cases we handle are when the result is
14639 known to be a constant. */
14641 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
14643 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
14644 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
14646 /* Handle the cases where either operand is a NaN. */
14647 if (real_isnan (c0
) || real_isnan (c1
))
14657 case UNORDERED_EXPR
:
14671 if (flag_trapping_math
)
14677 gcc_unreachable ();
14680 return constant_boolean_node (result
, type
);
14683 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
14686 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
14688 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
14689 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
14690 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
14693 /* Handle equality/inequality of complex constants. */
14694 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
14696 tree rcond
= fold_relational_const (code
, type
,
14697 TREE_REALPART (op0
),
14698 TREE_REALPART (op1
));
14699 tree icond
= fold_relational_const (code
, type
,
14700 TREE_IMAGPART (op0
),
14701 TREE_IMAGPART (op1
));
14702 if (code
== EQ_EXPR
)
14703 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
14704 else if (code
== NE_EXPR
)
14705 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
14710 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
14712 unsigned count
= VECTOR_CST_NELTS (op0
);
14713 tree
*elts
= XALLOCAVEC (tree
, count
);
14714 gcc_assert (VECTOR_CST_NELTS (op1
) == count
14715 && TYPE_VECTOR_SUBPARTS (type
) == count
);
14717 for (unsigned i
= 0; i
< count
; i
++)
14719 tree elem_type
= TREE_TYPE (type
);
14720 tree elem0
= VECTOR_CST_ELT (op0
, i
);
14721 tree elem1
= VECTOR_CST_ELT (op1
, i
);
14723 tree tem
= fold_relational_const (code
, elem_type
,
14726 if (tem
== NULL_TREE
)
14729 elts
[i
] = build_int_cst (elem_type
, integer_zerop (tem
) ? 0 : -1);
14732 return build_vector (type
, elts
);
14735 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14737 To compute GT, swap the arguments and do LT.
14738 To compute GE, do LT and invert the result.
14739 To compute LE, swap the arguments, do LT and invert the result.
14740 To compute NE, do EQ and invert the result.
14742 Therefore, the code below must handle only EQ and LT. */
14744 if (code
== LE_EXPR
|| code
== GT_EXPR
)
14746 std::swap (op0
, op1
);
14747 code
= swap_tree_comparison (code
);
14750 /* Note that it is safe to invert for real values here because we
14751 have already handled the one case that it matters. */
14754 if (code
== NE_EXPR
|| code
== GE_EXPR
)
14757 code
= invert_tree_comparison (code
, false);
14760 /* Compute a result for LT or EQ if args permit;
14761 Otherwise return T. */
14762 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
14764 if (code
== EQ_EXPR
)
14765 result
= tree_int_cst_equal (op0
, op1
);
14767 result
= tree_int_cst_lt (op0
, op1
);
14774 return constant_boolean_node (result
, type
);
14777 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14778 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14782 fold_build_cleanup_point_expr (tree type
, tree expr
)
14784 /* If the expression does not have side effects then we don't have to wrap
14785 it with a cleanup point expression. */
14786 if (!TREE_SIDE_EFFECTS (expr
))
14789 /* If the expression is a return, check to see if the expression inside the
14790 return has no side effects or the right hand side of the modify expression
14791 inside the return. If either don't have side effects set we don't need to
14792 wrap the expression in a cleanup point expression. Note we don't check the
14793 left hand side of the modify because it should always be a return decl. */
14794 if (TREE_CODE (expr
) == RETURN_EXPR
)
14796 tree op
= TREE_OPERAND (expr
, 0);
14797 if (!op
|| !TREE_SIDE_EFFECTS (op
))
14799 op
= TREE_OPERAND (op
, 1);
14800 if (!TREE_SIDE_EFFECTS (op
))
14804 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
14807 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14808 of an indirection through OP0, or NULL_TREE if no simplification is
14812 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
14818 subtype
= TREE_TYPE (sub
);
14819 if (!POINTER_TYPE_P (subtype
))
14822 if (TREE_CODE (sub
) == ADDR_EXPR
)
14824 tree op
= TREE_OPERAND (sub
, 0);
14825 tree optype
= TREE_TYPE (op
);
14826 /* *&CONST_DECL -> to the value of the const decl. */
14827 if (TREE_CODE (op
) == CONST_DECL
)
14828 return DECL_INITIAL (op
);
14829 /* *&p => p; make sure to handle *&"str"[cst] here. */
14830 if (type
== optype
)
14832 tree fop
= fold_read_from_constant_string (op
);
14838 /* *(foo *)&fooarray => fooarray[0] */
14839 else if (TREE_CODE (optype
) == ARRAY_TYPE
14840 && type
== TREE_TYPE (optype
)
14841 && (!in_gimple_form
14842 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
14844 tree type_domain
= TYPE_DOMAIN (optype
);
14845 tree min_val
= size_zero_node
;
14846 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
14847 min_val
= TYPE_MIN_VALUE (type_domain
);
14849 && TREE_CODE (min_val
) != INTEGER_CST
)
14851 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
14852 NULL_TREE
, NULL_TREE
);
14854 /* *(foo *)&complexfoo => __real__ complexfoo */
14855 else if (TREE_CODE (optype
) == COMPLEX_TYPE
14856 && type
== TREE_TYPE (optype
))
14857 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
14858 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14859 else if (TREE_CODE (optype
) == VECTOR_TYPE
14860 && type
== TREE_TYPE (optype
))
14862 tree part_width
= TYPE_SIZE (type
);
14863 tree index
= bitsize_int (0);
14864 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
14868 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
14869 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
14871 tree op00
= TREE_OPERAND (sub
, 0);
14872 tree op01
= TREE_OPERAND (sub
, 1);
14875 if (TREE_CODE (op00
) == ADDR_EXPR
)
14878 op00
= TREE_OPERAND (op00
, 0);
14879 op00type
= TREE_TYPE (op00
);
14881 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14882 if (TREE_CODE (op00type
) == VECTOR_TYPE
14883 && type
== TREE_TYPE (op00type
))
14885 HOST_WIDE_INT offset
= tree_to_shwi (op01
);
14886 tree part_width
= TYPE_SIZE (type
);
14887 unsigned HOST_WIDE_INT part_widthi
= tree_to_shwi (part_width
)/BITS_PER_UNIT
;
14888 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
14889 tree index
= bitsize_int (indexi
);
14891 if (offset
/ part_widthi
< TYPE_VECTOR_SUBPARTS (op00type
))
14892 return fold_build3_loc (loc
,
14893 BIT_FIELD_REF
, type
, op00
,
14894 part_width
, index
);
14897 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14898 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
14899 && type
== TREE_TYPE (op00type
))
14901 tree size
= TYPE_SIZE_UNIT (type
);
14902 if (tree_int_cst_equal (size
, op01
))
14903 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
14905 /* ((foo *)&fooarray)[1] => fooarray[1] */
14906 else if (TREE_CODE (op00type
) == ARRAY_TYPE
14907 && type
== TREE_TYPE (op00type
))
14909 tree type_domain
= TYPE_DOMAIN (op00type
);
14910 tree min_val
= size_zero_node
;
14911 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
14912 min_val
= TYPE_MIN_VALUE (type_domain
);
14913 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
14914 TYPE_SIZE_UNIT (type
));
14915 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
14916 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
14917 NULL_TREE
, NULL_TREE
);
14922 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14923 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
14924 && type
== TREE_TYPE (TREE_TYPE (subtype
))
14925 && (!in_gimple_form
14926 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
14929 tree min_val
= size_zero_node
;
14930 sub
= build_fold_indirect_ref_loc (loc
, sub
);
14931 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
14932 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
14933 min_val
= TYPE_MIN_VALUE (type_domain
);
14935 && TREE_CODE (min_val
) != INTEGER_CST
)
14937 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
14944 /* Builds an expression for an indirection through T, simplifying some
14948 build_fold_indirect_ref_loc (location_t loc
, tree t
)
14950 tree type
= TREE_TYPE (TREE_TYPE (t
));
14951 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
14956 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
14959 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14962 fold_indirect_ref_loc (location_t loc
, tree t
)
14964 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
14972 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14973 whose result is ignored. The type of the returned tree need not be
14974 the same as the original expression. */
14977 fold_ignored_result (tree t
)
14979 if (!TREE_SIDE_EFFECTS (t
))
14980 return integer_zero_node
;
14983 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
14986 t
= TREE_OPERAND (t
, 0);
14990 case tcc_comparison
:
14991 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
14992 t
= TREE_OPERAND (t
, 0);
14993 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
14994 t
= TREE_OPERAND (t
, 1);
14999 case tcc_expression
:
15000 switch (TREE_CODE (t
))
15002 case COMPOUND_EXPR
:
15003 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15005 t
= TREE_OPERAND (t
, 0);
15009 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
15010 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
15012 t
= TREE_OPERAND (t
, 0);
15025 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15028 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
15030 tree div
= NULL_TREE
;
15035 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15036 have to do anything. Only do this when we are not given a const,
15037 because in that case, this check is more expensive than just
15039 if (TREE_CODE (value
) != INTEGER_CST
)
15041 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15043 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
15047 /* If divisor is a power of two, simplify this to bit manipulation. */
15048 if (divisor
== (divisor
& -divisor
))
15050 if (TREE_CODE (value
) == INTEGER_CST
)
15052 wide_int val
= value
;
15055 if ((val
& (divisor
- 1)) == 0)
15058 overflow_p
= TREE_OVERFLOW (value
);
15059 val
+= divisor
- 1;
15060 val
&= - (int) divisor
;
15064 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
15070 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
15071 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
15072 t
= build_int_cst (TREE_TYPE (value
), - (int) divisor
);
15073 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
15079 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15080 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
15081 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
15087 /* Likewise, but round down. */
15090 round_down_loc (location_t loc
, tree value
, int divisor
)
15092 tree div
= NULL_TREE
;
15094 gcc_assert (divisor
> 0);
15098 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15099 have to do anything. Only do this when we are not given a const,
15100 because in that case, this check is more expensive than just
15102 if (TREE_CODE (value
) != INTEGER_CST
)
15104 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15106 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
15110 /* If divisor is a power of two, simplify this to bit manipulation. */
15111 if (divisor
== (divisor
& -divisor
))
15115 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
15116 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
15121 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15122 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
15123 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
15129 /* Returns the pointer to the base of the object addressed by EXP and
15130 extracts the information about the offset of the access, storing it
15131 to PBITPOS and POFFSET. */
15134 split_address_to_core_and_offset (tree exp
,
15135 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
15139 int unsignedp
, volatilep
;
15140 HOST_WIDE_INT bitsize
;
15141 location_t loc
= EXPR_LOCATION (exp
);
15143 if (TREE_CODE (exp
) == ADDR_EXPR
)
15145 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
15146 poffset
, &mode
, &unsignedp
, &volatilep
,
15148 core
= build_fold_addr_expr_loc (loc
, core
);
15154 *poffset
= NULL_TREE
;
15160 /* Returns true if addresses of E1 and E2 differ by a constant, false
15161 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15164 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
15167 HOST_WIDE_INT bitpos1
, bitpos2
;
15168 tree toffset1
, toffset2
, tdiff
, type
;
15170 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
15171 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
15173 if (bitpos1
% BITS_PER_UNIT
!= 0
15174 || bitpos2
% BITS_PER_UNIT
!= 0
15175 || !operand_equal_p (core1
, core2
, 0))
15178 if (toffset1
&& toffset2
)
15180 type
= TREE_TYPE (toffset1
);
15181 if (type
!= TREE_TYPE (toffset2
))
15182 toffset2
= fold_convert (type
, toffset2
);
15184 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
15185 if (!cst_and_fits_in_hwi (tdiff
))
15188 *diff
= int_cst_value (tdiff
);
15190 else if (toffset1
|| toffset2
)
15192 /* If only one of the offsets is non-constant, the difference cannot
15199 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
15203 /* Simplify the floating point expression EXP when the sign of the
15204 result is not significant. Return NULL_TREE if no simplification
15208 fold_strip_sign_ops (tree exp
)
15211 location_t loc
= EXPR_LOCATION (exp
);
15213 switch (TREE_CODE (exp
))
15217 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
15218 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
15222 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp
)))
15224 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
15225 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
15226 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
15227 return fold_build2_loc (loc
, TREE_CODE (exp
), TREE_TYPE (exp
),
15228 arg0
? arg0
: TREE_OPERAND (exp
, 0),
15229 arg1
? arg1
: TREE_OPERAND (exp
, 1));
15232 case COMPOUND_EXPR
:
15233 arg0
= TREE_OPERAND (exp
, 0);
15234 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
15236 return fold_build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (exp
), arg0
, arg1
);
15240 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
15241 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 2));
15243 return fold_build3_loc (loc
,
15244 COND_EXPR
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
15245 arg0
? arg0
: TREE_OPERAND (exp
, 1),
15246 arg1
? arg1
: TREE_OPERAND (exp
, 2));
15251 const enum built_in_function fcode
= builtin_mathfn_code (exp
);
15254 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
15255 /* Strip copysign function call, return the 1st argument. */
15256 arg0
= CALL_EXPR_ARG (exp
, 0);
15257 arg1
= CALL_EXPR_ARG (exp
, 1);
15258 return omit_one_operand_loc (loc
, TREE_TYPE (exp
), arg0
, arg1
);
15261 /* Strip sign ops from the argument of "odd" math functions. */
15262 if (negate_mathfn_p (fcode
))
15264 arg0
= fold_strip_sign_ops (CALL_EXPR_ARG (exp
, 0));
15266 return build_call_expr_loc (loc
, get_callee_fndecl (exp
), 1, arg0
);
15279 /* Return OFF converted to a pointer offset type suitable as offset for
15280 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15282 convert_to_ptrofftype_loc (location_t loc
, tree off
)
15284 return fold_convert_loc (loc
, sizetype
, off
);
15287 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15289 fold_build_pointer_plus_loc (location_t loc
, tree ptr
, tree off
)
15291 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
15292 ptr
, convert_to_ptrofftype_loc (loc
, off
));
15295 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15297 fold_build_pointer_plus_hwi_loc (location_t loc
, tree ptr
, HOST_WIDE_INT off
)
15299 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
15300 ptr
, size_int (off
));