1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
52 #include "fold-const.h"
53 #include "stor-layout.h"
55 #include "tree-iterator.h"
58 #include "hard-reg-set.h"
60 #include "insn-config.h"
70 #include "diagnostic-core.h"
72 #include "langhooks.h"
75 #include "basic-block.h"
76 #include "tree-ssa-alias.h"
77 #include "internal-fn.h"
79 #include "gimple-expr.h"
85 #include "plugin-api.h"
88 #include "generic-match.h"
91 /* Nonzero if we are folding constants inside an initializer; zero
92 otherwise. */
93 int folding_initializer
= 0;
95 /* The following constants represent a bit based encoding of GCC's
96 comparison operators. This encoding simplifies transformations
97 on relational comparison operators, such as AND and OR. */
98 enum comparison_code
{
117 static bool negate_mathfn_p (enum built_in_function
);
118 static bool negate_expr_p (tree
);
119 static tree
negate_expr (tree
);
120 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
121 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
122 static enum comparison_code
comparison_to_compcode (enum tree_code
);
123 static enum tree_code
compcode_to_comparison (enum comparison_code
);
124 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
125 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
126 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
127 static tree
distribute_bit_expr (location_t
, enum tree_code
, tree
, tree
, tree
);
128 static tree
make_bit_field_ref (location_t
, tree
, tree
,
129 HOST_WIDE_INT
, HOST_WIDE_INT
, int);
130 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
132 static tree
decode_field_reference (location_t
, tree
, HOST_WIDE_INT
*,
134 machine_mode
*, int *, int *,
136 static int simple_operand_p (const_tree
);
137 static bool simple_operand_p_2 (tree
);
138 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
139 static tree
range_predecessor (tree
);
140 static tree
range_successor (tree
);
141 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
142 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
143 static tree
unextend (tree
, int, int, tree
);
144 static tree
optimize_minmax_comparison (location_t
, enum tree_code
,
146 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
147 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
148 static tree
fold_binary_op_with_conditional_arg (location_t
,
149 enum tree_code
, tree
,
152 static tree
fold_mathfn_compare (location_t
,
153 enum built_in_function
, enum tree_code
,
155 static tree
fold_inf_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
156 static tree
fold_div_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
157 static bool reorder_operands_p (const_tree
, const_tree
);
158 static tree
fold_negate_const (tree
, tree
);
159 static tree
fold_not_const (const_tree
, tree
);
160 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
161 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
162 static tree
fold_view_convert_expr (tree
, tree
);
163 static bool vec_cst_ctor_to_array (tree
, tree
*);
166 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
167 Otherwise, return LOC. */
170 expr_location_or (tree t
, location_t loc
)
172 location_t tloc
= EXPR_LOCATION (t
);
173 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
176 /* Similar to protected_set_expr_location, but never modify x in place,
177 if location can and needs to be set, unshare it. */
180 protected_set_expr_location_unshare (tree x
, location_t loc
)
182 if (CAN_HAVE_LOCATION_P (x
)
183 && EXPR_LOCATION (x
) != loc
184 && !(TREE_CODE (x
) == SAVE_EXPR
185 || TREE_CODE (x
) == TARGET_EXPR
186 || TREE_CODE (x
) == BIND_EXPR
))
189 SET_EXPR_LOCATION (x
, loc
);
194 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
195 division and returns the quotient. Otherwise returns
199 div_if_zero_remainder (const_tree arg1
, const_tree arg2
)
203 if (wi::multiple_of_p (wi::to_widest (arg1
), wi::to_widest (arg2
),
205 return wide_int_to_tree (TREE_TYPE (arg1
), quo
);
210 /* This is nonzero if we should defer warnings about undefined
211 overflow. This facility exists because these warnings are a
212 special case. The code to estimate loop iterations does not want
213 to issue any warnings, since it works with expressions which do not
214 occur in user code. Various bits of cleanup code call fold(), but
215 only use the result if it has certain characteristics (e.g., is a
216 constant); that code only wants to issue a warning if the result is
217 used. */
219 static int fold_deferring_overflow_warnings
;
221 /* If a warning about undefined overflow is deferred, this is the
222 warning. Note that this may cause us to turn two warnings into
223 one, but that is fine since it is sufficient to only give one
224 warning per expression. */
226 static const char* fold_deferred_overflow_warning
;
228 /* If a warning about undefined overflow is deferred, this is the
229 level at which the warning should be emitted. */
231 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
233 /* Start deferring overflow warnings. We could use a stack here to
234 permit nested calls, but at present it is not necessary. */
237 fold_defer_overflow_warnings (void)
239 ++fold_deferring_overflow_warnings
;
242 /* Stop deferring overflow warnings. If there is a pending warning,
243 and ISSUE is true, then issue the warning if appropriate. STMT is
244 the statement with which the warning should be associated (used for
245 location information); STMT may be NULL. CODE is the level of the
246 warning--a warn_strict_overflow_code value. This function will use
247 the smaller of CODE and the deferred code when deciding whether to
248 issue the warning. CODE may be zero to mean to always use the
249 maximum. */
252 fold_undefer_overflow_warnings (bool issue
, const_gimple stmt
, int code
)
257 gcc_assert (fold_deferring_overflow_warnings
> 0);
258 --fold_deferring_overflow_warnings
;
259 if (fold_deferring_overflow_warnings
> 0)
261 if (fold_deferred_overflow_warning
!= NULL
263 && code
< (int) fold_deferred_overflow_code
)
264 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
268 warnmsg
= fold_deferred_overflow_warning
;
269 fold_deferred_overflow_warning
= NULL
;
271 if (!issue
|| warnmsg
== NULL
)
274 if (gimple_no_warning_p (stmt
))
277 /* Use the smallest code level when deciding to issue the
278 warning. */
279 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
280 code
= fold_deferred_overflow_code
;
282 if (!issue_strict_overflow_warning (code
))
286 locus
= input_location
;
288 locus
= gimple_location (stmt
);
289 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
292 /* Stop deferring overflow warnings, ignoring any deferred
296 fold_undefer_and_ignore_overflow_warnings (void)
298 fold_undefer_overflow_warnings (false, NULL
, 0);
301 /* Whether we are deferring overflow warnings. */
304 fold_deferring_overflow_warnings_p (void)
306 return fold_deferring_overflow_warnings
> 0;
309 /* This is called when we fold something based on the fact that signed
310 overflow is undefined. */
313 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
315 if (fold_deferring_overflow_warnings
> 0)
317 if (fold_deferred_overflow_warning
== NULL
318 || wc
< fold_deferred_overflow_code
)
320 fold_deferred_overflow_warning
= gmsgid
;
321 fold_deferred_overflow_code
= wc
;
324 else if (issue_strict_overflow_warning (wc
))
325 warning (OPT_Wstrict_overflow
, gmsgid
);
328 /* Return true if the built-in mathematical function specified by CODE
329 is odd, i.e. -f(x) == f(-x). */
332 negate_mathfn_p (enum built_in_function code
)
336 CASE_FLT_FN (BUILT_IN_ASIN
):
337 CASE_FLT_FN (BUILT_IN_ASINH
):
338 CASE_FLT_FN (BUILT_IN_ATAN
):
339 CASE_FLT_FN (BUILT_IN_ATANH
):
340 CASE_FLT_FN (BUILT_IN_CASIN
):
341 CASE_FLT_FN (BUILT_IN_CASINH
):
342 CASE_FLT_FN (BUILT_IN_CATAN
):
343 CASE_FLT_FN (BUILT_IN_CATANH
):
344 CASE_FLT_FN (BUILT_IN_CBRT
):
345 CASE_FLT_FN (BUILT_IN_CPROJ
):
346 CASE_FLT_FN (BUILT_IN_CSIN
):
347 CASE_FLT_FN (BUILT_IN_CSINH
):
348 CASE_FLT_FN (BUILT_IN_CTAN
):
349 CASE_FLT_FN (BUILT_IN_CTANH
):
350 CASE_FLT_FN (BUILT_IN_ERF
):
351 CASE_FLT_FN (BUILT_IN_LLROUND
):
352 CASE_FLT_FN (BUILT_IN_LROUND
):
353 CASE_FLT_FN (BUILT_IN_ROUND
):
354 CASE_FLT_FN (BUILT_IN_SIN
):
355 CASE_FLT_FN (BUILT_IN_SINH
):
356 CASE_FLT_FN (BUILT_IN_TAN
):
357 CASE_FLT_FN (BUILT_IN_TANH
):
358 CASE_FLT_FN (BUILT_IN_TRUNC
):
361 CASE_FLT_FN (BUILT_IN_LLRINT
):
362 CASE_FLT_FN (BUILT_IN_LRINT
):
363 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
364 CASE_FLT_FN (BUILT_IN_RINT
):
365 return !flag_rounding_math
;
373 /* Check whether we may negate an integer constant T without causing
374 overflow. */
377 may_negate_without_overflow_p (const_tree t
)
381 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
383 type
= TREE_TYPE (t
);
384 if (TYPE_UNSIGNED (type
))
387 return !wi::only_sign_bit_p (t
);
390 /* Determine whether an expression T can be cheaply negated using
391 the function negate_expr without introducing undefined overflow. */
394 negate_expr_p (tree t
)
401 type
= TREE_TYPE (t
);
404 switch (TREE_CODE (t
))
407 if (INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_WRAPS (type
))
410 /* Check that -CST will not overflow type. */
411 return may_negate_without_overflow_p (t
);
413 return (INTEGRAL_TYPE_P (type
)
414 && TYPE_OVERFLOW_WRAPS (type
));
420 return !TYPE_OVERFLOW_SANITIZED (type
);
423 /* We want to canonicalize to positive real constants. Pretend
424 that only negative ones can be easily negated. */
425 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
428 return negate_expr_p (TREE_REALPART (t
))
429 && negate_expr_p (TREE_IMAGPART (t
));
433 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
436 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
438 for (i
= 0; i
< count
; i
++)
439 if (!negate_expr_p (VECTOR_CST_ELT (t
, i
)))
446 return negate_expr_p (TREE_OPERAND (t
, 0))
447 && negate_expr_p (TREE_OPERAND (t
, 1));
450 return negate_expr_p (TREE_OPERAND (t
, 0));
453 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
454 || HONOR_SIGNED_ZEROS (element_mode (type
)))
456 /* -(A + B) -> (-B) - A. */
457 if (negate_expr_p (TREE_OPERAND (t
, 1))
458 && reorder_operands_p (TREE_OPERAND (t
, 0),
459 TREE_OPERAND (t
, 1)))
461 /* -(A + B) -> (-A) - B. */
462 return negate_expr_p (TREE_OPERAND (t
, 0));
465 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
466 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
467 && !HONOR_SIGNED_ZEROS (element_mode (type
))
468 && reorder_operands_p (TREE_OPERAND (t
, 0),
469 TREE_OPERAND (t
, 1));
472 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
478 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t
))))
479 return negate_expr_p (TREE_OPERAND (t
, 1))
480 || negate_expr_p (TREE_OPERAND (t
, 0));
486 /* In general we can't negate A / B, because if A is INT_MIN and
487 B is 1, we may turn this into INT_MIN / -1 which is undefined
488 and actually traps on some architectures. But if overflow is
489 undefined, we can negate, because - (INT_MIN / 1) is an
490 overflow. */
491 if (INTEGRAL_TYPE_P (TREE_TYPE (t
)))
493 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
495 /* If overflow is undefined then we have to be careful because
496 we ask whether it's ok to associate the negate with the
497 division which is not ok for example for
498 -((a - b) / c) where (-(a - b)) / c may invoke undefined
499 overflow because of negating INT_MIN. So do not use
500 negate_expr_p here but open-code the two important cases. */
501 if (TREE_CODE (TREE_OPERAND (t
, 0)) == NEGATE_EXPR
502 || (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
503 && may_negate_without_overflow_p (TREE_OPERAND (t
, 0))))
506 else if (negate_expr_p (TREE_OPERAND (t
, 0)))
508 return negate_expr_p (TREE_OPERAND (t
, 1));
511 /* Negate -((double)float) as (double)(-float). */
512 if (TREE_CODE (type
) == REAL_TYPE
)
514 tree tem
= strip_float_extensions (t
);
516 return negate_expr_p (tem
);
521 /* Negate -f(x) as f(-x). */
522 if (negate_mathfn_p (builtin_mathfn_code (t
)))
523 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
527 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
528 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
530 tree op1
= TREE_OPERAND (t
, 1);
531 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
542 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
543 simplification is possible.
544 If negate_expr_p would return true for T, NULL_TREE will never be
545 returned. */
548 fold_negate_expr (location_t loc
, tree t
)
550 tree type
= TREE_TYPE (t
);
553 switch (TREE_CODE (t
))
555 /* Convert - (~A) to A + 1. */
557 if (INTEGRAL_TYPE_P (type
))
558 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
559 build_one_cst (type
));
563 tem
= fold_negate_const (t
, type
);
564 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
565 || (ANY_INTEGRAL_TYPE_P (type
)
566 && !TYPE_OVERFLOW_TRAPS (type
)
567 && TYPE_OVERFLOW_WRAPS (type
))
568 || (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
573 tem
= fold_negate_const (t
, type
);
577 tem
= fold_negate_const (t
, type
);
582 tree rpart
= fold_negate_expr (loc
, TREE_REALPART (t
));
583 tree ipart
= fold_negate_expr (loc
, TREE_IMAGPART (t
));
585 return build_complex (type
, rpart
, ipart
);
591 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
592 tree
*elts
= XALLOCAVEC (tree
, count
);
594 for (i
= 0; i
< count
; i
++)
596 elts
[i
] = fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
597 if (elts
[i
] == NULL_TREE
)
601 return build_vector (type
, elts
);
605 if (negate_expr_p (t
))
606 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
607 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
608 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
612 if (negate_expr_p (t
))
613 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
614 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
618 if (!TYPE_OVERFLOW_SANITIZED (type
))
619 return TREE_OPERAND (t
, 0);
623 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
624 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
626 /* -(A + B) -> (-B) - A. */
627 if (negate_expr_p (TREE_OPERAND (t
, 1))
628 && reorder_operands_p (TREE_OPERAND (t
, 0),
629 TREE_OPERAND (t
, 1)))
631 tem
= negate_expr (TREE_OPERAND (t
, 1));
632 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
633 tem
, TREE_OPERAND (t
, 0));
636 /* -(A + B) -> (-A) - B. */
637 if (negate_expr_p (TREE_OPERAND (t
, 0)))
639 tem
= negate_expr (TREE_OPERAND (t
, 0));
640 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
641 tem
, TREE_OPERAND (t
, 1));
647 /* - (A - B) -> B - A */
648 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
649 && !HONOR_SIGNED_ZEROS (element_mode (type
))
650 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
651 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
652 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
656 if (TYPE_UNSIGNED (type
))
662 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
)))
664 tem
= TREE_OPERAND (t
, 1);
665 if (negate_expr_p (tem
))
666 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
667 TREE_OPERAND (t
, 0), negate_expr (tem
));
668 tem
= TREE_OPERAND (t
, 0);
669 if (negate_expr_p (tem
))
670 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
671 negate_expr (tem
), TREE_OPERAND (t
, 1));
678 /* In general we can't negate A / B, because if A is INT_MIN and
679 B is 1, we may turn this into INT_MIN / -1 which is undefined
680 and actually traps on some architectures. But if overflow is
681 undefined, we can negate, because - (INT_MIN / 1) is an
682 overflow. */
683 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
685 const char * const warnmsg
= G_("assuming signed overflow does not "
686 "occur when negating a division");
687 tem
= TREE_OPERAND (t
, 1);
688 if (negate_expr_p (tem
))
690 if (INTEGRAL_TYPE_P (type
)
691 && (TREE_CODE (tem
) != INTEGER_CST
692 || integer_onep (tem
)))
693 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
694 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
695 TREE_OPERAND (t
, 0), negate_expr (tem
));
697 /* If overflow is undefined then we have to be careful because
698 we ask whether it's ok to associate the negate with the
699 division which is not ok for example for
700 -((a - b) / c) where (-(a - b)) / c may invoke undefined
701 overflow because of negating INT_MIN. So do not use
702 negate_expr_p here but open-code the two important cases. */
703 tem
= TREE_OPERAND (t
, 0);
704 if ((INTEGRAL_TYPE_P (type
)
705 && (TREE_CODE (tem
) == NEGATE_EXPR
706 || (TREE_CODE (tem
) == INTEGER_CST
707 && may_negate_without_overflow_p (tem
))))
708 || !INTEGRAL_TYPE_P (type
))
709 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
710 negate_expr (tem
), TREE_OPERAND (t
, 1));
715 /* Convert -((double)float) into (double)(-float). */
716 if (TREE_CODE (type
) == REAL_TYPE
)
718 tem
= strip_float_extensions (t
);
719 if (tem
!= t
&& negate_expr_p (tem
))
720 return fold_convert_loc (loc
, type
, negate_expr (tem
));
725 /* Negate -f(x) as f(-x). */
726 if (negate_mathfn_p (builtin_mathfn_code (t
))
727 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
731 fndecl
= get_callee_fndecl (t
);
732 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
733 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
738 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
739 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
741 tree op1
= TREE_OPERAND (t
, 1);
742 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
744 tree ntype
= TYPE_UNSIGNED (type
)
745 ? signed_type_for (type
)
746 : unsigned_type_for (type
);
747 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
748 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
749 return fold_convert_loc (loc
, type
, temp
);
761 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
762 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
763 return NULL_TREE. */
774 loc
= EXPR_LOCATION (t
);
775 type
= TREE_TYPE (t
);
778 tem
= fold_negate_expr (loc
, t
);
780 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
781 return fold_convert_loc (loc
, type
, tem
);
784 /* Split a tree IN into a constant, literal and variable parts that could be
785 combined with CODE to make IN. "constant" means an expression with
786 TREE_CONSTANT but that isn't an actual constant. CODE must be a
787 commutative arithmetic operation. Store the constant part into *CONP,
788 the literal in *LITP and return the variable part. If a part isn't
789 present, set it to null. If the tree does not decompose in this way,
790 return the entire tree as the variable part and the other parts as null.
792 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
793 case, we negate an operand that was subtracted. Except if it is a
794 literal for which we use *MINUS_LITP instead.
796 If NEGATE_P is true, we are negating all of IN, again except a literal
797 for which we use *MINUS_LITP instead.
799 If IN is itself a literal or constant, return it as appropriate.
801 Note that we do not guarantee that any of the three values will be the
802 same type as IN, but they will have the same signedness and mode. */
805 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
806 tree
*minus_litp
, int negate_p
)
814 /* Strip any conversions that don't change the machine mode or signedness. */
815 STRIP_SIGN_NOPS (in
);
817 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
818 || TREE_CODE (in
) == FIXED_CST
)
820 else if (TREE_CODE (in
) == code
821 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
822 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
823 /* We can associate addition and subtraction together (even
824 though the C standard doesn't say so) for integers because
825 the value is not affected. For reals, the value might be
826 affected, so we can't. */
827 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
828 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
830 tree op0
= TREE_OPERAND (in
, 0);
831 tree op1
= TREE_OPERAND (in
, 1);
832 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
833 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
835 /* First see if either of the operands is a literal, then a constant. */
836 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
837 || TREE_CODE (op0
) == FIXED_CST
)
838 *litp
= op0
, op0
= 0;
839 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
840 || TREE_CODE (op1
) == FIXED_CST
)
841 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
843 if (op0
!= 0 && TREE_CONSTANT (op0
))
844 *conp
= op0
, op0
= 0;
845 else if (op1
!= 0 && TREE_CONSTANT (op1
))
846 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
848 /* If we haven't dealt with either operand, this is not a case we can
849 decompose. Otherwise, VAR is either of the ones remaining, if any. */
850 if (op0
!= 0 && op1
!= 0)
855 var
= op1
, neg_var_p
= neg1_p
;
857 /* Now do any needed negations. */
859 *minus_litp
= *litp
, *litp
= 0;
861 *conp
= negate_expr (*conp
);
863 var
= negate_expr (var
);
865 else if (TREE_CODE (in
) == BIT_NOT_EXPR
866 && code
== PLUS_EXPR
)
868 /* -X - 1 is folded to ~X, undo that here. */
869 *minus_litp
= build_one_cst (TREE_TYPE (in
));
870 var
= negate_expr (TREE_OPERAND (in
, 0));
872 else if (TREE_CONSTANT (in
))
880 *minus_litp
= *litp
, *litp
= 0;
881 else if (*minus_litp
)
882 *litp
= *minus_litp
, *minus_litp
= 0;
883 *conp
= negate_expr (*conp
);
884 var
= negate_expr (var
);
890 /* Re-associate trees split by the above function. T1 and T2 are
891 either expressions to associate or null. Return the new
892 expression, if any. LOC is the location of the new expression. If
893 we build an operation, do it in TYPE and with CODE. */
896 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
903 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
904 try to fold this since we will have infinite recursion. But do
905 deal with any NEGATE_EXPRs. */
906 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
907 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
909 if (code
== PLUS_EXPR
)
911 if (TREE_CODE (t1
) == NEGATE_EXPR
)
912 return build2_loc (loc
, MINUS_EXPR
, type
,
913 fold_convert_loc (loc
, type
, t2
),
914 fold_convert_loc (loc
, type
,
915 TREE_OPERAND (t1
, 0)));
916 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
917 return build2_loc (loc
, MINUS_EXPR
, type
,
918 fold_convert_loc (loc
, type
, t1
),
919 fold_convert_loc (loc
, type
,
920 TREE_OPERAND (t2
, 0)));
921 else if (integer_zerop (t2
))
922 return fold_convert_loc (loc
, type
, t1
);
924 else if (code
== MINUS_EXPR
)
926 if (integer_zerop (t2
))
927 return fold_convert_loc (loc
, type
, t1
);
930 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
931 fold_convert_loc (loc
, type
, t2
));
934 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
935 fold_convert_loc (loc
, type
, t2
));
938 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
939 for use in int_const_binop, size_binop and size_diffop. */
942 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
944 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
946 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
961 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
962 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
963 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
967 /* Combine two integer constants ARG1 and ARG2 under operation CODE
968 to produce a new constant. Return NULL_TREE if we don't know how
969 to evaluate CODE at compile-time. */
972 int_const_binop_1 (enum tree_code code
, const_tree arg1
, const_tree parg2
,
977 tree type
= TREE_TYPE (arg1
);
978 signop sign
= TYPE_SIGN (type
);
979 bool overflow
= false;
981 wide_int arg2
= wide_int::from (parg2
, TYPE_PRECISION (type
),
982 TYPE_SIGN (TREE_TYPE (parg2
)));
987 res
= wi::bit_or (arg1
, arg2
);
991 res
= wi::bit_xor (arg1
, arg2
);
995 res
= wi::bit_and (arg1
, arg2
);
1000 if (wi::neg_p (arg2
))
1003 if (code
== RSHIFT_EXPR
)
1009 if (code
== RSHIFT_EXPR
)
1010 /* It's unclear from the C standard whether shifts can overflow.
1011 The following code ignores overflow; perhaps a C standard
1012 interpretation ruling is needed. */
1013 res
= wi::rshift (arg1
, arg2
, sign
);
1015 res
= wi::lshift (arg1
, arg2
);
1020 if (wi::neg_p (arg2
))
1023 if (code
== RROTATE_EXPR
)
1024 code
= LROTATE_EXPR
;
1026 code
= RROTATE_EXPR
;
1029 if (code
== RROTATE_EXPR
)
1030 res
= wi::rrotate (arg1
, arg2
);
1032 res
= wi::lrotate (arg1
, arg2
);
1036 res
= wi::add (arg1
, arg2
, sign
, &overflow
);
1040 res
= wi::sub (arg1
, arg2
, sign
, &overflow
);
1044 res
= wi::mul (arg1
, arg2
, sign
, &overflow
);
1047 case MULT_HIGHPART_EXPR
:
1048 res
= wi::mul_high (arg1
, arg2
, sign
);
1051 case TRUNC_DIV_EXPR
:
1052 case EXACT_DIV_EXPR
:
1055 res
= wi::div_trunc (arg1
, arg2
, sign
, &overflow
);
1058 case FLOOR_DIV_EXPR
:
1061 res
= wi::div_floor (arg1
, arg2
, sign
, &overflow
);
1067 res
= wi::div_ceil (arg1
, arg2
, sign
, &overflow
);
1070 case ROUND_DIV_EXPR
:
1073 res
= wi::div_round (arg1
, arg2
, sign
, &overflow
);
1076 case TRUNC_MOD_EXPR
:
1079 res
= wi::mod_trunc (arg1
, arg2
, sign
, &overflow
);
1082 case FLOOR_MOD_EXPR
:
1085 res
= wi::mod_floor (arg1
, arg2
, sign
, &overflow
);
1091 res
= wi::mod_ceil (arg1
, arg2
, sign
, &overflow
);
1094 case ROUND_MOD_EXPR
:
1097 res
= wi::mod_round (arg1
, arg2
, sign
, &overflow
);
1101 res
= wi::min (arg1
, arg2
, sign
);
1105 res
= wi::max (arg1
, arg2
, sign
);
1112 t
= force_fit_type (type
, res
, overflowable
,
1113 (((sign
== SIGNED
|| overflowable
== -1)
1115 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (parg2
)));
1121 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
1123 return int_const_binop_1 (code
, arg1
, arg2
, 1);
1126 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1127 constant. We assume ARG1 and ARG2 have the same data type, or at least
1128 are the same kind of constant and the same machine mode. Return zero if
1129 combining the constants is not allowed in the current operating mode. */
1132 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1134 /* Sanity check for the recursive cases. */
1141 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg2
) == INTEGER_CST
)
1143 if (code
== POINTER_PLUS_EXPR
)
1144 return int_const_binop (PLUS_EXPR
,
1145 arg1
, fold_convert (TREE_TYPE (arg1
), arg2
));
1147 return int_const_binop (code
, arg1
, arg2
);
1150 if (TREE_CODE (arg1
) == REAL_CST
&& TREE_CODE (arg2
) == REAL_CST
)
1155 REAL_VALUE_TYPE value
;
1156 REAL_VALUE_TYPE result
;
1160 /* The following codes are handled by real_arithmetic. */
1175 d1
= TREE_REAL_CST (arg1
);
1176 d2
= TREE_REAL_CST (arg2
);
1178 type
= TREE_TYPE (arg1
);
1179 mode
= TYPE_MODE (type
);
1181 /* Don't perform operation if we honor signaling NaNs and
1182 either operand is a NaN. */
1183 if (HONOR_SNANS (mode
)
1184 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1187 /* Don't perform operation if it would raise a division
1188 by zero exception. */
1189 if (code
== RDIV_EXPR
1190 && REAL_VALUES_EQUAL (d2
, dconst0
)
1191 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1194 /* If either operand is a NaN, just return it. Otherwise, set up
1195 for floating-point trap; we return an overflow. */
1196 if (REAL_VALUE_ISNAN (d1
))
1198 else if (REAL_VALUE_ISNAN (d2
))
1201 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1202 real_convert (&result
, mode
, &value
);
1204 /* Don't constant fold this floating point operation if
1205 the result has overflowed and flag_trapping_math. */
1206 if (flag_trapping_math
1207 && MODE_HAS_INFINITIES (mode
)
1208 && REAL_VALUE_ISINF (result
)
1209 && !REAL_VALUE_ISINF (d1
)
1210 && !REAL_VALUE_ISINF (d2
))
1213 /* Don't constant fold this floating point operation if the
1214 result may dependent upon the run-time rounding mode and
1215 flag_rounding_math is set, or if GCC's software emulation
1216 is unable to accurately represent the result. */
1217 if ((flag_rounding_math
1218 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1219 && (inexact
|| !real_identical (&result
, &value
)))
1222 t
= build_real (type
, result
);
1224 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1228 if (TREE_CODE (arg1
) == FIXED_CST
)
1230 FIXED_VALUE_TYPE f1
;
1231 FIXED_VALUE_TYPE f2
;
1232 FIXED_VALUE_TYPE result
;
1237 /* The following codes are handled by fixed_arithmetic. */
1243 case TRUNC_DIV_EXPR
:
1244 if (TREE_CODE (arg2
) != FIXED_CST
)
1246 f2
= TREE_FIXED_CST (arg2
);
1252 if (TREE_CODE (arg2
) != INTEGER_CST
)
1255 f2
.data
.high
= w2
.elt (1);
1256 f2
.data
.low
= w2
.elt (0);
1265 f1
= TREE_FIXED_CST (arg1
);
1266 type
= TREE_TYPE (arg1
);
1267 sat_p
= TYPE_SATURATING (type
);
1268 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1269 t
= build_fixed (type
, result
);
1270 /* Propagate overflow flags. */
1271 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1272 TREE_OVERFLOW (t
) = 1;
1276 if (TREE_CODE (arg1
) == COMPLEX_CST
&& TREE_CODE (arg2
) == COMPLEX_CST
)
1278 tree type
= TREE_TYPE (arg1
);
1279 tree r1
= TREE_REALPART (arg1
);
1280 tree i1
= TREE_IMAGPART (arg1
);
1281 tree r2
= TREE_REALPART (arg2
);
1282 tree i2
= TREE_IMAGPART (arg2
);
1289 real
= const_binop (code
, r1
, r2
);
1290 imag
= const_binop (code
, i1
, i2
);
1294 if (COMPLEX_FLOAT_TYPE_P (type
))
1295 return do_mpc_arg2 (arg1
, arg2
, type
,
1296 /* do_nonfinite= */ folding_initializer
,
1299 real
= const_binop (MINUS_EXPR
,
1300 const_binop (MULT_EXPR
, r1
, r2
),
1301 const_binop (MULT_EXPR
, i1
, i2
));
1302 imag
= const_binop (PLUS_EXPR
,
1303 const_binop (MULT_EXPR
, r1
, i2
),
1304 const_binop (MULT_EXPR
, i1
, r2
));
1308 if (COMPLEX_FLOAT_TYPE_P (type
))
1309 return do_mpc_arg2 (arg1
, arg2
, type
,
1310 /* do_nonfinite= */ folding_initializer
,
1313 case TRUNC_DIV_EXPR
:
1315 case FLOOR_DIV_EXPR
:
1316 case ROUND_DIV_EXPR
:
1317 if (flag_complex_method
== 0)
1319 /* Keep this algorithm in sync with
1320 tree-complex.c:expand_complex_div_straight().
1322 Expand complex division to scalars, straightforward algorithm.
1323 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1327 = const_binop (PLUS_EXPR
,
1328 const_binop (MULT_EXPR
, r2
, r2
),
1329 const_binop (MULT_EXPR
, i2
, i2
));
1331 = const_binop (PLUS_EXPR
,
1332 const_binop (MULT_EXPR
, r1
, r2
),
1333 const_binop (MULT_EXPR
, i1
, i2
));
1335 = const_binop (MINUS_EXPR
,
1336 const_binop (MULT_EXPR
, i1
, r2
),
1337 const_binop (MULT_EXPR
, r1
, i2
));
1339 real
= const_binop (code
, t1
, magsquared
);
1340 imag
= const_binop (code
, t2
, magsquared
);
1344 /* Keep this algorithm in sync with
1345 tree-complex.c:expand_complex_div_wide().
1347 Expand complex division to scalars, modified algorithm to minimize
1348 overflow with wide input ranges. */
1349 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1350 fold_abs_const (r2
, TREE_TYPE (type
)),
1351 fold_abs_const (i2
, TREE_TYPE (type
)));
1353 if (integer_nonzerop (compare
))
1355 /* In the TRUE branch, we compute
1357 div = (br * ratio) + bi;
1358 tr = (ar * ratio) + ai;
1359 ti = (ai * ratio) - ar;
1362 tree ratio
= const_binop (code
, r2
, i2
);
1363 tree div
= const_binop (PLUS_EXPR
, i2
,
1364 const_binop (MULT_EXPR
, r2
, ratio
));
1365 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1366 real
= const_binop (PLUS_EXPR
, real
, i1
);
1367 real
= const_binop (code
, real
, div
);
1369 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1370 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1371 imag
= const_binop (code
, imag
, div
);
1375 /* In the FALSE branch, we compute
1377 divisor = (d * ratio) + c;
1378 tr = (b * ratio) + a;
1379 ti = b - (a * ratio);
1382 tree ratio
= const_binop (code
, i2
, r2
);
1383 tree div
= const_binop (PLUS_EXPR
, r2
,
1384 const_binop (MULT_EXPR
, i2
, ratio
));
1386 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1387 real
= const_binop (PLUS_EXPR
, real
, r1
);
1388 real
= const_binop (code
, real
, div
);
1390 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1391 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1392 imag
= const_binop (code
, imag
, div
);
1402 return build_complex (type
, real
, imag
);
1405 if (TREE_CODE (arg1
) == VECTOR_CST
1406 && TREE_CODE (arg2
) == VECTOR_CST
)
1408 tree type
= TREE_TYPE (arg1
);
1409 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1410 tree
*elts
= XALLOCAVEC (tree
, count
);
1412 for (i
= 0; i
< count
; i
++)
1414 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1415 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1417 elts
[i
] = const_binop (code
, elem1
, elem2
);
1419 /* It is possible that const_binop cannot handle the given
1420 code and return NULL_TREE */
1421 if (elts
[i
] == NULL_TREE
)
1425 return build_vector (type
, elts
);
1428 /* Shifts allow a scalar offset for a vector. */
1429 if (TREE_CODE (arg1
) == VECTOR_CST
1430 && TREE_CODE (arg2
) == INTEGER_CST
)
1432 tree type
= TREE_TYPE (arg1
);
1433 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1434 tree
*elts
= XALLOCAVEC (tree
, count
);
1436 for (i
= 0; i
< count
; i
++)
1438 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1440 elts
[i
] = const_binop (code
, elem1
, arg2
);
1442 /* It is possible that const_binop cannot handle the given
1443 code and return NULL_TREE. */
1444 if (elts
[i
] == NULL_TREE
)
1448 return build_vector (type
, elts
);
1453 /* Overload that adds a TYPE parameter to be able to dispatch
1454 to fold_relational_const. */
1457 const_binop (enum tree_code code
, tree type
, tree arg1
, tree arg2
)
1459 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1460 return fold_relational_const (code
, type
, arg1
, arg2
);
1462 /* ??? Until we make the const_binop worker take the type of the
1463 result as argument put those cases that need it here. */
1467 if ((TREE_CODE (arg1
) == REAL_CST
1468 && TREE_CODE (arg2
) == REAL_CST
)
1469 || (TREE_CODE (arg1
) == INTEGER_CST
1470 && TREE_CODE (arg2
) == INTEGER_CST
))
1471 return build_complex (type
, arg1
, arg2
);
1474 case VEC_PACK_TRUNC_EXPR
:
1475 case VEC_PACK_FIX_TRUNC_EXPR
:
1477 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1480 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
/ 2
1481 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
/ 2);
1482 if (TREE_CODE (arg1
) != VECTOR_CST
1483 || TREE_CODE (arg2
) != VECTOR_CST
)
1486 elts
= XALLOCAVEC (tree
, nelts
);
1487 if (!vec_cst_ctor_to_array (arg1
, elts
)
1488 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
/ 2))
1491 for (i
= 0; i
< nelts
; i
++)
1493 elts
[i
] = fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
1494 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
1495 TREE_TYPE (type
), elts
[i
]);
1496 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1500 return build_vector (type
, elts
);
1503 case VEC_WIDEN_MULT_LO_EXPR
:
1504 case VEC_WIDEN_MULT_HI_EXPR
:
1505 case VEC_WIDEN_MULT_EVEN_EXPR
:
1506 case VEC_WIDEN_MULT_ODD_EXPR
:
1508 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
);
1509 unsigned int out
, ofs
, scale
;
1512 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
* 2
1513 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
* 2);
1514 if (TREE_CODE (arg1
) != VECTOR_CST
|| TREE_CODE (arg2
) != VECTOR_CST
)
1517 elts
= XALLOCAVEC (tree
, nelts
* 4);
1518 if (!vec_cst_ctor_to_array (arg1
, elts
)
1519 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
* 2))
1522 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
1523 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? nelts
: 0;
1524 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
1525 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : nelts
;
1526 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
1528 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1531 for (out
= 0; out
< nelts
; out
++)
1533 unsigned int in1
= (out
<< scale
) + ofs
;
1534 unsigned int in2
= in1
+ nelts
* 2;
1537 t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in1
]);
1538 t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in2
]);
1540 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
1542 elts
[out
] = const_binop (MULT_EXPR
, t1
, t2
);
1543 if (elts
[out
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[out
]))
1547 return build_vector (type
, elts
);
1553 if (TREE_CODE_CLASS (code
) != tcc_binary
)
1556 /* Make sure type and arg0 have the same saturating flag. */
1557 gcc_checking_assert (TYPE_SATURATING (type
)
1558 == TYPE_SATURATING (TREE_TYPE (arg1
)));
1560 return const_binop (code
, arg1
, arg2
);
1563 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1564 Return zero if computing the constants is not possible. */
1567 const_unop (enum tree_code code
, tree type
, tree arg0
)
1573 case FIX_TRUNC_EXPR
:
1574 case FIXED_CONVERT_EXPR
:
1575 return fold_convert_const (code
, type
, arg0
);
1577 case ADDR_SPACE_CONVERT_EXPR
:
1578 if (integer_zerop (arg0
))
1579 return fold_convert_const (code
, type
, arg0
);
1582 case VIEW_CONVERT_EXPR
:
1583 return fold_view_convert_expr (type
, arg0
);
1587 /* Can't call fold_negate_const directly here as that doesn't
1588 handle all cases and we might not be able to negate some
1590 tree tem
= fold_negate_expr (UNKNOWN_LOCATION
, arg0
);
1591 if (tem
&& CONSTANT_CLASS_P (tem
))
1597 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
1598 return fold_abs_const (arg0
, type
);
1602 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1604 tree ipart
= fold_negate_const (TREE_IMAGPART (arg0
),
1606 return build_complex (type
, TREE_REALPART (arg0
), ipart
);
1611 if (TREE_CODE (arg0
) == INTEGER_CST
)
1612 return fold_not_const (arg0
, type
);
1613 /* Perform BIT_NOT_EXPR on each element individually. */
1614 else if (TREE_CODE (arg0
) == VECTOR_CST
)
1618 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
1620 elements
= XALLOCAVEC (tree
, count
);
1621 for (i
= 0; i
< count
; i
++)
1623 elem
= VECTOR_CST_ELT (arg0
, i
);
1624 elem
= const_unop (BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
1625 if (elem
== NULL_TREE
)
1630 return build_vector (type
, elements
);
1634 case TRUTH_NOT_EXPR
:
1635 if (TREE_CODE (arg0
) == INTEGER_CST
)
1636 return constant_boolean_node (integer_zerop (arg0
), type
);
1640 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1641 return fold_convert (type
, TREE_REALPART (arg0
));
1645 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1646 return fold_convert (type
, TREE_IMAGPART (arg0
));
1649 case VEC_UNPACK_LO_EXPR
:
1650 case VEC_UNPACK_HI_EXPR
:
1651 case VEC_UNPACK_FLOAT_LO_EXPR
:
1652 case VEC_UNPACK_FLOAT_HI_EXPR
:
1654 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1656 enum tree_code subcode
;
1658 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2);
1659 if (TREE_CODE (arg0
) != VECTOR_CST
)
1662 elts
= XALLOCAVEC (tree
, nelts
* 2);
1663 if (!vec_cst_ctor_to_array (arg0
, elts
))
1666 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
1667 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
1670 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
1673 subcode
= FLOAT_EXPR
;
1675 for (i
= 0; i
< nelts
; i
++)
1677 elts
[i
] = fold_convert_const (subcode
, TREE_TYPE (type
), elts
[i
]);
1678 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1682 return build_vector (type
, elts
);
1685 case REDUC_MIN_EXPR
:
1686 case REDUC_MAX_EXPR
:
1687 case REDUC_PLUS_EXPR
:
1689 unsigned int nelts
, i
;
1691 enum tree_code subcode
;
1693 if (TREE_CODE (arg0
) != VECTOR_CST
)
1695 nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
));
1697 elts
= XALLOCAVEC (tree
, nelts
);
1698 if (!vec_cst_ctor_to_array (arg0
, elts
))
1703 case REDUC_MIN_EXPR
: subcode
= MIN_EXPR
; break;
1704 case REDUC_MAX_EXPR
: subcode
= MAX_EXPR
; break;
1705 case REDUC_PLUS_EXPR
: subcode
= PLUS_EXPR
; break;
1706 default: gcc_unreachable ();
1709 for (i
= 1; i
< nelts
; i
++)
1711 elts
[0] = const_binop (subcode
, elts
[0], elts
[i
]);
1712 if (elts
[0] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[0]))
1726 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1727 indicates which particular sizetype to create. */
1730 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1732 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1735 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1736 is a tree code. The type of the result is taken from the operands.
1737 Both must be equivalent integer types, ala int_binop_types_match_p.
1738 If the operands are constant, so is the result. */
1741 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1743 tree type
= TREE_TYPE (arg0
);
1745 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1746 return error_mark_node
;
1748 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1751 /* Handle the special case of two integer constants faster. */
1752 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1754 /* And some specific cases even faster than that. */
1755 if (code
== PLUS_EXPR
)
1757 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1759 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1762 else if (code
== MINUS_EXPR
)
1764 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1767 else if (code
== MULT_EXPR
)
1769 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1773 /* Handle general case of two integer constants. For sizetype
1774 constant calculations we always want to know about overflow,
1775 even in the unsigned case. */
1776 return int_const_binop_1 (code
, arg0
, arg1
, -1);
1779 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1782 /* Given two values, either both of sizetype or both of bitsizetype,
1783 compute the difference between the two values. Return the value
1784 in signed type corresponding to the type of the operands. */
1787 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1789 tree type
= TREE_TYPE (arg0
);
1792 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1795 /* If the type is already signed, just do the simple thing. */
1796 if (!TYPE_UNSIGNED (type
))
1797 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1799 if (type
== sizetype
)
1801 else if (type
== bitsizetype
)
1802 ctype
= sbitsizetype
;
1804 ctype
= signed_type_for (type
);
1806 /* If either operand is not a constant, do the conversions to the signed
1807 type and subtract. The hardware will do the right thing with any
1808 overflow in the subtraction. */
1809 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1810 return size_binop_loc (loc
, MINUS_EXPR
,
1811 fold_convert_loc (loc
, ctype
, arg0
),
1812 fold_convert_loc (loc
, ctype
, arg1
));
1814 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1815 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1816 overflow) and negate (which can't either). Special-case a result
1817 of zero while we're here. */
1818 if (tree_int_cst_equal (arg0
, arg1
))
1819 return build_int_cst (ctype
, 0);
1820 else if (tree_int_cst_lt (arg1
, arg0
))
1821 return fold_convert_loc (loc
, ctype
,
1822 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1824 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1825 fold_convert_loc (loc
, ctype
,
1826 size_binop_loc (loc
,
1831 /* A subroutine of fold_convert_const handling conversions of an
1832 INTEGER_CST to another integer type. */
1835 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1837 /* Given an integer constant, make new constant with new type,
1838 appropriately sign-extended or truncated. Use widest_int
1839 so that any extension is done according ARG1's type. */
1840 return force_fit_type (type
, wi::to_widest (arg1
),
1841 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1842 TREE_OVERFLOW (arg1
));
1845 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1846 to an integer type. */
1849 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
1851 bool overflow
= false;
1854 /* The following code implements the floating point to integer
1855 conversion rules required by the Java Language Specification,
1856 that IEEE NaNs are mapped to zero and values that overflow
1857 the target precision saturate, i.e. values greater than
1858 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1859 are mapped to INT_MIN. These semantics are allowed by the
1860 C and C++ standards that simply state that the behavior of
1861 FP-to-integer conversion is unspecified upon overflow. */
1865 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1869 case FIX_TRUNC_EXPR
:
1870 real_trunc (&r
, VOIDmode
, &x
);
1877 /* If R is NaN, return zero and show we have an overflow. */
1878 if (REAL_VALUE_ISNAN (r
))
1881 val
= wi::zero (TYPE_PRECISION (type
));
1884 /* See if R is less than the lower bound or greater than the
1889 tree lt
= TYPE_MIN_VALUE (type
);
1890 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1891 if (REAL_VALUES_LESS (r
, l
))
1900 tree ut
= TYPE_MAX_VALUE (type
);
1903 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1904 if (REAL_VALUES_LESS (u
, r
))
1913 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
1915 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1919 /* A subroutine of fold_convert_const handling conversions of a
1920 FIXED_CST to an integer type. */
1923 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1926 double_int temp
, temp_trunc
;
1929 /* Right shift FIXED_CST to temp by fbit. */
1930 temp
= TREE_FIXED_CST (arg1
).data
;
1931 mode
= TREE_FIXED_CST (arg1
).mode
;
1932 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
1934 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
1935 HOST_BITS_PER_DOUBLE_INT
,
1936 SIGNED_FIXED_POINT_MODE_P (mode
));
1938 /* Left shift temp to temp_trunc by fbit. */
1939 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
1940 HOST_BITS_PER_DOUBLE_INT
,
1941 SIGNED_FIXED_POINT_MODE_P (mode
));
1945 temp
= double_int_zero
;
1946 temp_trunc
= double_int_zero
;
1949 /* If FIXED_CST is negative, we need to round the value toward 0.
1950 By checking if the fractional bits are not zero to add 1 to temp. */
1951 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1952 && temp_trunc
.is_negative ()
1953 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
1954 temp
+= double_int_one
;
1956 /* Given a fixed-point constant, make new constant with new type,
1957 appropriately sign-extended or truncated. */
1958 t
= force_fit_type (type
, temp
, -1,
1959 (temp
.is_negative ()
1960 && (TYPE_UNSIGNED (type
)
1961 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1962 | TREE_OVERFLOW (arg1
));
1967 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1968 to another floating point type. */
1971 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1973 REAL_VALUE_TYPE value
;
1976 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1977 t
= build_real (type
, value
);
1979 /* If converting an infinity or NAN to a representation that doesn't
1980 have one, set the overflow bit so that we can produce some kind of
1981 error message at the appropriate point if necessary. It's not the
1982 most user-friendly message, but it's better than nothing. */
1983 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
1984 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
1985 TREE_OVERFLOW (t
) = 1;
1986 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
1987 && !MODE_HAS_NANS (TYPE_MODE (type
)))
1988 TREE_OVERFLOW (t
) = 1;
1989 /* Regular overflow, conversion produced an infinity in a mode that
1990 can't represent them. */
1991 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
1992 && REAL_VALUE_ISINF (value
)
1993 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
1994 TREE_OVERFLOW (t
) = 1;
1996 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2000 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2001 to a floating point type. */
2004 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
2006 REAL_VALUE_TYPE value
;
2009 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
2010 t
= build_real (type
, value
);
2012 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2016 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2017 to another fixed-point type. */
2020 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
2022 FIXED_VALUE_TYPE value
;
2026 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
2027 TYPE_SATURATING (type
));
2028 t
= build_fixed (type
, value
);
2030 /* Propagate overflow flags. */
2031 if (overflow_p
| TREE_OVERFLOW (arg1
))
2032 TREE_OVERFLOW (t
) = 1;
2036 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2037 to a fixed-point type. */
2040 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
2042 FIXED_VALUE_TYPE value
;
2047 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
2049 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
2050 if (TREE_INT_CST_NUNITS (arg1
) == 1)
2051 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? (HOST_WIDE_INT
) -1 : 0;
2053 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
2055 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
), di
,
2056 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
2057 TYPE_SATURATING (type
));
2058 t
= build_fixed (type
, value
);
2060 /* Propagate overflow flags. */
2061 if (overflow_p
| TREE_OVERFLOW (arg1
))
2062 TREE_OVERFLOW (t
) = 1;
2066 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2067 to a fixed-point type. */
2070 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
2072 FIXED_VALUE_TYPE value
;
2076 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
2077 &TREE_REAL_CST (arg1
),
2078 TYPE_SATURATING (type
));
2079 t
= build_fixed (type
, value
);
2081 /* Propagate overflow flags. */
2082 if (overflow_p
| TREE_OVERFLOW (arg1
))
2083 TREE_OVERFLOW (t
) = 1;
2087 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2088 type TYPE. If no simplification can be done return NULL_TREE. */
2091 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
2093 if (TREE_TYPE (arg1
) == type
)
2096 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
2097 || TREE_CODE (type
) == OFFSET_TYPE
)
2099 if (TREE_CODE (arg1
) == INTEGER_CST
)
2100 return fold_convert_const_int_from_int (type
, arg1
);
2101 else if (TREE_CODE (arg1
) == REAL_CST
)
2102 return fold_convert_const_int_from_real (code
, type
, arg1
);
2103 else if (TREE_CODE (arg1
) == FIXED_CST
)
2104 return fold_convert_const_int_from_fixed (type
, arg1
);
2106 else if (TREE_CODE (type
) == REAL_TYPE
)
2108 if (TREE_CODE (arg1
) == INTEGER_CST
)
2109 return build_real_from_int_cst (type
, arg1
);
2110 else if (TREE_CODE (arg1
) == REAL_CST
)
2111 return fold_convert_const_real_from_real (type
, arg1
);
2112 else if (TREE_CODE (arg1
) == FIXED_CST
)
2113 return fold_convert_const_real_from_fixed (type
, arg1
);
2115 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
2117 if (TREE_CODE (arg1
) == FIXED_CST
)
2118 return fold_convert_const_fixed_from_fixed (type
, arg1
);
2119 else if (TREE_CODE (arg1
) == INTEGER_CST
)
2120 return fold_convert_const_fixed_from_int (type
, arg1
);
2121 else if (TREE_CODE (arg1
) == REAL_CST
)
2122 return fold_convert_const_fixed_from_real (type
, arg1
);
2127 /* Construct a vector of zero elements of vector type TYPE. */
2130 build_zero_vector (tree type
)
2134 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2135 return build_vector_from_val (type
, t
);
2138 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2141 fold_convertible_p (const_tree type
, const_tree arg
)
2143 tree orig
= TREE_TYPE (arg
);
2148 if (TREE_CODE (arg
) == ERROR_MARK
2149 || TREE_CODE (type
) == ERROR_MARK
2150 || TREE_CODE (orig
) == ERROR_MARK
)
2153 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2156 switch (TREE_CODE (type
))
2158 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2159 case POINTER_TYPE
: case REFERENCE_TYPE
:
2161 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2162 || TREE_CODE (orig
) == OFFSET_TYPE
)
2164 return (TREE_CODE (orig
) == VECTOR_TYPE
2165 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2168 case FIXED_POINT_TYPE
:
2172 return TREE_CODE (type
) == TREE_CODE (orig
);
2179 /* Convert expression ARG to type TYPE. Used by the middle-end for
2180 simple conversions in preference to calling the front-end's convert. */
2183 fold_convert_loc (location_t loc
, tree type
, tree arg
)
2185 tree orig
= TREE_TYPE (arg
);
2191 if (TREE_CODE (arg
) == ERROR_MARK
2192 || TREE_CODE (type
) == ERROR_MARK
2193 || TREE_CODE (orig
) == ERROR_MARK
)
2194 return error_mark_node
;
2196 switch (TREE_CODE (type
))
2199 case REFERENCE_TYPE
:
2200 /* Handle conversions between pointers to different address spaces. */
2201 if (POINTER_TYPE_P (orig
)
2202 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
2203 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
2204 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
2207 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2209 if (TREE_CODE (arg
) == INTEGER_CST
)
2211 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2212 if (tem
!= NULL_TREE
)
2215 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2216 || TREE_CODE (orig
) == OFFSET_TYPE
)
2217 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2218 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2219 return fold_convert_loc (loc
, type
,
2220 fold_build1_loc (loc
, REALPART_EXPR
,
2221 TREE_TYPE (orig
), arg
));
2222 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2223 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2224 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2227 if (TREE_CODE (arg
) == INTEGER_CST
)
2229 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2230 if (tem
!= NULL_TREE
)
2233 else if (TREE_CODE (arg
) == REAL_CST
)
2235 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2236 if (tem
!= NULL_TREE
)
2239 else if (TREE_CODE (arg
) == FIXED_CST
)
2241 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2242 if (tem
!= NULL_TREE
)
2246 switch (TREE_CODE (orig
))
2249 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2250 case POINTER_TYPE
: case REFERENCE_TYPE
:
2251 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2254 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2256 case FIXED_POINT_TYPE
:
2257 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2260 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2261 return fold_convert_loc (loc
, type
, tem
);
2267 case FIXED_POINT_TYPE
:
2268 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2269 || TREE_CODE (arg
) == REAL_CST
)
2271 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2272 if (tem
!= NULL_TREE
)
2273 goto fold_convert_exit
;
2276 switch (TREE_CODE (orig
))
2278 case FIXED_POINT_TYPE
:
2283 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2286 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2287 return fold_convert_loc (loc
, type
, tem
);
2294 switch (TREE_CODE (orig
))
2297 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2298 case POINTER_TYPE
: case REFERENCE_TYPE
:
2300 case FIXED_POINT_TYPE
:
2301 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2302 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2303 fold_convert_loc (loc
, TREE_TYPE (type
),
2304 integer_zero_node
));
2309 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2311 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2312 TREE_OPERAND (arg
, 0));
2313 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2314 TREE_OPERAND (arg
, 1));
2315 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2318 arg
= save_expr (arg
);
2319 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2320 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2321 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2322 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2323 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2331 if (integer_zerop (arg
))
2332 return build_zero_vector (type
);
2333 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2334 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2335 || TREE_CODE (orig
) == VECTOR_TYPE
);
2336 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2339 tem
= fold_ignored_result (arg
);
2340 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2343 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2344 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2348 protected_set_expr_location_unshare (tem
, loc
);
2352 /* Return false if expr can be assumed not to be an lvalue, true
2356 maybe_lvalue_p (const_tree x
)
2358 /* We only need to wrap lvalue tree codes. */
2359 switch (TREE_CODE (x
))
2372 case ARRAY_RANGE_REF
:
2378 case PREINCREMENT_EXPR
:
2379 case PREDECREMENT_EXPR
:
2381 case TRY_CATCH_EXPR
:
2382 case WITH_CLEANUP_EXPR
:
2391 /* Assume the worst for front-end tree codes. */
2392 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2400 /* Return an expr equal to X but certainly not valid as an lvalue. */
2403 non_lvalue_loc (location_t loc
, tree x
)
2405 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2410 if (! maybe_lvalue_p (x
))
2412 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2415 /* When pedantic, return an expr equal to X but certainly not valid as a
2416 pedantic lvalue. Otherwise, return X. */
2419 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2421 return protected_set_expr_location_unshare (x
, loc
);
2424 /* Given a tree comparison code, return the code that is the logical inverse.
2425 It is generally not safe to do this for floating-point comparisons, except
2426 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2427 ERROR_MARK in this case. */
2430 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2432 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2433 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2443 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2445 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2447 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2449 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2463 return UNORDERED_EXPR
;
2464 case UNORDERED_EXPR
:
2465 return ORDERED_EXPR
;
2471 /* Similar, but return the comparison that results if the operands are
2472 swapped. This is safe for floating-point. */
2475 swap_tree_comparison (enum tree_code code
)
2482 case UNORDERED_EXPR
:
2508 /* Convert a comparison tree code from an enum tree_code representation
2509 into a compcode bit-based encoding. This function is the inverse of
2510 compcode_to_comparison. */
2512 static enum comparison_code
2513 comparison_to_compcode (enum tree_code code
)
2530 return COMPCODE_ORD
;
2531 case UNORDERED_EXPR
:
2532 return COMPCODE_UNORD
;
2534 return COMPCODE_UNLT
;
2536 return COMPCODE_UNEQ
;
2538 return COMPCODE_UNLE
;
2540 return COMPCODE_UNGT
;
2542 return COMPCODE_LTGT
;
2544 return COMPCODE_UNGE
;
2550 /* Convert a compcode bit-based encoding of a comparison operator back
2551 to GCC's enum tree_code representation. This function is the
2552 inverse of comparison_to_compcode. */
2554 static enum tree_code
2555 compcode_to_comparison (enum comparison_code code
)
2572 return ORDERED_EXPR
;
2573 case COMPCODE_UNORD
:
2574 return UNORDERED_EXPR
;
2592 /* Return a tree for the comparison which is the combination of
2593 doing the AND or OR (depending on CODE) of the two operations LCODE
2594 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2595 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2596 if this makes the transformation invalid. */
2599 combine_comparisons (location_t loc
,
2600 enum tree_code code
, enum tree_code lcode
,
2601 enum tree_code rcode
, tree truth_type
,
2602 tree ll_arg
, tree lr_arg
)
2604 bool honor_nans
= HONOR_NANS (ll_arg
);
2605 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2606 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2611 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2612 compcode
= lcompcode
& rcompcode
;
2615 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2616 compcode
= lcompcode
| rcompcode
;
2625 /* Eliminate unordered comparisons, as well as LTGT and ORD
2626 which are not used unless the mode has NaNs. */
2627 compcode
&= ~COMPCODE_UNORD
;
2628 if (compcode
== COMPCODE_LTGT
)
2629 compcode
= COMPCODE_NE
;
2630 else if (compcode
== COMPCODE_ORD
)
2631 compcode
= COMPCODE_TRUE
;
2633 else if (flag_trapping_math
)
2635 /* Check that the original operation and the optimized ones will trap
2636 under the same condition. */
2637 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2638 && (lcompcode
!= COMPCODE_EQ
)
2639 && (lcompcode
!= COMPCODE_ORD
);
2640 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2641 && (rcompcode
!= COMPCODE_EQ
)
2642 && (rcompcode
!= COMPCODE_ORD
);
2643 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2644 && (compcode
!= COMPCODE_EQ
)
2645 && (compcode
!= COMPCODE_ORD
);
2647 /* In a short-circuited boolean expression the LHS might be
2648 such that the RHS, if evaluated, will never trap. For
2649 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2650 if neither x nor y is NaN. (This is a mixed blessing: for
2651 example, the expression above will never trap, hence
2652 optimizing it to x < y would be invalid). */
2653 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2654 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2657 /* If the comparison was short-circuited, and only the RHS
2658 trapped, we may now generate a spurious trap. */
2660 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2663 /* If we changed the conditions that cause a trap, we lose. */
2664 if ((ltrap
|| rtrap
) != trap
)
2668 if (compcode
== COMPCODE_TRUE
)
2669 return constant_boolean_node (true, truth_type
);
2670 else if (compcode
== COMPCODE_FALSE
)
2671 return constant_boolean_node (false, truth_type
);
2674 enum tree_code tcode
;
2676 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2677 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2681 /* Return nonzero if two operands (typically of the same tree node)
2682 are necessarily equal. If either argument has side-effects this
2683 function returns zero. FLAGS modifies behavior as follows:
2685 If OEP_ONLY_CONST is set, only return nonzero for constants.
2686 This function tests whether the operands are indistinguishable;
2687 it does not test whether they are equal using C's == operation.
2688 The distinction is important for IEEE floating point, because
2689 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2690 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2692 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2693 even though it may hold multiple values during a function.
2694 This is because a GCC tree node guarantees that nothing else is
2695 executed between the evaluation of its "operands" (which may often
2696 be evaluated in arbitrary order). Hence if the operands themselves
2697 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2698 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2699 unset means assuming isochronic (or instantaneous) tree equivalence.
2700 Unless comparing arbitrary expression trees, such as from different
2701 statements, this flag can usually be left unset.
2703 If OEP_PURE_SAME is set, then pure functions with identical arguments
2704 are considered the same. It is used when the caller has other ways
2705 to ensure that global memory is unchanged in between. */
2708 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2710 /* If either is ERROR_MARK, they aren't equal. */
2711 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2712 || TREE_TYPE (arg0
) == error_mark_node
2713 || TREE_TYPE (arg1
) == error_mark_node
)
2716 /* Similar, if either does not have a type (like a released SSA name),
2717 they aren't equal. */
2718 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2721 /* Check equality of integer constants before bailing out due to
2722 precision differences. */
2723 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2724 return tree_int_cst_equal (arg0
, arg1
);
2726 /* If both types don't have the same signedness, then we can't consider
2727 them equal. We must check this before the STRIP_NOPS calls
2728 because they may change the signedness of the arguments. As pointers
2729 strictly don't have a signedness, require either two pointers or
2730 two non-pointers as well. */
2731 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2732 || POINTER_TYPE_P (TREE_TYPE (arg0
)) != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2735 /* We cannot consider pointers to different address space equal. */
2736 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && POINTER_TYPE_P (TREE_TYPE (arg1
))
2737 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2738 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2741 /* If both types don't have the same precision, then it is not safe
2743 if (element_precision (TREE_TYPE (arg0
))
2744 != element_precision (TREE_TYPE (arg1
)))
2750 /* In case both args are comparisons but with different comparison
2751 code, try to swap the comparison operands of one arg to produce
2752 a match and compare that variant. */
2753 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2754 && COMPARISON_CLASS_P (arg0
)
2755 && COMPARISON_CLASS_P (arg1
))
2757 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2759 if (TREE_CODE (arg0
) == swap_code
)
2760 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2761 TREE_OPERAND (arg1
, 1), flags
)
2762 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2763 TREE_OPERAND (arg1
, 0), flags
);
2766 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2767 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2768 && !(CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
)))
2771 /* This is needed for conversions and for COMPONENT_REF.
2772 Might as well play it safe and always test this. */
2773 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2774 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2775 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2778 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2779 We don't care about side effects in that case because the SAVE_EXPR
2780 takes care of that for us. In all other cases, two expressions are
2781 equal if they have no side effects. If we have two identical
2782 expressions with side effects that should be treated the same due
2783 to the only side effects being identical SAVE_EXPR's, that will
2784 be detected in the recursive calls below.
2785 If we are taking an invariant address of two identical objects
2786 they are necessarily equal as well. */
2787 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2788 && (TREE_CODE (arg0
) == SAVE_EXPR
2789 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2790 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2793 /* Next handle constant cases, those for which we can return 1 even
2794 if ONLY_CONST is set. */
2795 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2796 switch (TREE_CODE (arg0
))
2799 return tree_int_cst_equal (arg0
, arg1
);
2802 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2803 TREE_FIXED_CST (arg1
));
2806 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2807 TREE_REAL_CST (arg1
)))
2811 if (!HONOR_SIGNED_ZEROS (arg0
))
2813 /* If we do not distinguish between signed and unsigned zero,
2814 consider them equal. */
2815 if (real_zerop (arg0
) && real_zerop (arg1
))
2824 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2827 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2829 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2830 VECTOR_CST_ELT (arg1
, i
), flags
))
2837 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2839 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2843 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2844 && ! memcmp (TREE_STRING_POINTER (arg0
),
2845 TREE_STRING_POINTER (arg1
),
2846 TREE_STRING_LENGTH (arg0
)));
2849 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2850 TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
)
2851 ? OEP_CONSTANT_ADDRESS_OF
| OEP_ADDRESS_OF
: 0);
2856 if (flags
& OEP_ONLY_CONST
)
2859 /* Define macros to test an operand from arg0 and arg1 for equality and a
2860 variant that allows null and views null as being different from any
2861 non-null value. In the latter case, if either is null, the both
2862 must be; otherwise, do the normal comparison. */
2863 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2864 TREE_OPERAND (arg1, N), flags)
2866 #define OP_SAME_WITH_NULL(N) \
2867 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2868 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2870 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2873 /* Two conversions are equal only if signedness and modes match. */
2874 switch (TREE_CODE (arg0
))
2877 case FIX_TRUNC_EXPR
:
2878 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2879 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2889 case tcc_comparison
:
2891 if (OP_SAME (0) && OP_SAME (1))
2894 /* For commutative ops, allow the other order. */
2895 return (commutative_tree_code (TREE_CODE (arg0
))
2896 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2897 TREE_OPERAND (arg1
, 1), flags
)
2898 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2899 TREE_OPERAND (arg1
, 0), flags
));
2902 /* If either of the pointer (or reference) expressions we are
2903 dereferencing contain a side effect, these cannot be equal,
2904 but their addresses can be. */
2905 if ((flags
& OEP_CONSTANT_ADDRESS_OF
) == 0
2906 && (TREE_SIDE_EFFECTS (arg0
)
2907 || TREE_SIDE_EFFECTS (arg1
)))
2910 switch (TREE_CODE (arg0
))
2913 if (!(flags
& OEP_ADDRESS_OF
)
2914 && (TYPE_ALIGN (TREE_TYPE (arg0
))
2915 != TYPE_ALIGN (TREE_TYPE (arg1
))))
2917 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2924 case TARGET_MEM_REF
:
2926 /* Require equal access sizes, and similar pointer types.
2927 We can have incomplete types for array references of
2928 variable-sized arrays from the Fortran frontend
2929 though. Also verify the types are compatible. */
2930 if (!((TYPE_SIZE (TREE_TYPE (arg0
)) == TYPE_SIZE (TREE_TYPE (arg1
))
2931 || (TYPE_SIZE (TREE_TYPE (arg0
))
2932 && TYPE_SIZE (TREE_TYPE (arg1
))
2933 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2934 TYPE_SIZE (TREE_TYPE (arg1
)), flags
)))
2935 && types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
))
2936 && ((flags
& OEP_ADDRESS_OF
)
2937 || (alias_ptr_types_compatible_p
2938 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
2939 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
2940 && (MR_DEPENDENCE_CLIQUE (arg0
)
2941 == MR_DEPENDENCE_CLIQUE (arg1
))
2942 && (MR_DEPENDENCE_BASE (arg0
)
2943 == MR_DEPENDENCE_BASE (arg1
))
2944 && (TYPE_ALIGN (TREE_TYPE (arg0
))
2945 == TYPE_ALIGN (TREE_TYPE (arg1
)))))))
2947 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2948 return (OP_SAME (0) && OP_SAME (1)
2949 /* TARGET_MEM_REF require equal extra operands. */
2950 && (TREE_CODE (arg0
) != TARGET_MEM_REF
2951 || (OP_SAME_WITH_NULL (2)
2952 && OP_SAME_WITH_NULL (3)
2953 && OP_SAME_WITH_NULL (4))));
2956 case ARRAY_RANGE_REF
:
2957 /* Operands 2 and 3 may be null.
2958 Compare the array index by value if it is constant first as we
2959 may have different types but same value here. */
2962 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2963 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
2964 TREE_OPERAND (arg1
, 1))
2966 && OP_SAME_WITH_NULL (2)
2967 && OP_SAME_WITH_NULL (3));
2970 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2971 may be NULL when we're called to compare MEM_EXPRs. */
2972 if (!OP_SAME_WITH_NULL (0)
2975 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2976 return OP_SAME_WITH_NULL (2);
2981 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2982 return OP_SAME (1) && OP_SAME (2);
2988 case tcc_expression
:
2989 switch (TREE_CODE (arg0
))
2992 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2993 TREE_OPERAND (arg1
, 0),
2994 flags
| OEP_ADDRESS_OF
);
2996 case TRUTH_NOT_EXPR
:
2999 case TRUTH_ANDIF_EXPR
:
3000 case TRUTH_ORIF_EXPR
:
3001 return OP_SAME (0) && OP_SAME (1);
3004 case WIDEN_MULT_PLUS_EXPR
:
3005 case WIDEN_MULT_MINUS_EXPR
:
3008 /* The multiplcation operands are commutative. */
3011 case TRUTH_AND_EXPR
:
3013 case TRUTH_XOR_EXPR
:
3014 if (OP_SAME (0) && OP_SAME (1))
3017 /* Otherwise take into account this is a commutative operation. */
3018 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3019 TREE_OPERAND (arg1
, 1), flags
)
3020 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3021 TREE_OPERAND (arg1
, 0), flags
));
3026 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3033 switch (TREE_CODE (arg0
))
3036 if ((CALL_EXPR_FN (arg0
) == NULL_TREE
)
3037 != (CALL_EXPR_FN (arg1
) == NULL_TREE
))
3038 /* If not both CALL_EXPRs are either internal or normal function
3039 functions, then they are not equal. */
3041 else if (CALL_EXPR_FN (arg0
) == NULL_TREE
)
3043 /* If the CALL_EXPRs call different internal functions, then they
3045 if (CALL_EXPR_IFN (arg0
) != CALL_EXPR_IFN (arg1
))
3050 /* If the CALL_EXPRs call different functions, then they are not
3052 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3058 unsigned int cef
= call_expr_flags (arg0
);
3059 if (flags
& OEP_PURE_SAME
)
3060 cef
&= ECF_CONST
| ECF_PURE
;
3067 /* Now see if all the arguments are the same. */
3069 const_call_expr_arg_iterator iter0
, iter1
;
3071 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3072 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3074 a0
= next_const_call_expr_arg (&iter0
),
3075 a1
= next_const_call_expr_arg (&iter1
))
3076 if (! operand_equal_p (a0
, a1
, flags
))
3079 /* If we get here and both argument lists are exhausted
3080 then the CALL_EXPRs are equal. */
3081 return ! (a0
|| a1
);
3087 case tcc_declaration
:
3088 /* Consider __builtin_sqrt equal to sqrt. */
3089 return (TREE_CODE (arg0
) == FUNCTION_DECL
3090 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
3091 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3092 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
3099 #undef OP_SAME_WITH_NULL
3102 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3103 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3105 When in doubt, return 0. */
3108 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
3110 int unsignedp1
, unsignedpo
;
3111 tree primarg0
, primarg1
, primother
;
3112 unsigned int correct_width
;
3114 if (operand_equal_p (arg0
, arg1
, 0))
3117 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
3118 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
3121 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3122 and see if the inner values are the same. This removes any
3123 signedness comparison, which doesn't matter here. */
3124 primarg0
= arg0
, primarg1
= arg1
;
3125 STRIP_NOPS (primarg0
);
3126 STRIP_NOPS (primarg1
);
3127 if (operand_equal_p (primarg0
, primarg1
, 0))
3130 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3131 actual comparison operand, ARG0.
3133 First throw away any conversions to wider types
3134 already present in the operands. */
3136 primarg1
= get_narrower (arg1
, &unsignedp1
);
3137 primother
= get_narrower (other
, &unsignedpo
);
3139 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
3140 if (unsignedp1
== unsignedpo
3141 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
3142 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
3144 tree type
= TREE_TYPE (arg0
);
3146 /* Make sure shorter operand is extended the right way
3147 to match the longer operand. */
3148 primarg1
= fold_convert (signed_or_unsigned_type_for
3149 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
3151 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
3158 /* See if ARG is an expression that is either a comparison or is performing
3159 arithmetic on comparisons. The comparisons must only be comparing
3160 two different values, which will be stored in *CVAL1 and *CVAL2; if
3161 they are nonzero it means that some operands have already been found.
3162 No variables may be used anywhere else in the expression except in the
3163 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3164 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3166 If this is true, return 1. Otherwise, return zero. */
3169 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
3171 enum tree_code code
= TREE_CODE (arg
);
3172 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3174 /* We can handle some of the tcc_expression cases here. */
3175 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3177 else if (tclass
== tcc_expression
3178 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
3179 || code
== COMPOUND_EXPR
))
3180 tclass
= tcc_binary
;
3182 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
3183 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
3185 /* If we've already found a CVAL1 or CVAL2, this expression is
3186 two complex to handle. */
3187 if (*cval1
|| *cval2
)
3197 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
3200 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
3201 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3202 cval1
, cval2
, save_p
));
3207 case tcc_expression
:
3208 if (code
== COND_EXPR
)
3209 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
3210 cval1
, cval2
, save_p
)
3211 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3212 cval1
, cval2
, save_p
)
3213 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
3214 cval1
, cval2
, save_p
));
3217 case tcc_comparison
:
3218 /* First see if we can handle the first operand, then the second. For
3219 the second operand, we know *CVAL1 can't be zero. It must be that
3220 one side of the comparison is each of the values; test for the
3221 case where this isn't true by failing if the two operands
3224 if (operand_equal_p (TREE_OPERAND (arg
, 0),
3225 TREE_OPERAND (arg
, 1), 0))
3229 *cval1
= TREE_OPERAND (arg
, 0);
3230 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
3232 else if (*cval2
== 0)
3233 *cval2
= TREE_OPERAND (arg
, 0);
3234 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
3239 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
3241 else if (*cval2
== 0)
3242 *cval2
= TREE_OPERAND (arg
, 1);
3243 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
3255 /* ARG is a tree that is known to contain just arithmetic operations and
3256 comparisons. Evaluate the operations in the tree substituting NEW0 for
3257 any occurrence of OLD0 as an operand of a comparison and likewise for
3261 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
3262 tree old1
, tree new1
)
3264 tree type
= TREE_TYPE (arg
);
3265 enum tree_code code
= TREE_CODE (arg
);
3266 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3268 /* We can handle some of the tcc_expression cases here. */
3269 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3271 else if (tclass
== tcc_expression
3272 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
3273 tclass
= tcc_binary
;
3278 return fold_build1_loc (loc
, code
, type
,
3279 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3280 old0
, new0
, old1
, new1
));
3283 return fold_build2_loc (loc
, code
, type
,
3284 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3285 old0
, new0
, old1
, new1
),
3286 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3287 old0
, new0
, old1
, new1
));
3289 case tcc_expression
:
3293 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
3297 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
3301 return fold_build3_loc (loc
, code
, type
,
3302 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3303 old0
, new0
, old1
, new1
),
3304 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3305 old0
, new0
, old1
, new1
),
3306 eval_subst (loc
, TREE_OPERAND (arg
, 2),
3307 old0
, new0
, old1
, new1
));
3311 /* Fall through - ??? */
3313 case tcc_comparison
:
3315 tree arg0
= TREE_OPERAND (arg
, 0);
3316 tree arg1
= TREE_OPERAND (arg
, 1);
3318 /* We need to check both for exact equality and tree equality. The
3319 former will be true if the operand has a side-effect. In that
3320 case, we know the operand occurred exactly once. */
3322 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3324 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3327 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3329 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
3332 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
3340 /* Return a tree for the case when the result of an expression is RESULT
3341 converted to TYPE and OMITTED was previously an operand of the expression
3342 but is now not needed (e.g., we folded OMITTED * 0).
3344 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3345 the conversion of RESULT to TYPE. */
3348 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
3350 tree t
= fold_convert_loc (loc
, type
, result
);
3352 /* If the resulting operand is an empty statement, just return the omitted
3353 statement casted to void. */
3354 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
3355 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3356 fold_ignored_result (omitted
));
3358 if (TREE_SIDE_EFFECTS (omitted
))
3359 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3360 fold_ignored_result (omitted
), t
);
3362 return non_lvalue_loc (loc
, t
);
3365 /* Return a tree for the case when the result of an expression is RESULT
3366 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3367 of the expression but are now not needed.
3369 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3370 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3371 evaluated before OMITTED2. Otherwise, if neither has side effects,
3372 just do the conversion of RESULT to TYPE. */
3375 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3376 tree omitted1
, tree omitted2
)
3378 tree t
= fold_convert_loc (loc
, type
, result
);
3380 if (TREE_SIDE_EFFECTS (omitted2
))
3381 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3382 if (TREE_SIDE_EFFECTS (omitted1
))
3383 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3385 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3389 /* Return a simplified tree node for the truth-negation of ARG. This
3390 never alters ARG itself. We assume that ARG is an operation that
3391 returns a truth value (0 or 1).
3393 FIXME: one would think we would fold the result, but it causes
3394 problems with the dominator optimizer. */
3397 fold_truth_not_expr (location_t loc
, tree arg
)
3399 tree type
= TREE_TYPE (arg
);
3400 enum tree_code code
= TREE_CODE (arg
);
3401 location_t loc1
, loc2
;
3403 /* If this is a comparison, we can simply invert it, except for
3404 floating-point non-equality comparisons, in which case we just
3405 enclose a TRUTH_NOT_EXPR around what we have. */
3407 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3409 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3410 if (FLOAT_TYPE_P (op_type
)
3411 && flag_trapping_math
3412 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3413 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3416 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
3417 if (code
== ERROR_MARK
)
3420 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3421 TREE_OPERAND (arg
, 1));
3427 return constant_boolean_node (integer_zerop (arg
), type
);
3429 case TRUTH_AND_EXPR
:
3430 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3431 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3432 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3433 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3434 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3437 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3438 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3439 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3440 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3441 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3443 case TRUTH_XOR_EXPR
:
3444 /* Here we can invert either operand. We invert the first operand
3445 unless the second operand is a TRUTH_NOT_EXPR in which case our
3446 result is the XOR of the first operand with the inside of the
3447 negation of the second operand. */
3449 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3450 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3451 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3453 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3454 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3455 TREE_OPERAND (arg
, 1));
3457 case TRUTH_ANDIF_EXPR
:
3458 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3459 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3460 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3461 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3462 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3464 case TRUTH_ORIF_EXPR
:
3465 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3466 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3467 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3468 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3469 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3471 case TRUTH_NOT_EXPR
:
3472 return TREE_OPERAND (arg
, 0);
3476 tree arg1
= TREE_OPERAND (arg
, 1);
3477 tree arg2
= TREE_OPERAND (arg
, 2);
3479 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3480 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3482 /* A COND_EXPR may have a throw as one operand, which
3483 then has void type. Just leave void operands
3485 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3486 VOID_TYPE_P (TREE_TYPE (arg1
))
3487 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3488 VOID_TYPE_P (TREE_TYPE (arg2
))
3489 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3493 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3494 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3495 TREE_OPERAND (arg
, 0),
3496 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3498 case NON_LVALUE_EXPR
:
3499 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3500 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3503 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3504 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3506 /* ... fall through ... */
3509 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3510 return build1_loc (loc
, TREE_CODE (arg
), type
,
3511 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3514 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3516 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3519 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3521 case CLEANUP_POINT_EXPR
:
3522 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3523 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3524 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3531 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3532 assume that ARG is an operation that returns a truth value (0 or 1
3533 for scalars, 0 or -1 for vectors). Return the folded expression if
3534 folding is successful. Otherwise, return NULL_TREE. */
3537 fold_invert_truthvalue (location_t loc
, tree arg
)
3539 tree type
= TREE_TYPE (arg
);
3540 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
3546 /* Return a simplified tree node for the truth-negation of ARG. This
3547 never alters ARG itself. We assume that ARG is an operation that
3548 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3551 invert_truthvalue_loc (location_t loc
, tree arg
)
3553 if (TREE_CODE (arg
) == ERROR_MARK
)
3556 tree type
= TREE_TYPE (arg
);
3557 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
3563 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3564 operands are another bit-wise operation with a common input. If so,
3565 distribute the bit operations to save an operation and possibly two if
3566 constants are involved. For example, convert
3567 (A | B) & (A | C) into A | (B & C)
3568 Further simplification will occur if B and C are constants.
3570 If this optimization cannot be done, 0 will be returned. */
3573 distribute_bit_expr (location_t loc
, enum tree_code code
, tree type
,
3574 tree arg0
, tree arg1
)
3579 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3580 || TREE_CODE (arg0
) == code
3581 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3582 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
3585 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3587 common
= TREE_OPERAND (arg0
, 0);
3588 left
= TREE_OPERAND (arg0
, 1);
3589 right
= TREE_OPERAND (arg1
, 1);
3591 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3593 common
= TREE_OPERAND (arg0
, 0);
3594 left
= TREE_OPERAND (arg0
, 1);
3595 right
= TREE_OPERAND (arg1
, 0);
3597 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3599 common
= TREE_OPERAND (arg0
, 1);
3600 left
= TREE_OPERAND (arg0
, 0);
3601 right
= TREE_OPERAND (arg1
, 1);
3603 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3605 common
= TREE_OPERAND (arg0
, 1);
3606 left
= TREE_OPERAND (arg0
, 0);
3607 right
= TREE_OPERAND (arg1
, 0);
3612 common
= fold_convert_loc (loc
, type
, common
);
3613 left
= fold_convert_loc (loc
, type
, left
);
3614 right
= fold_convert_loc (loc
, type
, right
);
3615 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, common
,
3616 fold_build2_loc (loc
, code
, type
, left
, right
));
3619 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3620 with code CODE. This optimization is unsafe. */
3622 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3623 tree arg0
, tree arg1
)
3625 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3626 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3628 /* (A / C) +- (B / C) -> (A +- B) / C. */
3630 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3631 TREE_OPERAND (arg1
, 1), 0))
3632 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3633 fold_build2_loc (loc
, code
, type
,
3634 TREE_OPERAND (arg0
, 0),
3635 TREE_OPERAND (arg1
, 0)),
3636 TREE_OPERAND (arg0
, 1));
3638 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3639 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3640 TREE_OPERAND (arg1
, 0), 0)
3641 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3642 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3644 REAL_VALUE_TYPE r0
, r1
;
3645 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3646 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3648 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3650 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3651 real_arithmetic (&r0
, code
, &r0
, &r1
);
3652 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3653 TREE_OPERAND (arg0
, 0),
3654 build_real (type
, r0
));
3660 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3661 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3664 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3665 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
, int unsignedp
)
3667 tree result
, bftype
;
3671 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3672 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3673 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3674 && tree_fits_shwi_p (size
)
3675 && tree_to_shwi (size
) == bitsize
)
3676 return fold_convert_loc (loc
, type
, inner
);
3680 if (TYPE_PRECISION (bftype
) != bitsize
3681 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3682 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3684 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3685 size_int (bitsize
), bitsize_int (bitpos
));
3688 result
= fold_convert_loc (loc
, type
, result
);
3693 /* Optimize a bit-field compare.
3695 There are two cases: First is a compare against a constant and the
3696 second is a comparison of two items where the fields are at the same
3697 bit position relative to the start of a chunk (byte, halfword, word)
3698 large enough to contain it. In these cases we can avoid the shift
3699 implicit in bitfield extractions.
3701 For constants, we emit a compare of the shifted constant with the
3702 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3703 compared. For two fields at the same position, we do the ANDs with the
3704 similar mask and compare the result of the ANDs.
3706 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3707 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3708 are the left and right operands of the comparison, respectively.
3710 If the optimization described above can be done, we return the resulting
3711 tree. Otherwise we return zero. */
3714 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3715 tree compare_type
, tree lhs
, tree rhs
)
3717 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3718 tree type
= TREE_TYPE (lhs
);
3720 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3721 machine_mode lmode
, rmode
, nmode
;
3722 int lunsignedp
, runsignedp
;
3723 int lvolatilep
= 0, rvolatilep
= 0;
3724 tree linner
, rinner
= NULL_TREE
;
3728 /* Get all the information about the extractions being done. If the bit size
3729 if the same as the size of the underlying object, we aren't doing an
3730 extraction at all and so can do nothing. We also don't want to
3731 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3732 then will no longer be able to replace it. */
3733 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3734 &lunsignedp
, &lvolatilep
, false);
3735 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3736 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
|| lvolatilep
)
3741 /* If this is not a constant, we can only do something if bit positions,
3742 sizes, and signedness are the same. */
3743 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3744 &runsignedp
, &rvolatilep
, false);
3746 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3747 || lunsignedp
!= runsignedp
|| offset
!= 0
3748 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
|| rvolatilep
)
3752 /* See if we can find a mode to refer to this field. We should be able to,
3753 but fail if we can't. */
3754 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3755 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3756 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3757 TYPE_ALIGN (TREE_TYPE (rinner
))),
3759 if (nmode
== VOIDmode
)
3762 /* Set signed and unsigned types of the precision of this mode for the
3764 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3766 /* Compute the bit position and size for the new reference and our offset
3767 within it. If the new reference is the same size as the original, we
3768 won't optimize anything, so return zero. */
3769 nbitsize
= GET_MODE_BITSIZE (nmode
);
3770 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3772 if (nbitsize
== lbitsize
)
3775 if (BYTES_BIG_ENDIAN
)
3776 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3778 /* Make the mask to be used against the extracted field. */
3779 mask
= build_int_cst_type (unsigned_type
, -1);
3780 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3781 mask
= const_binop (RSHIFT_EXPR
, mask
,
3782 size_int (nbitsize
- lbitsize
- lbitpos
));
3785 /* If not comparing with constant, just rework the comparison
3787 return fold_build2_loc (loc
, code
, compare_type
,
3788 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3789 make_bit_field_ref (loc
, linner
,
3794 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3795 make_bit_field_ref (loc
, rinner
,
3801 /* Otherwise, we are handling the constant case. See if the constant is too
3802 big for the field. Warn and return a tree of for 0 (false) if so. We do
3803 this not only for its own sake, but to avoid having to test for this
3804 error case below. If we didn't, we might generate wrong code.
3806 For unsigned fields, the constant shifted right by the field length should
3807 be all zero. For signed fields, the high-order bits should agree with
3812 if (wi::lrshift (rhs
, lbitsize
) != 0)
3814 warning (0, "comparison is always %d due to width of bit-field",
3816 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3821 wide_int tem
= wi::arshift (rhs
, lbitsize
- 1);
3822 if (tem
!= 0 && tem
!= -1)
3824 warning (0, "comparison is always %d due to width of bit-field",
3826 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3830 /* Single-bit compares should always be against zero. */
3831 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3833 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3834 rhs
= build_int_cst (type
, 0);
3837 /* Make a new bitfield reference, shift the constant over the
3838 appropriate number of bits and mask it with the computed mask
3839 (in case this was a signed field). If we changed it, make a new one. */
3840 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3842 rhs
= const_binop (BIT_AND_EXPR
,
3843 const_binop (LSHIFT_EXPR
,
3844 fold_convert_loc (loc
, unsigned_type
, rhs
),
3845 size_int (lbitpos
)),
3848 lhs
= build2_loc (loc
, code
, compare_type
,
3849 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3853 /* Subroutine for fold_truth_andor_1: decode a field reference.
3855 If EXP is a comparison reference, we return the innermost reference.
3857 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3858 set to the starting bit number.
3860 If the innermost field can be completely contained in a mode-sized
3861 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3863 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3864 otherwise it is not changed.
3866 *PUNSIGNEDP is set to the signedness of the field.
3868 *PMASK is set to the mask used. This is either contained in a
3869 BIT_AND_EXPR or derived from the width of the field.
3871 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3873 Return 0 if this is not a component reference or is one that we can't
3874 do anything with. */
3877 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3878 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
3879 int *punsignedp
, int *pvolatilep
,
3880 tree
*pmask
, tree
*pand_mask
)
3882 tree outer_type
= 0;
3884 tree mask
, inner
, offset
;
3886 unsigned int precision
;
3888 /* All the optimizations using this function assume integer fields.
3889 There are problems with FP fields since the type_for_size call
3890 below can fail for, e.g., XFmode. */
3891 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3894 /* We are interested in the bare arrangement of bits, so strip everything
3895 that doesn't affect the machine mode. However, record the type of the
3896 outermost expression if it may matter below. */
3897 if (CONVERT_EXPR_P (exp
)
3898 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3899 outer_type
= TREE_TYPE (exp
);
3902 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3904 and_mask
= TREE_OPERAND (exp
, 1);
3905 exp
= TREE_OPERAND (exp
, 0);
3906 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3907 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3911 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3912 punsignedp
, pvolatilep
, false);
3913 if ((inner
== exp
&& and_mask
== 0)
3914 || *pbitsize
< 0 || offset
!= 0
3915 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3918 /* If the number of bits in the reference is the same as the bitsize of
3919 the outer type, then the outer type gives the signedness. Otherwise
3920 (in case of a small bitfield) the signedness is unchanged. */
3921 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3922 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3924 /* Compute the mask to access the bitfield. */
3925 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3926 precision
= TYPE_PRECISION (unsigned_type
);
3928 mask
= build_int_cst_type (unsigned_type
, -1);
3930 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3931 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3933 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3935 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3936 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3939 *pand_mask
= and_mask
;
3943 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3944 bit positions and MASK is SIGNED. */
3947 all_ones_mask_p (const_tree mask
, unsigned int size
)
3949 tree type
= TREE_TYPE (mask
);
3950 unsigned int precision
= TYPE_PRECISION (type
);
3952 /* If this function returns true when the type of the mask is
3953 UNSIGNED, then there will be errors. In particular see
3954 gcc.c-torture/execute/990326-1.c. There does not appear to be
3955 any documentation paper trail as to why this is so. But the pre
3956 wide-int worked with that restriction and it has been preserved
3958 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
3961 return wi::mask (size
, false, precision
) == mask
;
3964 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3965 represents the sign bit of EXP's type. If EXP represents a sign
3966 or zero extension, also test VAL against the unextended type.
3967 The return value is the (sub)expression whose sign bit is VAL,
3968 or NULL_TREE otherwise. */
3971 sign_bit_p (tree exp
, const_tree val
)
3976 /* Tree EXP must have an integral type. */
3977 t
= TREE_TYPE (exp
);
3978 if (! INTEGRAL_TYPE_P (t
))
3981 /* Tree VAL must be an integer constant. */
3982 if (TREE_CODE (val
) != INTEGER_CST
3983 || TREE_OVERFLOW (val
))
3986 width
= TYPE_PRECISION (t
);
3987 if (wi::only_sign_bit_p (val
, width
))
3990 /* Handle extension from a narrower type. */
3991 if (TREE_CODE (exp
) == NOP_EXPR
3992 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3993 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3998 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3999 to be evaluated unconditionally. */
4002 simple_operand_p (const_tree exp
)
4004 /* Strip any conversions that don't change the machine mode. */
4007 return (CONSTANT_CLASS_P (exp
)
4008 || TREE_CODE (exp
) == SSA_NAME
4010 && ! TREE_ADDRESSABLE (exp
)
4011 && ! TREE_THIS_VOLATILE (exp
)
4012 && ! DECL_NONLOCAL (exp
)
4013 /* Don't regard global variables as simple. They may be
4014 allocated in ways unknown to the compiler (shared memory,
4015 #pragma weak, etc). */
4016 && ! TREE_PUBLIC (exp
)
4017 && ! DECL_EXTERNAL (exp
)
4018 /* Weakrefs are not safe to be read, since they can be NULL.
4019 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4020 have DECL_WEAK flag set. */
4021 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
4022 /* Loading a static variable is unduly expensive, but global
4023 registers aren't expensive. */
4024 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
4027 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4028 to be evaluated unconditionally.
4029 I addition to simple_operand_p, we assume that comparisons, conversions,
4030 and logic-not operations are simple, if their operands are simple, too. */
4033 simple_operand_p_2 (tree exp
)
4035 enum tree_code code
;
4037 if (TREE_SIDE_EFFECTS (exp
)
4038 || tree_could_trap_p (exp
))
4041 while (CONVERT_EXPR_P (exp
))
4042 exp
= TREE_OPERAND (exp
, 0);
4044 code
= TREE_CODE (exp
);
4046 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4047 return (simple_operand_p (TREE_OPERAND (exp
, 0))
4048 && simple_operand_p (TREE_OPERAND (exp
, 1)));
4050 if (code
== TRUTH_NOT_EXPR
)
4051 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
4053 return simple_operand_p (exp
);
4057 /* The following functions are subroutines to fold_range_test and allow it to
4058 try to change a logical combination of comparisons into a range test.
4061 X == 2 || X == 3 || X == 4 || X == 5
4065 (unsigned) (X - 2) <= 3
4067 We describe each set of comparisons as being either inside or outside
4068 a range, using a variable named like IN_P, and then describe the
4069 range with a lower and upper bound. If one of the bounds is omitted,
4070 it represents either the highest or lowest value of the type.
4072 In the comments below, we represent a range by two numbers in brackets
4073 preceded by a "+" to designate being inside that range, or a "-" to
4074 designate being outside that range, so the condition can be inverted by
4075 flipping the prefix. An omitted bound is represented by a "-". For
4076 example, "- [-, 10]" means being outside the range starting at the lowest
4077 possible value and ending at 10, in other words, being greater than 10.
4078 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
4081 We set up things so that the missing bounds are handled in a consistent
4082 manner so neither a missing bound nor "true" and "false" need to be
4083 handled using a special case. */
4085 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4086 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4087 and UPPER1_P are nonzero if the respective argument is an upper bound
4088 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4089 must be specified for a comparison. ARG1 will be converted to ARG0's
4090 type if both are specified. */
4093 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
4094 tree arg1
, int upper1_p
)
4100 /* If neither arg represents infinity, do the normal operation.
4101 Else, if not a comparison, return infinity. Else handle the special
4102 comparison rules. Note that most of the cases below won't occur, but
4103 are handled for consistency. */
4105 if (arg0
!= 0 && arg1
!= 0)
4107 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
4108 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
4110 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
4113 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4116 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4117 for neither. In real maths, we cannot assume open ended ranges are
4118 the same. But, this is computer arithmetic, where numbers are finite.
4119 We can therefore make the transformation of any unbounded range with
4120 the value Z, Z being greater than any representable number. This permits
4121 us to treat unbounded ranges as equal. */
4122 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
4123 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
4127 result
= sgn0
== sgn1
;
4130 result
= sgn0
!= sgn1
;
4133 result
= sgn0
< sgn1
;
4136 result
= sgn0
<= sgn1
;
4139 result
= sgn0
> sgn1
;
4142 result
= sgn0
>= sgn1
;
4148 return constant_boolean_node (result
, type
);
4151 /* Helper routine for make_range. Perform one step for it, return
4152 new expression if the loop should continue or NULL_TREE if it should
4156 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
4157 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
4158 bool *strict_overflow_p
)
4160 tree arg0_type
= TREE_TYPE (arg0
);
4161 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
4162 int in_p
= *p_in_p
, n_in_p
;
4166 case TRUTH_NOT_EXPR
:
4167 /* We can only do something if the range is testing for zero. */
4168 if (low
== NULL_TREE
|| high
== NULL_TREE
4169 || ! integer_zerop (low
) || ! integer_zerop (high
))
4174 case EQ_EXPR
: case NE_EXPR
:
4175 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4176 /* We can only do something if the range is testing for zero
4177 and if the second operand is an integer constant. Note that
4178 saying something is "in" the range we make is done by
4179 complementing IN_P since it will set in the initial case of
4180 being not equal to zero; "out" is leaving it alone. */
4181 if (low
== NULL_TREE
|| high
== NULL_TREE
4182 || ! integer_zerop (low
) || ! integer_zerop (high
)
4183 || TREE_CODE (arg1
) != INTEGER_CST
)
4188 case NE_EXPR
: /* - [c, c] */
4191 case EQ_EXPR
: /* + [c, c] */
4192 in_p
= ! in_p
, low
= high
= arg1
;
4194 case GT_EXPR
: /* - [-, c] */
4195 low
= 0, high
= arg1
;
4197 case GE_EXPR
: /* + [c, -] */
4198 in_p
= ! in_p
, low
= arg1
, high
= 0;
4200 case LT_EXPR
: /* - [c, -] */
4201 low
= arg1
, high
= 0;
4203 case LE_EXPR
: /* + [-, c] */
4204 in_p
= ! in_p
, low
= 0, high
= arg1
;
4210 /* If this is an unsigned comparison, we also know that EXP is
4211 greater than or equal to zero. We base the range tests we make
4212 on that fact, so we record it here so we can parse existing
4213 range tests. We test arg0_type since often the return type
4214 of, e.g. EQ_EXPR, is boolean. */
4215 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4217 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4219 build_int_cst (arg0_type
, 0),
4223 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
4225 /* If the high bound is missing, but we have a nonzero low
4226 bound, reverse the range so it goes from zero to the low bound
4228 if (high
== 0 && low
&& ! integer_zerop (low
))
4231 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
4232 build_int_cst (TREE_TYPE (low
), 1), 0);
4233 low
= build_int_cst (arg0_type
, 0);
4243 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4244 low and high are non-NULL, then normalize will DTRT. */
4245 if (!TYPE_UNSIGNED (arg0_type
)
4246 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4248 if (low
== NULL_TREE
)
4249 low
= TYPE_MIN_VALUE (arg0_type
);
4250 if (high
== NULL_TREE
)
4251 high
= TYPE_MAX_VALUE (arg0_type
);
4254 /* (-x) IN [a,b] -> x in [-b, -a] */
4255 n_low
= range_binop (MINUS_EXPR
, exp_type
,
4256 build_int_cst (exp_type
, 0),
4258 n_high
= range_binop (MINUS_EXPR
, exp_type
,
4259 build_int_cst (exp_type
, 0),
4261 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
4267 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
4268 build_int_cst (exp_type
, 1));
4272 if (TREE_CODE (arg1
) != INTEGER_CST
)
4275 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4276 move a constant to the other side. */
4277 if (!TYPE_UNSIGNED (arg0_type
)
4278 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4281 /* If EXP is signed, any overflow in the computation is undefined,
4282 so we don't worry about it so long as our computations on
4283 the bounds don't overflow. For unsigned, overflow is defined
4284 and this is exactly the right thing. */
4285 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4286 arg0_type
, low
, 0, arg1
, 0);
4287 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4288 arg0_type
, high
, 1, arg1
, 0);
4289 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4290 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4293 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4294 *strict_overflow_p
= true;
4297 /* Check for an unsigned range which has wrapped around the maximum
4298 value thus making n_high < n_low, and normalize it. */
4299 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4301 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4302 build_int_cst (TREE_TYPE (n_high
), 1), 0);
4303 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4304 build_int_cst (TREE_TYPE (n_low
), 1), 0);
4306 /* If the range is of the form +/- [ x+1, x ], we won't
4307 be able to normalize it. But then, it represents the
4308 whole range or the empty set, so make it
4310 if (tree_int_cst_equal (n_low
, low
)
4311 && tree_int_cst_equal (n_high
, high
))
4317 low
= n_low
, high
= n_high
;
4325 case NON_LVALUE_EXPR
:
4326 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4329 if (! INTEGRAL_TYPE_P (arg0_type
)
4330 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4331 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4334 n_low
= low
, n_high
= high
;
4337 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
4340 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
4342 /* If we're converting arg0 from an unsigned type, to exp,
4343 a signed type, we will be doing the comparison as unsigned.
4344 The tests above have already verified that LOW and HIGH
4347 So we have to ensure that we will handle large unsigned
4348 values the same way that the current signed bounds treat
4351 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4355 /* For fixed-point modes, we need to pass the saturating flag
4356 as the 2nd parameter. */
4357 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
4359 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
4360 TYPE_SATURATING (arg0_type
));
4363 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4365 /* A range without an upper bound is, naturally, unbounded.
4366 Since convert would have cropped a very large value, use
4367 the max value for the destination type. */
4369 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4370 : TYPE_MAX_VALUE (arg0_type
);
4372 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4373 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4374 fold_convert_loc (loc
, arg0_type
,
4376 build_int_cst (arg0_type
, 1));
4378 /* If the low bound is specified, "and" the range with the
4379 range for which the original unsigned value will be
4383 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4384 1, fold_convert_loc (loc
, arg0_type
,
4389 in_p
= (n_in_p
== in_p
);
4393 /* Otherwise, "or" the range with the range of the input
4394 that will be interpreted as negative. */
4395 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4396 1, fold_convert_loc (loc
, arg0_type
,
4401 in_p
= (in_p
!= n_in_p
);
4415 /* Given EXP, a logical expression, set the range it is testing into
4416 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4417 actually being tested. *PLOW and *PHIGH will be made of the same
4418 type as the returned expression. If EXP is not a comparison, we
4419 will most likely not be returning a useful value and range. Set
4420 *STRICT_OVERFLOW_P to true if the return value is only valid
4421 because signed overflow is undefined; otherwise, do not change
4422 *STRICT_OVERFLOW_P. */
4425 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4426 bool *strict_overflow_p
)
4428 enum tree_code code
;
4429 tree arg0
, arg1
= NULL_TREE
;
4430 tree exp_type
, nexp
;
4433 location_t loc
= EXPR_LOCATION (exp
);
4435 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4436 and see if we can refine the range. Some of the cases below may not
4437 happen, but it doesn't seem worth worrying about this. We "continue"
4438 the outer loop when we've changed something; otherwise we "break"
4439 the switch, which will "break" the while. */
4442 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4446 code
= TREE_CODE (exp
);
4447 exp_type
= TREE_TYPE (exp
);
4450 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4452 if (TREE_OPERAND_LENGTH (exp
) > 0)
4453 arg0
= TREE_OPERAND (exp
, 0);
4454 if (TREE_CODE_CLASS (code
) == tcc_binary
4455 || TREE_CODE_CLASS (code
) == tcc_comparison
4456 || (TREE_CODE_CLASS (code
) == tcc_expression
4457 && TREE_OPERAND_LENGTH (exp
) > 1))
4458 arg1
= TREE_OPERAND (exp
, 1);
4460 if (arg0
== NULL_TREE
)
4463 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4464 &high
, &in_p
, strict_overflow_p
);
4465 if (nexp
== NULL_TREE
)
4470 /* If EXP is a constant, we can evaluate whether this is true or false. */
4471 if (TREE_CODE (exp
) == INTEGER_CST
)
4473 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4475 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4481 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4485 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4486 type, TYPE, return an expression to test if EXP is in (or out of, depending
4487 on IN_P) the range. Return 0 if the test couldn't be created. */
4490 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4491 tree low
, tree high
)
4493 tree etype
= TREE_TYPE (exp
), value
;
4495 #ifdef HAVE_canonicalize_funcptr_for_compare
4496 /* Disable this optimization for function pointer expressions
4497 on targets that require function pointer canonicalization. */
4498 if (HAVE_canonicalize_funcptr_for_compare
4499 && TREE_CODE (etype
) == POINTER_TYPE
4500 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4506 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4508 return invert_truthvalue_loc (loc
, value
);
4513 if (low
== 0 && high
== 0)
4514 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
4517 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4518 fold_convert_loc (loc
, etype
, high
));
4521 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4522 fold_convert_loc (loc
, etype
, low
));
4524 if (operand_equal_p (low
, high
, 0))
4525 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4526 fold_convert_loc (loc
, etype
, low
));
4528 if (integer_zerop (low
))
4530 if (! TYPE_UNSIGNED (etype
))
4532 etype
= unsigned_type_for (etype
);
4533 high
= fold_convert_loc (loc
, etype
, high
);
4534 exp
= fold_convert_loc (loc
, etype
, exp
);
4536 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4539 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4540 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4542 int prec
= TYPE_PRECISION (etype
);
4544 if (wi::mask (prec
- 1, false, prec
) == high
)
4546 if (TYPE_UNSIGNED (etype
))
4548 tree signed_etype
= signed_type_for (etype
);
4549 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4551 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4553 etype
= signed_etype
;
4554 exp
= fold_convert_loc (loc
, etype
, exp
);
4556 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4557 build_int_cst (etype
, 0));
4561 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4562 This requires wrap-around arithmetics for the type of the expression.
4563 First make sure that arithmetics in this type is valid, then make sure
4564 that it wraps around. */
4565 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4566 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4567 TYPE_UNSIGNED (etype
));
4569 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4571 tree utype
, minv
, maxv
;
4573 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4574 for the type in question, as we rely on this here. */
4575 utype
= unsigned_type_for (etype
);
4576 maxv
= fold_convert_loc (loc
, utype
, TYPE_MAX_VALUE (etype
));
4577 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4578 build_int_cst (TREE_TYPE (maxv
), 1), 1);
4579 minv
= fold_convert_loc (loc
, utype
, TYPE_MIN_VALUE (etype
));
4581 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4588 high
= fold_convert_loc (loc
, etype
, high
);
4589 low
= fold_convert_loc (loc
, etype
, low
);
4590 exp
= fold_convert_loc (loc
, etype
, exp
);
4592 value
= const_binop (MINUS_EXPR
, high
, low
);
4595 if (POINTER_TYPE_P (etype
))
4597 if (value
!= 0 && !TREE_OVERFLOW (value
))
4599 low
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (low
), low
);
4600 return build_range_check (loc
, type
,
4601 fold_build_pointer_plus_loc (loc
, exp
, low
),
4602 1, build_int_cst (etype
, 0), value
);
4607 if (value
!= 0 && !TREE_OVERFLOW (value
))
4608 return build_range_check (loc
, type
,
4609 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4610 1, build_int_cst (etype
, 0), value
);
4615 /* Return the predecessor of VAL in its type, handling the infinite case. */
4618 range_predecessor (tree val
)
4620 tree type
= TREE_TYPE (val
);
4622 if (INTEGRAL_TYPE_P (type
)
4623 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4626 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
4627 build_int_cst (TREE_TYPE (val
), 1), 0);
4630 /* Return the successor of VAL in its type, handling the infinite case. */
4633 range_successor (tree val
)
4635 tree type
= TREE_TYPE (val
);
4637 if (INTEGRAL_TYPE_P (type
)
4638 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4641 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
4642 build_int_cst (TREE_TYPE (val
), 1), 0);
4645 /* Given two ranges, see if we can merge them into one. Return 1 if we
4646 can, 0 if we can't. Set the output range into the specified parameters. */
4649 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4650 tree high0
, int in1_p
, tree low1
, tree high1
)
4658 int lowequal
= ((low0
== 0 && low1
== 0)
4659 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4660 low0
, 0, low1
, 0)));
4661 int highequal
= ((high0
== 0 && high1
== 0)
4662 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4663 high0
, 1, high1
, 1)));
4665 /* Make range 0 be the range that starts first, or ends last if they
4666 start at the same value. Swap them if it isn't. */
4667 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4670 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4671 high1
, 1, high0
, 1))))
4673 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4674 tem
= low0
, low0
= low1
, low1
= tem
;
4675 tem
= high0
, high0
= high1
, high1
= tem
;
4678 /* Now flag two cases, whether the ranges are disjoint or whether the
4679 second range is totally subsumed in the first. Note that the tests
4680 below are simplified by the ones above. */
4681 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4682 high0
, 1, low1
, 0));
4683 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4684 high1
, 1, high0
, 1));
4686 /* We now have four cases, depending on whether we are including or
4687 excluding the two ranges. */
4690 /* If they don't overlap, the result is false. If the second range
4691 is a subset it is the result. Otherwise, the range is from the start
4692 of the second to the end of the first. */
4694 in_p
= 0, low
= high
= 0;
4696 in_p
= 1, low
= low1
, high
= high1
;
4698 in_p
= 1, low
= low1
, high
= high0
;
4701 else if (in0_p
&& ! in1_p
)
4703 /* If they don't overlap, the result is the first range. If they are
4704 equal, the result is false. If the second range is a subset of the
4705 first, and the ranges begin at the same place, we go from just after
4706 the end of the second range to the end of the first. If the second
4707 range is not a subset of the first, or if it is a subset and both
4708 ranges end at the same place, the range starts at the start of the
4709 first range and ends just before the second range.
4710 Otherwise, we can't describe this as a single range. */
4712 in_p
= 1, low
= low0
, high
= high0
;
4713 else if (lowequal
&& highequal
)
4714 in_p
= 0, low
= high
= 0;
4715 else if (subset
&& lowequal
)
4717 low
= range_successor (high1
);
4722 /* We are in the weird situation where high0 > high1 but
4723 high1 has no successor. Punt. */
4727 else if (! subset
|| highequal
)
4730 high
= range_predecessor (low1
);
4734 /* low0 < low1 but low1 has no predecessor. Punt. */
4742 else if (! in0_p
&& in1_p
)
4744 /* If they don't overlap, the result is the second range. If the second
4745 is a subset of the first, the result is false. Otherwise,
4746 the range starts just after the first range and ends at the
4747 end of the second. */
4749 in_p
= 1, low
= low1
, high
= high1
;
4750 else if (subset
|| highequal
)
4751 in_p
= 0, low
= high
= 0;
4754 low
= range_successor (high0
);
4759 /* high1 > high0 but high0 has no successor. Punt. */
4767 /* The case where we are excluding both ranges. Here the complex case
4768 is if they don't overlap. In that case, the only time we have a
4769 range is if they are adjacent. If the second is a subset of the
4770 first, the result is the first. Otherwise, the range to exclude
4771 starts at the beginning of the first range and ends at the end of the
4775 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4776 range_successor (high0
),
4778 in_p
= 0, low
= low0
, high
= high1
;
4781 /* Canonicalize - [min, x] into - [-, x]. */
4782 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4783 switch (TREE_CODE (TREE_TYPE (low0
)))
4786 if (TYPE_PRECISION (TREE_TYPE (low0
))
4787 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4791 if (tree_int_cst_equal (low0
,
4792 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4796 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4797 && integer_zerop (low0
))
4804 /* Canonicalize - [x, max] into - [x, -]. */
4805 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4806 switch (TREE_CODE (TREE_TYPE (high1
)))
4809 if (TYPE_PRECISION (TREE_TYPE (high1
))
4810 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4814 if (tree_int_cst_equal (high1
,
4815 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4819 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4820 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4822 build_int_cst (TREE_TYPE (high1
), 1),
4830 /* The ranges might be also adjacent between the maximum and
4831 minimum values of the given type. For
4832 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4833 return + [x + 1, y - 1]. */
4834 if (low0
== 0 && high1
== 0)
4836 low
= range_successor (high0
);
4837 high
= range_predecessor (low1
);
4838 if (low
== 0 || high
== 0)
4848 in_p
= 0, low
= low0
, high
= high0
;
4850 in_p
= 0, low
= low0
, high
= high1
;
4853 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4858 /* Subroutine of fold, looking inside expressions of the form
4859 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4860 of the COND_EXPR. This function is being used also to optimize
4861 A op B ? C : A, by reversing the comparison first.
4863 Return a folded expression whose code is not a COND_EXPR
4864 anymore, or NULL_TREE if no folding opportunity is found. */
4867 fold_cond_expr_with_comparison (location_t loc
, tree type
,
4868 tree arg0
, tree arg1
, tree arg2
)
4870 enum tree_code comp_code
= TREE_CODE (arg0
);
4871 tree arg00
= TREE_OPERAND (arg0
, 0);
4872 tree arg01
= TREE_OPERAND (arg0
, 1);
4873 tree arg1_type
= TREE_TYPE (arg1
);
4879 /* If we have A op 0 ? A : -A, consider applying the following
4882 A == 0? A : -A same as -A
4883 A != 0? A : -A same as A
4884 A >= 0? A : -A same as abs (A)
4885 A > 0? A : -A same as abs (A)
4886 A <= 0? A : -A same as -abs (A)
4887 A < 0? A : -A same as -abs (A)
4889 None of these transformations work for modes with signed
4890 zeros. If A is +/-0, the first two transformations will
4891 change the sign of the result (from +0 to -0, or vice
4892 versa). The last four will fix the sign of the result,
4893 even though the original expressions could be positive or
4894 negative, depending on the sign of A.
4896 Note that all these transformations are correct if A is
4897 NaN, since the two alternatives (A and -A) are also NaNs. */
4898 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4899 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4900 ? real_zerop (arg01
)
4901 : integer_zerop (arg01
))
4902 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4903 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4904 /* In the case that A is of the form X-Y, '-A' (arg2) may
4905 have already been folded to Y-X, check for that. */
4906 || (TREE_CODE (arg1
) == MINUS_EXPR
4907 && TREE_CODE (arg2
) == MINUS_EXPR
4908 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4909 TREE_OPERAND (arg2
, 1), 0)
4910 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4911 TREE_OPERAND (arg2
, 0), 0))))
4916 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
4917 return pedantic_non_lvalue_loc (loc
,
4918 fold_convert_loc (loc
, type
,
4919 negate_expr (tem
)));
4922 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4925 if (flag_trapping_math
)
4930 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4931 arg1
= fold_convert_loc (loc
, signed_type_for
4932 (TREE_TYPE (arg1
)), arg1
);
4933 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4934 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4937 if (flag_trapping_math
)
4941 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4942 arg1
= fold_convert_loc (loc
, signed_type_for
4943 (TREE_TYPE (arg1
)), arg1
);
4944 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4945 return negate_expr (fold_convert_loc (loc
, type
, tem
));
4947 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4951 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4952 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4953 both transformations are correct when A is NaN: A != 0
4954 is then true, and A == 0 is false. */
4956 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4957 && integer_zerop (arg01
) && integer_zerop (arg2
))
4959 if (comp_code
== NE_EXPR
)
4960 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4961 else if (comp_code
== EQ_EXPR
)
4962 return build_zero_cst (type
);
4965 /* Try some transformations of A op B ? A : B.
4967 A == B? A : B same as B
4968 A != B? A : B same as A
4969 A >= B? A : B same as max (A, B)
4970 A > B? A : B same as max (B, A)
4971 A <= B? A : B same as min (A, B)
4972 A < B? A : B same as min (B, A)
4974 As above, these transformations don't work in the presence
4975 of signed zeros. For example, if A and B are zeros of
4976 opposite sign, the first two transformations will change
4977 the sign of the result. In the last four, the original
4978 expressions give different results for (A=+0, B=-0) and
4979 (A=-0, B=+0), but the transformed expressions do not.
4981 The first two transformations are correct if either A or B
4982 is a NaN. In the first transformation, the condition will
4983 be false, and B will indeed be chosen. In the case of the
4984 second transformation, the condition A != B will be true,
4985 and A will be chosen.
4987 The conversions to max() and min() are not correct if B is
4988 a number and A is not. The conditions in the original
4989 expressions will be false, so all four give B. The min()
4990 and max() versions would give a NaN instead. */
4991 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4992 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4993 /* Avoid these transformations if the COND_EXPR may be used
4994 as an lvalue in the C++ front-end. PR c++/19199. */
4996 || VECTOR_TYPE_P (type
)
4997 || (! lang_GNU_CXX ()
4998 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4999 || ! maybe_lvalue_p (arg1
)
5000 || ! maybe_lvalue_p (arg2
)))
5002 tree comp_op0
= arg00
;
5003 tree comp_op1
= arg01
;
5004 tree comp_type
= TREE_TYPE (comp_op0
);
5006 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5007 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
5017 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
5019 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
5024 /* In C++ a ?: expression can be an lvalue, so put the
5025 operand which will be used if they are equal first
5026 so that we can convert this back to the
5027 corresponding COND_EXPR. */
5028 if (!HONOR_NANS (arg1
))
5030 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5031 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5032 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
5033 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
5034 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
5035 comp_op1
, comp_op0
);
5036 return pedantic_non_lvalue_loc (loc
,
5037 fold_convert_loc (loc
, type
, tem
));
5044 if (!HONOR_NANS (arg1
))
5046 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5047 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5048 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
5049 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
5050 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
5051 comp_op1
, comp_op0
);
5052 return pedantic_non_lvalue_loc (loc
,
5053 fold_convert_loc (loc
, type
, tem
));
5057 if (!HONOR_NANS (arg1
))
5058 return pedantic_non_lvalue_loc (loc
,
5059 fold_convert_loc (loc
, type
, arg2
));
5062 if (!HONOR_NANS (arg1
))
5063 return pedantic_non_lvalue_loc (loc
,
5064 fold_convert_loc (loc
, type
, arg1
));
5067 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5072 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5073 we might still be able to simplify this. For example,
5074 if C1 is one less or one more than C2, this might have started
5075 out as a MIN or MAX and been transformed by this function.
5076 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5078 if (INTEGRAL_TYPE_P (type
)
5079 && TREE_CODE (arg01
) == INTEGER_CST
5080 && TREE_CODE (arg2
) == INTEGER_CST
)
5084 if (TREE_CODE (arg1
) == INTEGER_CST
)
5086 /* We can replace A with C1 in this case. */
5087 arg1
= fold_convert_loc (loc
, type
, arg01
);
5088 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
5091 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5092 MIN_EXPR, to preserve the signedness of the comparison. */
5093 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5095 && operand_equal_p (arg01
,
5096 const_binop (PLUS_EXPR
, arg2
,
5097 build_int_cst (type
, 1)),
5100 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
5101 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5103 return pedantic_non_lvalue_loc (loc
,
5104 fold_convert_loc (loc
, type
, tem
));
5109 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5111 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5113 && operand_equal_p (arg01
,
5114 const_binop (MINUS_EXPR
, arg2
,
5115 build_int_cst (type
, 1)),
5118 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
5119 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5121 return pedantic_non_lvalue_loc (loc
,
5122 fold_convert_loc (loc
, type
, tem
));
5127 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5128 MAX_EXPR, to preserve the signedness of the comparison. */
5129 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5131 && operand_equal_p (arg01
,
5132 const_binop (MINUS_EXPR
, arg2
,
5133 build_int_cst (type
, 1)),
5136 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5137 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5139 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5144 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5145 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5147 && operand_equal_p (arg01
,
5148 const_binop (PLUS_EXPR
, arg2
,
5149 build_int_cst (type
, 1)),
5152 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5153 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5155 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5169 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5170 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5171 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5175 /* EXP is some logical combination of boolean tests. See if we can
5176 merge it into some range test. Return the new tree if so. */
5179 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
5182 int or_op
= (code
== TRUTH_ORIF_EXPR
5183 || code
== TRUTH_OR_EXPR
);
5184 int in0_p
, in1_p
, in_p
;
5185 tree low0
, low1
, low
, high0
, high1
, high
;
5186 bool strict_overflow_p
= false;
5188 const char * const warnmsg
= G_("assuming signed overflow does not occur "
5189 "when simplifying range test");
5191 if (!INTEGRAL_TYPE_P (type
))
5194 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
5195 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
5197 /* If this is an OR operation, invert both sides; we will invert
5198 again at the end. */
5200 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
5202 /* If both expressions are the same, if we can merge the ranges, and we
5203 can build the range test, return it or it inverted. If one of the
5204 ranges is always true or always false, consider it to be the same
5205 expression as the other. */
5206 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
5207 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
5209 && 0 != (tem
= (build_range_check (loc
, type
,
5211 : rhs
!= 0 ? rhs
: integer_zero_node
,
5214 if (strict_overflow_p
)
5215 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
5216 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
5219 /* On machines where the branch cost is expensive, if this is a
5220 short-circuited branch and the underlying object on both sides
5221 is the same, make a non-short-circuit operation. */
5222 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5223 && lhs
!= 0 && rhs
!= 0
5224 && (code
== TRUTH_ANDIF_EXPR
5225 || code
== TRUTH_ORIF_EXPR
)
5226 && operand_equal_p (lhs
, rhs
, 0))
5228 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5229 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5230 which cases we can't do this. */
5231 if (simple_operand_p (lhs
))
5232 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5233 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5236 else if (!lang_hooks
.decls
.global_bindings_p ()
5237 && !CONTAINS_PLACEHOLDER_P (lhs
))
5239 tree common
= save_expr (lhs
);
5241 if (0 != (lhs
= build_range_check (loc
, type
, common
,
5242 or_op
? ! in0_p
: in0_p
,
5244 && (0 != (rhs
= build_range_check (loc
, type
, common
,
5245 or_op
? ! in1_p
: in1_p
,
5248 if (strict_overflow_p
)
5249 fold_overflow_warning (warnmsg
,
5250 WARN_STRICT_OVERFLOW_COMPARISON
);
5251 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5252 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5261 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5262 bit value. Arrange things so the extra bits will be set to zero if and
5263 only if C is signed-extended to its full width. If MASK is nonzero,
5264 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5267 unextend (tree c
, int p
, int unsignedp
, tree mask
)
5269 tree type
= TREE_TYPE (c
);
5270 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
5273 if (p
== modesize
|| unsignedp
)
5276 /* We work by getting just the sign bit into the low-order bit, then
5277 into the high-order bit, then sign-extend. We then XOR that value
5279 temp
= build_int_cst (TREE_TYPE (c
), wi::extract_uhwi (c
, p
- 1, 1));
5281 /* We must use a signed type in order to get an arithmetic right shift.
5282 However, we must also avoid introducing accidental overflows, so that
5283 a subsequent call to integer_zerop will work. Hence we must
5284 do the type conversion here. At this point, the constant is either
5285 zero or one, and the conversion to a signed type can never overflow.
5286 We could get an overflow if this conversion is done anywhere else. */
5287 if (TYPE_UNSIGNED (type
))
5288 temp
= fold_convert (signed_type_for (type
), temp
);
5290 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
5291 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
5293 temp
= const_binop (BIT_AND_EXPR
, temp
,
5294 fold_convert (TREE_TYPE (c
), mask
));
5295 /* If necessary, convert the type back to match the type of C. */
5296 if (TYPE_UNSIGNED (type
))
5297 temp
= fold_convert (type
, temp
);
5299 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
5302 /* For an expression that has the form
5306 we can drop one of the inner expressions and simplify to
5310 LOC is the location of the resulting expression. OP is the inner
5311 logical operation; the left-hand side in the examples above, while CMPOP
5312 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5313 removing a condition that guards another, as in
5314 (A != NULL && A->...) || A == NULL
5315 which we must not transform. If RHS_ONLY is true, only eliminate the
5316 right-most operand of the inner logical operation. */
5319 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
5322 tree type
= TREE_TYPE (cmpop
);
5323 enum tree_code code
= TREE_CODE (cmpop
);
5324 enum tree_code truthop_code
= TREE_CODE (op
);
5325 tree lhs
= TREE_OPERAND (op
, 0);
5326 tree rhs
= TREE_OPERAND (op
, 1);
5327 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
5328 enum tree_code rhs_code
= TREE_CODE (rhs
);
5329 enum tree_code lhs_code
= TREE_CODE (lhs
);
5330 enum tree_code inv_code
;
5332 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
5335 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
5338 if (rhs_code
== truthop_code
)
5340 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
5341 if (newrhs
!= NULL_TREE
)
5344 rhs_code
= TREE_CODE (rhs
);
5347 if (lhs_code
== truthop_code
&& !rhs_only
)
5349 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
5350 if (newlhs
!= NULL_TREE
)
5353 lhs_code
= TREE_CODE (lhs
);
5357 inv_code
= invert_tree_comparison (code
, HONOR_NANS (type
));
5358 if (inv_code
== rhs_code
5359 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5360 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5362 if (!rhs_only
&& inv_code
== lhs_code
5363 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5364 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5366 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
5367 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5372 /* Find ways of folding logical expressions of LHS and RHS:
5373 Try to merge two comparisons to the same innermost item.
5374 Look for range tests like "ch >= '0' && ch <= '9'".
5375 Look for combinations of simple terms on machines with expensive branches
5376 and evaluate the RHS unconditionally.
5378 For example, if we have p->a == 2 && p->b == 4 and we can make an
5379 object large enough to span both A and B, we can do this with a comparison
5380 against the object ANDed with the a mask.
5382 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5383 operations to do this with one comparison.
5385 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5386 function and the one above.
5388 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5389 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5391 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5394 We return the simplified tree or 0 if no optimization is possible. */
5397 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
5400 /* If this is the "or" of two comparisons, we can do something if
5401 the comparisons are NE_EXPR. If this is the "and", we can do something
5402 if the comparisons are EQ_EXPR. I.e.,
5403 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5405 WANTED_CODE is this operation code. For single bit fields, we can
5406 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5407 comparison for one-bit fields. */
5409 enum tree_code wanted_code
;
5410 enum tree_code lcode
, rcode
;
5411 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5412 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5413 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5414 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5415 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5416 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5417 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5418 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5419 machine_mode lnmode
, rnmode
;
5420 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5421 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5422 tree l_const
, r_const
;
5423 tree lntype
, rntype
, result
;
5424 HOST_WIDE_INT first_bit
, end_bit
;
5427 /* Start by getting the comparison codes. Fail if anything is volatile.
5428 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5429 it were surrounded with a NE_EXPR. */
5431 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5434 lcode
= TREE_CODE (lhs
);
5435 rcode
= TREE_CODE (rhs
);
5437 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5439 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5440 build_int_cst (TREE_TYPE (lhs
), 0));
5444 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5446 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5447 build_int_cst (TREE_TYPE (rhs
), 0));
5451 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5452 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5455 ll_arg
= TREE_OPERAND (lhs
, 0);
5456 lr_arg
= TREE_OPERAND (lhs
, 1);
5457 rl_arg
= TREE_OPERAND (rhs
, 0);
5458 rr_arg
= TREE_OPERAND (rhs
, 1);
5460 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5461 if (simple_operand_p (ll_arg
)
5462 && simple_operand_p (lr_arg
))
5464 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5465 && operand_equal_p (lr_arg
, rr_arg
, 0))
5467 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5468 truth_type
, ll_arg
, lr_arg
);
5472 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5473 && operand_equal_p (lr_arg
, rl_arg
, 0))
5475 result
= combine_comparisons (loc
, code
, lcode
,
5476 swap_tree_comparison (rcode
),
5477 truth_type
, ll_arg
, lr_arg
);
5483 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5484 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5486 /* If the RHS can be evaluated unconditionally and its operands are
5487 simple, it wins to evaluate the RHS unconditionally on machines
5488 with expensive branches. In this case, this isn't a comparison
5489 that can be merged. */
5491 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5493 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5494 && simple_operand_p (rl_arg
)
5495 && simple_operand_p (rr_arg
))
5497 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5498 if (code
== TRUTH_OR_EXPR
5499 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5500 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5501 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5502 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5503 return build2_loc (loc
, NE_EXPR
, truth_type
,
5504 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5506 build_int_cst (TREE_TYPE (ll_arg
), 0));
5508 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5509 if (code
== TRUTH_AND_EXPR
5510 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5511 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5512 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5513 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5514 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5515 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5517 build_int_cst (TREE_TYPE (ll_arg
), 0));
5520 /* See if the comparisons can be merged. Then get all the parameters for
5523 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5524 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5528 ll_inner
= decode_field_reference (loc
, ll_arg
,
5529 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5530 &ll_unsignedp
, &volatilep
, &ll_mask
,
5532 lr_inner
= decode_field_reference (loc
, lr_arg
,
5533 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5534 &lr_unsignedp
, &volatilep
, &lr_mask
,
5536 rl_inner
= decode_field_reference (loc
, rl_arg
,
5537 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5538 &rl_unsignedp
, &volatilep
, &rl_mask
,
5540 rr_inner
= decode_field_reference (loc
, rr_arg
,
5541 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5542 &rr_unsignedp
, &volatilep
, &rr_mask
,
5545 /* It must be true that the inner operation on the lhs of each
5546 comparison must be the same if we are to be able to do anything.
5547 Then see if we have constants. If not, the same must be true for
5549 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5550 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5553 if (TREE_CODE (lr_arg
) == INTEGER_CST
5554 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5555 l_const
= lr_arg
, r_const
= rr_arg
;
5556 else if (lr_inner
== 0 || rr_inner
== 0
5557 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5560 l_const
= r_const
= 0;
5562 /* If either comparison code is not correct for our logical operation,
5563 fail. However, we can convert a one-bit comparison against zero into
5564 the opposite comparison against that bit being set in the field. */
5566 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5567 if (lcode
!= wanted_code
)
5569 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5571 /* Make the left operand unsigned, since we are only interested
5572 in the value of one bit. Otherwise we are doing the wrong
5581 /* This is analogous to the code for l_const above. */
5582 if (rcode
!= wanted_code
)
5584 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5593 /* See if we can find a mode that contains both fields being compared on
5594 the left. If we can't, fail. Otherwise, update all constants and masks
5595 to be relative to a field of that size. */
5596 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5597 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5598 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5599 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5601 if (lnmode
== VOIDmode
)
5604 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5605 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5606 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5607 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5609 if (BYTES_BIG_ENDIAN
)
5611 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5612 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5615 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5616 size_int (xll_bitpos
));
5617 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5618 size_int (xrl_bitpos
));
5622 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5623 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5624 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5625 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5626 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5629 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5631 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5636 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5637 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5638 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5639 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5640 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5643 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5645 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5649 /* If the right sides are not constant, do the same for it. Also,
5650 disallow this optimization if a size or signedness mismatch occurs
5651 between the left and right sides. */
5654 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5655 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5656 /* Make sure the two fields on the right
5657 correspond to the left without being swapped. */
5658 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5661 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5662 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5663 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5664 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5666 if (rnmode
== VOIDmode
)
5669 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5670 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5671 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5672 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5674 if (BYTES_BIG_ENDIAN
)
5676 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5677 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5680 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5682 size_int (xlr_bitpos
));
5683 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5685 size_int (xrr_bitpos
));
5687 /* Make a mask that corresponds to both fields being compared.
5688 Do this for both items being compared. If the operands are the
5689 same size and the bits being compared are in the same position
5690 then we can do this by masking both and comparing the masked
5692 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5693 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5694 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5696 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5697 ll_unsignedp
|| rl_unsignedp
);
5698 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5699 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5701 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5702 lr_unsignedp
|| rr_unsignedp
);
5703 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5704 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5706 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5709 /* There is still another way we can do something: If both pairs of
5710 fields being compared are adjacent, we may be able to make a wider
5711 field containing them both.
5713 Note that we still must mask the lhs/rhs expressions. Furthermore,
5714 the mask must be shifted to account for the shift done by
5715 make_bit_field_ref. */
5716 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5717 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5718 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5719 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5723 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5724 ll_bitsize
+ rl_bitsize
,
5725 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5726 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5727 lr_bitsize
+ rr_bitsize
,
5728 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5730 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5731 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5732 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5733 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5735 /* Convert to the smaller type before masking out unwanted bits. */
5737 if (lntype
!= rntype
)
5739 if (lnbitsize
> rnbitsize
)
5741 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5742 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5745 else if (lnbitsize
< rnbitsize
)
5747 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5748 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5753 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5754 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5756 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5757 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5759 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5765 /* Handle the case of comparisons with constants. If there is something in
5766 common between the masks, those bits of the constants must be the same.
5767 If not, the condition is always false. Test for this to avoid generating
5768 incorrect code below. */
5769 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5770 if (! integer_zerop (result
)
5771 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5772 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5774 if (wanted_code
== NE_EXPR
)
5776 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5777 return constant_boolean_node (true, truth_type
);
5781 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5782 return constant_boolean_node (false, truth_type
);
5786 /* Construct the expression we will return. First get the component
5787 reference we will make. Unless the mask is all ones the width of
5788 that field, perform the mask operation. Then compare with the
5790 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5791 ll_unsignedp
|| rl_unsignedp
);
5793 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5794 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5795 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5797 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5798 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
5801 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5805 optimize_minmax_comparison (location_t loc
, enum tree_code code
, tree type
,
5809 enum tree_code op_code
;
5812 int consts_equal
, consts_lt
;
5815 STRIP_SIGN_NOPS (arg0
);
5817 op_code
= TREE_CODE (arg0
);
5818 minmax_const
= TREE_OPERAND (arg0
, 1);
5819 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5820 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5821 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5822 inner
= TREE_OPERAND (arg0
, 0);
5824 /* If something does not permit us to optimize, return the original tree. */
5825 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5826 || TREE_CODE (comp_const
) != INTEGER_CST
5827 || TREE_OVERFLOW (comp_const
)
5828 || TREE_CODE (minmax_const
) != INTEGER_CST
5829 || TREE_OVERFLOW (minmax_const
))
5832 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5833 and GT_EXPR, doing the rest with recursive calls using logical
5837 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5840 = optimize_minmax_comparison (loc
,
5841 invert_tree_comparison (code
, false),
5844 return invert_truthvalue_loc (loc
, tem
);
5850 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5851 optimize_minmax_comparison
5852 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5853 optimize_minmax_comparison
5854 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
5857 if (op_code
== MAX_EXPR
&& consts_equal
)
5858 /* MAX (X, 0) == 0 -> X <= 0 */
5859 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5861 else if (op_code
== MAX_EXPR
&& consts_lt
)
5862 /* MAX (X, 0) == 5 -> X == 5 */
5863 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5865 else if (op_code
== MAX_EXPR
)
5866 /* MAX (X, 0) == -1 -> false */
5867 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5869 else if (consts_equal
)
5870 /* MIN (X, 0) == 0 -> X >= 0 */
5871 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5874 /* MIN (X, 0) == 5 -> false */
5875 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5878 /* MIN (X, 0) == -1 -> X == -1 */
5879 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5882 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5883 /* MAX (X, 0) > 0 -> X > 0
5884 MAX (X, 0) > 5 -> X > 5 */
5885 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5887 else if (op_code
== MAX_EXPR
)
5888 /* MAX (X, 0) > -1 -> true */
5889 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5891 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5892 /* MIN (X, 0) > 0 -> false
5893 MIN (X, 0) > 5 -> false */
5894 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5897 /* MIN (X, 0) > -1 -> X > -1 */
5898 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5905 /* T is an integer expression that is being multiplied, divided, or taken a
5906 modulus (CODE says which and what kind of divide or modulus) by a
5907 constant C. See if we can eliminate that operation by folding it with
5908 other operations already in T. WIDE_TYPE, if non-null, is a type that
5909 should be used for the computation if wider than our type.
5911 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5912 (X * 2) + (Y * 4). We must, however, be assured that either the original
5913 expression would not overflow or that overflow is undefined for the type
5914 in the language in question.
5916 If we return a non-null expression, it is an equivalent form of the
5917 original computation, but need not be in the original type.
5919 We set *STRICT_OVERFLOW_P to true if the return values depends on
5920 signed overflow being undefined. Otherwise we do not change
5921 *STRICT_OVERFLOW_P. */
5924 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5925 bool *strict_overflow_p
)
5927 /* To avoid exponential search depth, refuse to allow recursion past
5928 three levels. Beyond that (1) it's highly unlikely that we'll find
5929 something interesting and (2) we've probably processed it before
5930 when we built the inner expression. */
5939 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5946 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5947 bool *strict_overflow_p
)
5949 tree type
= TREE_TYPE (t
);
5950 enum tree_code tcode
= TREE_CODE (t
);
5951 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5952 > GET_MODE_SIZE (TYPE_MODE (type
)))
5953 ? wide_type
: type
);
5955 int same_p
= tcode
== code
;
5956 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5957 bool sub_strict_overflow_p
;
5959 /* Don't deal with constants of zero here; they confuse the code below. */
5960 if (integer_zerop (c
))
5963 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5964 op0
= TREE_OPERAND (t
, 0);
5966 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5967 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5969 /* Note that we need not handle conditional operations here since fold
5970 already handles those cases. So just do arithmetic here. */
5974 /* For a constant, we can always simplify if we are a multiply
5975 or (for divide and modulus) if it is a multiple of our constant. */
5976 if (code
== MULT_EXPR
5977 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
5978 return const_binop (code
, fold_convert (ctype
, t
),
5979 fold_convert (ctype
, c
));
5982 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5983 /* If op0 is an expression ... */
5984 if ((COMPARISON_CLASS_P (op0
)
5985 || UNARY_CLASS_P (op0
)
5986 || BINARY_CLASS_P (op0
)
5987 || VL_EXP_CLASS_P (op0
)
5988 || EXPRESSION_CLASS_P (op0
))
5989 /* ... and has wrapping overflow, and its type is smaller
5990 than ctype, then we cannot pass through as widening. */
5991 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
5992 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
)))
5993 && (TYPE_PRECISION (ctype
)
5994 > TYPE_PRECISION (TREE_TYPE (op0
))))
5995 /* ... or this is a truncation (t is narrower than op0),
5996 then we cannot pass through this narrowing. */
5997 || (TYPE_PRECISION (type
)
5998 < TYPE_PRECISION (TREE_TYPE (op0
)))
5999 /* ... or signedness changes for division or modulus,
6000 then we cannot pass through this conversion. */
6001 || (code
!= MULT_EXPR
6002 && (TYPE_UNSIGNED (ctype
)
6003 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
6004 /* ... or has undefined overflow while the converted to
6005 type has not, we cannot do the operation in the inner type
6006 as that would introduce undefined overflow. */
6007 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6008 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
)))
6009 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6012 /* Pass the constant down and see if we can make a simplification. If
6013 we can, replace this expression with the inner simplification for
6014 possible later conversion to our or some other type. */
6015 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6016 && TREE_CODE (t2
) == INTEGER_CST
6017 && !TREE_OVERFLOW (t2
)
6018 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
6020 ? ctype
: NULL_TREE
,
6021 strict_overflow_p
))))
6026 /* If widening the type changes it from signed to unsigned, then we
6027 must avoid building ABS_EXPR itself as unsigned. */
6028 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6030 tree cstype
= (*signed_type_for
) (ctype
);
6031 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6034 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6035 return fold_convert (ctype
, t1
);
6039 /* If the constant is negative, we cannot simplify this. */
6040 if (tree_int_cst_sgn (c
) == -1)
6044 /* For division and modulus, type can't be unsigned, as e.g.
6045 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6046 For signed types, even with wrapping overflow, this is fine. */
6047 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6049 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6051 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6054 case MIN_EXPR
: case MAX_EXPR
:
6055 /* If widening the type changes the signedness, then we can't perform
6056 this optimization as that changes the result. */
6057 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6060 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6061 sub_strict_overflow_p
= false;
6062 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6063 &sub_strict_overflow_p
)) != 0
6064 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6065 &sub_strict_overflow_p
)) != 0)
6067 if (tree_int_cst_sgn (c
) < 0)
6068 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6069 if (sub_strict_overflow_p
)
6070 *strict_overflow_p
= true;
6071 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6072 fold_convert (ctype
, t2
));
6076 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6077 /* If the second operand is constant, this is a multiplication
6078 or floor division, by a power of two, so we can treat it that
6079 way unless the multiplier or divisor overflows. Signed
6080 left-shift overflow is implementation-defined rather than
6081 undefined in C90, so do not convert signed left shift into
6083 if (TREE_CODE (op1
) == INTEGER_CST
6084 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6085 /* const_binop may not detect overflow correctly,
6086 so check for it explicitly here. */
6087 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
6088 && 0 != (t1
= fold_convert (ctype
,
6089 const_binop (LSHIFT_EXPR
,
6092 && !TREE_OVERFLOW (t1
))
6093 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6094 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6096 fold_convert (ctype
, op0
),
6098 c
, code
, wide_type
, strict_overflow_p
);
6101 case PLUS_EXPR
: case MINUS_EXPR
:
6102 /* See if we can eliminate the operation on both sides. If we can, we
6103 can return a new PLUS or MINUS. If we can't, the only remaining
6104 cases where we can do anything are if the second operand is a
6106 sub_strict_overflow_p
= false;
6107 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6108 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6109 if (t1
!= 0 && t2
!= 0
6110 && (code
== MULT_EXPR
6111 /* If not multiplication, we can only do this if both operands
6112 are divisible by c. */
6113 || (multiple_of_p (ctype
, op0
, c
)
6114 && multiple_of_p (ctype
, op1
, c
))))
6116 if (sub_strict_overflow_p
)
6117 *strict_overflow_p
= true;
6118 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6119 fold_convert (ctype
, t2
));
6122 /* If this was a subtraction, negate OP1 and set it to be an addition.
6123 This simplifies the logic below. */
6124 if (tcode
== MINUS_EXPR
)
6126 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
6127 /* If OP1 was not easily negatable, the constant may be OP0. */
6128 if (TREE_CODE (op0
) == INTEGER_CST
)
6130 std::swap (op0
, op1
);
6135 if (TREE_CODE (op1
) != INTEGER_CST
)
6138 /* If either OP1 or C are negative, this optimization is not safe for
6139 some of the division and remainder types while for others we need
6140 to change the code. */
6141 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
6143 if (code
== CEIL_DIV_EXPR
)
6144 code
= FLOOR_DIV_EXPR
;
6145 else if (code
== FLOOR_DIV_EXPR
)
6146 code
= CEIL_DIV_EXPR
;
6147 else if (code
!= MULT_EXPR
6148 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
6152 /* If it's a multiply or a division/modulus operation of a multiple
6153 of our constant, do the operation and verify it doesn't overflow. */
6154 if (code
== MULT_EXPR
6155 || wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6157 op1
= const_binop (code
, fold_convert (ctype
, op1
),
6158 fold_convert (ctype
, c
));
6159 /* We allow the constant to overflow with wrapping semantics. */
6161 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
6167 /* If we have an unsigned type, we cannot widen the operation since it
6168 will change the result if the original computation overflowed. */
6169 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
6172 /* If we were able to eliminate our operation from the first side,
6173 apply our operation to the second side and reform the PLUS. */
6174 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
6175 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
6177 /* The last case is if we are a multiply. In that case, we can
6178 apply the distributive law to commute the multiply and addition
6179 if the multiplication of the constants doesn't overflow
6180 and overflow is defined. With undefined overflow
6181 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6182 if (code
== MULT_EXPR
&& TYPE_OVERFLOW_WRAPS (ctype
))
6183 return fold_build2 (tcode
, ctype
,
6184 fold_build2 (code
, ctype
,
6185 fold_convert (ctype
, op0
),
6186 fold_convert (ctype
, c
)),
6192 /* We have a special case here if we are doing something like
6193 (C * 8) % 4 since we know that's zero. */
6194 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
6195 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
6196 /* If the multiplication can overflow we cannot optimize this. */
6197 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
6198 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
6199 && wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6201 *strict_overflow_p
= true;
6202 return omit_one_operand (type
, integer_zero_node
, op0
);
6205 /* ... fall through ... */
6207 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6208 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
6209 /* If we can extract our operation from the LHS, do so and return a
6210 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6211 do something only if the second operand is a constant. */
6213 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6214 strict_overflow_p
)) != 0)
6215 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6216 fold_convert (ctype
, op1
));
6217 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
6218 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
6219 strict_overflow_p
)) != 0)
6220 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6221 fold_convert (ctype
, t1
));
6222 else if (TREE_CODE (op1
) != INTEGER_CST
)
6225 /* If these are the same operation types, we can associate them
6226 assuming no overflow. */
6229 bool overflow_p
= false;
6230 bool overflow_mul_p
;
6231 signop sign
= TYPE_SIGN (ctype
);
6232 wide_int mul
= wi::mul (op1
, c
, sign
, &overflow_mul_p
);
6233 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
6235 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
6238 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6239 wide_int_to_tree (ctype
, mul
));
6242 /* If these operations "cancel" each other, we have the main
6243 optimizations of this pass, which occur when either constant is a
6244 multiple of the other, in which case we replace this with either an
6245 operation or CODE or TCODE.
6247 If we have an unsigned type, we cannot do this since it will change
6248 the result if the original computation overflowed. */
6249 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
6250 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
6251 || (tcode
== MULT_EXPR
6252 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
6253 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6254 && code
!= MULT_EXPR
)))
6256 if (wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6258 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6259 *strict_overflow_p
= true;
6260 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6261 fold_convert (ctype
,
6262 const_binop (TRUNC_DIV_EXPR
,
6265 else if (wi::multiple_of_p (c
, op1
, TYPE_SIGN (type
)))
6267 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6268 *strict_overflow_p
= true;
6269 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6270 fold_convert (ctype
,
6271 const_binop (TRUNC_DIV_EXPR
,
6284 /* Return a node which has the indicated constant VALUE (either 0 or
6285 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6286 and is of the indicated TYPE. */
6289 constant_boolean_node (bool value
, tree type
)
6291 if (type
== integer_type_node
)
6292 return value
? integer_one_node
: integer_zero_node
;
6293 else if (type
== boolean_type_node
)
6294 return value
? boolean_true_node
: boolean_false_node
;
6295 else if (TREE_CODE (type
) == VECTOR_TYPE
)
6296 return build_vector_from_val (type
,
6297 build_int_cst (TREE_TYPE (type
),
6300 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
6304 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6305 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6306 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6307 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6308 COND is the first argument to CODE; otherwise (as in the example
6309 given here), it is the second argument. TYPE is the type of the
6310 original expression. Return NULL_TREE if no simplification is
6314 fold_binary_op_with_conditional_arg (location_t loc
,
6315 enum tree_code code
,
6316 tree type
, tree op0
, tree op1
,
6317 tree cond
, tree arg
, int cond_first_p
)
6319 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
6320 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
6321 tree test
, true_value
, false_value
;
6322 tree lhs
= NULL_TREE
;
6323 tree rhs
= NULL_TREE
;
6324 enum tree_code cond_code
= COND_EXPR
;
6326 if (TREE_CODE (cond
) == COND_EXPR
6327 || TREE_CODE (cond
) == VEC_COND_EXPR
)
6329 test
= TREE_OPERAND (cond
, 0);
6330 true_value
= TREE_OPERAND (cond
, 1);
6331 false_value
= TREE_OPERAND (cond
, 2);
6332 /* If this operand throws an expression, then it does not make
6333 sense to try to perform a logical or arithmetic operation
6335 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6337 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6342 tree testtype
= TREE_TYPE (cond
);
6344 true_value
= constant_boolean_node (true, testtype
);
6345 false_value
= constant_boolean_node (false, testtype
);
6348 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
6349 cond_code
= VEC_COND_EXPR
;
6351 /* This transformation is only worthwhile if we don't have to wrap ARG
6352 in a SAVE_EXPR and the operation can be simplified without recursing
6353 on at least one of the branches once its pushed inside the COND_EXPR. */
6354 if (!TREE_CONSTANT (arg
)
6355 && (TREE_SIDE_EFFECTS (arg
)
6356 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
6357 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6360 arg
= fold_convert_loc (loc
, arg_type
, arg
);
6363 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6365 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6367 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6371 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6373 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6375 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6378 /* Check that we have simplified at least one of the branches. */
6379 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6382 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
6386 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6388 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6389 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6390 ADDEND is the same as X.
6392 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6393 and finite. The problematic cases are when X is zero, and its mode
6394 has signed zeros. In the case of rounding towards -infinity,
6395 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6396 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6399 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6401 if (!real_zerop (addend
))
6404 /* Don't allow the fold with -fsignaling-nans. */
6405 if (HONOR_SNANS (element_mode (type
)))
6408 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6409 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
6412 /* In a vector or complex, we would need to check the sign of all zeros. */
6413 if (TREE_CODE (addend
) != REAL_CST
)
6416 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6417 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6420 /* The mode has signed zeros, and we have to honor their sign.
6421 In this situation, there is only one case we can return true for.
6422 X - 0 is the same as X unless rounding towards -infinity is
6424 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
));
6427 /* Subroutine of fold() that checks comparisons of built-in math
6428 functions against real constants.
6430 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6431 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6432 is the type of the result and ARG0 and ARG1 are the operands of the
6433 comparison. ARG1 must be a TREE_REAL_CST.
6435 The function returns the constant folded tree if a simplification
6436 can be made, and NULL_TREE otherwise. */
6439 fold_mathfn_compare (location_t loc
,
6440 enum built_in_function fcode
, enum tree_code code
,
6441 tree type
, tree arg0
, tree arg1
)
6445 if (BUILTIN_SQRT_P (fcode
))
6447 tree arg
= CALL_EXPR_ARG (arg0
, 0);
6448 machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
6450 c
= TREE_REAL_CST (arg1
);
6451 if (REAL_VALUE_NEGATIVE (c
))
6453 /* sqrt(x) < y is always false, if y is negative. */
6454 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
6455 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6457 /* sqrt(x) > y is always true, if y is negative and we
6458 don't care about NaNs, i.e. negative values of x. */
6459 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
6460 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6462 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6463 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6464 build_real (TREE_TYPE (arg
), dconst0
));
6466 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
6470 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6471 real_convert (&c2
, mode
, &c2
);
6473 if (REAL_VALUE_ISINF (c2
))
6475 /* sqrt(x) > y is x == +Inf, when y is very large. */
6476 if (HONOR_INFINITIES (mode
))
6477 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg
,
6478 build_real (TREE_TYPE (arg
), c2
));
6480 /* sqrt(x) > y is always false, when y is very large
6481 and we don't care about infinities. */
6482 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6485 /* sqrt(x) > c is the same as x > c*c. */
6486 return fold_build2_loc (loc
, code
, type
, arg
,
6487 build_real (TREE_TYPE (arg
), c2
));
6489 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
6493 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6494 real_convert (&c2
, mode
, &c2
);
6496 if (REAL_VALUE_ISINF (c2
))
6498 /* sqrt(x) < y is always true, when y is a very large
6499 value and we don't care about NaNs or Infinities. */
6500 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
6501 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6503 /* sqrt(x) < y is x != +Inf when y is very large and we
6504 don't care about NaNs. */
6505 if (! HONOR_NANS (mode
))
6506 return fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6507 build_real (TREE_TYPE (arg
), c2
));
6509 /* sqrt(x) < y is x >= 0 when y is very large and we
6510 don't care about Infinities. */
6511 if (! HONOR_INFINITIES (mode
))
6512 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6513 build_real (TREE_TYPE (arg
), dconst0
));
6515 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6516 arg
= save_expr (arg
);
6517 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6518 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6519 build_real (TREE_TYPE (arg
),
6521 fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6522 build_real (TREE_TYPE (arg
),
6526 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6527 if (! HONOR_NANS (mode
))
6528 return fold_build2_loc (loc
, code
, type
, arg
,
6529 build_real (TREE_TYPE (arg
), c2
));
6531 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6532 arg
= save_expr (arg
);
6533 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6534 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6535 build_real (TREE_TYPE (arg
),
6537 fold_build2_loc (loc
, code
, type
, arg
,
6538 build_real (TREE_TYPE (arg
),
6546 /* Subroutine of fold() that optimizes comparisons against Infinities,
6547 either +Inf or -Inf.
6549 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6550 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6551 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6553 The function returns the constant folded tree if a simplification
6554 can be made, and NULL_TREE otherwise. */
6557 fold_inf_compare (location_t loc
, enum tree_code code
, tree type
,
6558 tree arg0
, tree arg1
)
6561 REAL_VALUE_TYPE max
;
6565 mode
= TYPE_MODE (TREE_TYPE (arg0
));
6567 /* For negative infinity swap the sense of the comparison. */
6568 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6570 code
= swap_tree_comparison (code
);
6575 /* x > +Inf is always false, if with ignore sNANs. */
6576 if (HONOR_SNANS (mode
))
6578 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6581 /* x <= +Inf is always true, if we don't case about NaNs. */
6582 if (! HONOR_NANS (mode
))
6583 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6585 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6586 arg0
= save_expr (arg0
);
6587 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg0
);
6591 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6592 real_maxval (&max
, neg
, mode
);
6593 return fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6594 arg0
, build_real (TREE_TYPE (arg0
), max
));
6597 /* x < +Inf is always equal to x <= DBL_MAX. */
6598 real_maxval (&max
, neg
, mode
);
6599 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6600 arg0
, build_real (TREE_TYPE (arg0
), max
));
6603 /* x != +Inf is always equal to !(x > DBL_MAX). */
6604 real_maxval (&max
, neg
, mode
);
6605 if (! HONOR_NANS (mode
))
6606 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6607 arg0
, build_real (TREE_TYPE (arg0
), max
));
6609 temp
= fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6610 arg0
, build_real (TREE_TYPE (arg0
), max
));
6611 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, temp
);
6620 /* Subroutine of fold() that optimizes comparisons of a division by
6621 a nonzero integer constant against an integer constant, i.e.
6624 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6625 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6626 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6628 The function returns the constant folded tree if a simplification
6629 can be made, and NULL_TREE otherwise. */
6632 fold_div_compare (location_t loc
,
6633 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6635 tree prod
, tmp
, hi
, lo
;
6636 tree arg00
= TREE_OPERAND (arg0
, 0);
6637 tree arg01
= TREE_OPERAND (arg0
, 1);
6638 signop sign
= TYPE_SIGN (TREE_TYPE (arg0
));
6639 bool neg_overflow
= false;
6642 /* We have to do this the hard way to detect unsigned overflow.
6643 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6644 wide_int val
= wi::mul (arg01
, arg1
, sign
, &overflow
);
6645 prod
= force_fit_type (TREE_TYPE (arg00
), val
, -1, overflow
);
6646 neg_overflow
= false;
6648 if (sign
== UNSIGNED
)
6650 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6651 build_int_cst (TREE_TYPE (arg01
), 1));
6654 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6655 val
= wi::add (prod
, tmp
, sign
, &overflow
);
6656 hi
= force_fit_type (TREE_TYPE (arg00
), val
,
6657 -1, overflow
| TREE_OVERFLOW (prod
));
6659 else if (tree_int_cst_sgn (arg01
) >= 0)
6661 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6662 build_int_cst (TREE_TYPE (arg01
), 1));
6663 switch (tree_int_cst_sgn (arg1
))
6666 neg_overflow
= true;
6667 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6672 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6677 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6687 /* A negative divisor reverses the relational operators. */
6688 code
= swap_tree_comparison (code
);
6690 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6691 build_int_cst (TREE_TYPE (arg01
), 1));
6692 switch (tree_int_cst_sgn (arg1
))
6695 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6700 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6705 neg_overflow
= true;
6706 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6718 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6719 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6720 if (TREE_OVERFLOW (hi
))
6721 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6722 if (TREE_OVERFLOW (lo
))
6723 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6724 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6727 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6728 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6729 if (TREE_OVERFLOW (hi
))
6730 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6731 if (TREE_OVERFLOW (lo
))
6732 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6733 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6736 if (TREE_OVERFLOW (lo
))
6738 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6739 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6741 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6744 if (TREE_OVERFLOW (hi
))
6746 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6747 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6749 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6752 if (TREE_OVERFLOW (hi
))
6754 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6755 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6757 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6760 if (TREE_OVERFLOW (lo
))
6762 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6763 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6765 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6775 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6776 equality/inequality test, then return a simplified form of the test
6777 using a sign testing. Otherwise return NULL. TYPE is the desired
6781 fold_single_bit_test_into_sign_test (location_t loc
,
6782 enum tree_code code
, tree arg0
, tree arg1
,
6785 /* If this is testing a single bit, we can optimize the test. */
6786 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6787 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6788 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6790 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6791 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6792 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6794 if (arg00
!= NULL_TREE
6795 /* This is only a win if casting to a signed type is cheap,
6796 i.e. when arg00's type is not a partial mode. */
6797 && TYPE_PRECISION (TREE_TYPE (arg00
))
6798 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00
))))
6800 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6801 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6803 fold_convert_loc (loc
, stype
, arg00
),
6804 build_int_cst (stype
, 0));
6811 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6812 equality/inequality test, then return a simplified form of
6813 the test using shifts and logical operations. Otherwise return
6814 NULL. TYPE is the desired result type. */
6817 fold_single_bit_test (location_t loc
, enum tree_code code
,
6818 tree arg0
, tree arg1
, tree result_type
)
6820 /* If this is testing a single bit, we can optimize the test. */
6821 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6822 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6823 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6825 tree inner
= TREE_OPERAND (arg0
, 0);
6826 tree type
= TREE_TYPE (arg0
);
6827 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6828 machine_mode operand_mode
= TYPE_MODE (type
);
6830 tree signed_type
, unsigned_type
, intermediate_type
;
6833 /* First, see if we can fold the single bit test into a sign-bit
6835 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6840 /* Otherwise we have (A & C) != 0 where C is a single bit,
6841 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6842 Similarly for (A & C) == 0. */
6844 /* If INNER is a right shift of a constant and it plus BITNUM does
6845 not overflow, adjust BITNUM and INNER. */
6846 if (TREE_CODE (inner
) == RSHIFT_EXPR
6847 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6848 && bitnum
< TYPE_PRECISION (type
)
6849 && wi::ltu_p (TREE_OPERAND (inner
, 1),
6850 TYPE_PRECISION (type
) - bitnum
))
6852 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
6853 inner
= TREE_OPERAND (inner
, 0);
6856 /* If we are going to be able to omit the AND below, we must do our
6857 operations as unsigned. If we must use the AND, we have a choice.
6858 Normally unsigned is faster, but for some machines signed is. */
6859 #ifdef LOAD_EXTEND_OP
6860 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6861 && !flag_syntax_only
) ? 0 : 1;
6866 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6867 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6868 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6869 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
6872 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6873 inner
, size_int (bitnum
));
6875 one
= build_int_cst (intermediate_type
, 1);
6877 if (code
== EQ_EXPR
)
6878 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6880 /* Put the AND last so it can combine with more things. */
6881 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6883 /* Make sure to return the proper type. */
6884 inner
= fold_convert_loc (loc
, result_type
, inner
);
6891 /* Check whether we are allowed to reorder operands arg0 and arg1,
6892 such that the evaluation of arg1 occurs before arg0. */
6895 reorder_operands_p (const_tree arg0
, const_tree arg1
)
6897 if (! flag_evaluation_order
)
6899 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6901 return ! TREE_SIDE_EFFECTS (arg0
)
6902 && ! TREE_SIDE_EFFECTS (arg1
);
6905 /* Test whether it is preferable two swap two operands, ARG0 and
6906 ARG1, for example because ARG0 is an integer constant and ARG1
6907 isn't. If REORDER is true, only recommend swapping if we can
6908 evaluate the operands in reverse order. */
6911 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
6913 if (CONSTANT_CLASS_P (arg1
))
6915 if (CONSTANT_CLASS_P (arg0
))
6921 if (TREE_CONSTANT (arg1
))
6923 if (TREE_CONSTANT (arg0
))
6926 if (reorder
&& flag_evaluation_order
6927 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6930 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6931 for commutative and comparison operators. Ensuring a canonical
6932 form allows the optimizers to find additional redundancies without
6933 having to explicitly check for both orderings. */
6934 if (TREE_CODE (arg0
) == SSA_NAME
6935 && TREE_CODE (arg1
) == SSA_NAME
6936 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6939 /* Put SSA_NAMEs last. */
6940 if (TREE_CODE (arg1
) == SSA_NAME
)
6942 if (TREE_CODE (arg0
) == SSA_NAME
)
6945 /* Put variables last. */
6954 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6955 ARG0 is extended to a wider type. */
6958 fold_widened_comparison (location_t loc
, enum tree_code code
,
6959 tree type
, tree arg0
, tree arg1
)
6961 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6963 tree shorter_type
, outer_type
;
6967 if (arg0_unw
== arg0
)
6969 shorter_type
= TREE_TYPE (arg0_unw
);
6971 #ifdef HAVE_canonicalize_funcptr_for_compare
6972 /* Disable this optimization if we're casting a function pointer
6973 type on targets that require function pointer canonicalization. */
6974 if (HAVE_canonicalize_funcptr_for_compare
6975 && TREE_CODE (shorter_type
) == POINTER_TYPE
6976 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6980 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6983 arg1_unw
= get_unwidened (arg1
, NULL_TREE
);
6985 /* If possible, express the comparison in the shorter mode. */
6986 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6987 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6988 && (TREE_TYPE (arg1_unw
) == shorter_type
6989 || ((TYPE_PRECISION (shorter_type
)
6990 >= TYPE_PRECISION (TREE_TYPE (arg1_unw
)))
6991 && (TYPE_UNSIGNED (shorter_type
)
6992 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw
))))
6993 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6994 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6995 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6996 && int_fits_type_p (arg1_unw
, shorter_type
))))
6997 return fold_build2_loc (loc
, code
, type
, arg0_unw
,
6998 fold_convert_loc (loc
, shorter_type
, arg1_unw
));
7000 if (TREE_CODE (arg1_unw
) != INTEGER_CST
7001 || TREE_CODE (shorter_type
) != INTEGER_TYPE
7002 || !int_fits_type_p (arg1_unw
, shorter_type
))
7005 /* If we are comparing with the integer that does not fit into the range
7006 of the shorter type, the result is known. */
7007 outer_type
= TREE_TYPE (arg1_unw
);
7008 min
= lower_bound_in_type (outer_type
, shorter_type
);
7009 max
= upper_bound_in_type (outer_type
, shorter_type
);
7011 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
7013 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
7020 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
7025 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
7031 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
7033 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
7038 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
7040 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
7049 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7050 ARG0 just the signedness is changed. */
7053 fold_sign_changed_comparison (location_t loc
, enum tree_code code
, tree type
,
7054 tree arg0
, tree arg1
)
7057 tree inner_type
, outer_type
;
7059 if (!CONVERT_EXPR_P (arg0
))
7062 outer_type
= TREE_TYPE (arg0
);
7063 arg0_inner
= TREE_OPERAND (arg0
, 0);
7064 inner_type
= TREE_TYPE (arg0_inner
);
7066 #ifdef HAVE_canonicalize_funcptr_for_compare
7067 /* Disable this optimization if we're casting a function pointer
7068 type on targets that require function pointer canonicalization. */
7069 if (HAVE_canonicalize_funcptr_for_compare
7070 && TREE_CODE (inner_type
) == POINTER_TYPE
7071 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
7075 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
7078 if (TREE_CODE (arg1
) != INTEGER_CST
7079 && !(CONVERT_EXPR_P (arg1
)
7080 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
7083 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
7088 if (POINTER_TYPE_P (inner_type
) != POINTER_TYPE_P (outer_type
))
7091 if (TREE_CODE (arg1
) == INTEGER_CST
)
7092 arg1
= force_fit_type (inner_type
, wi::to_widest (arg1
), 0,
7093 TREE_OVERFLOW (arg1
));
7095 arg1
= fold_convert_loc (loc
, inner_type
, arg1
);
7097 return fold_build2_loc (loc
, code
, type
, arg0_inner
, arg1
);
7101 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7102 means A >= Y && A != MAX, but in this case we know that
7103 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7106 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
7108 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
7110 if (TREE_CODE (bound
) == LT_EXPR
)
7111 a
= TREE_OPERAND (bound
, 0);
7112 else if (TREE_CODE (bound
) == GT_EXPR
)
7113 a
= TREE_OPERAND (bound
, 1);
7117 typea
= TREE_TYPE (a
);
7118 if (!INTEGRAL_TYPE_P (typea
)
7119 && !POINTER_TYPE_P (typea
))
7122 if (TREE_CODE (ineq
) == LT_EXPR
)
7124 a1
= TREE_OPERAND (ineq
, 1);
7125 y
= TREE_OPERAND (ineq
, 0);
7127 else if (TREE_CODE (ineq
) == GT_EXPR
)
7129 a1
= TREE_OPERAND (ineq
, 0);
7130 y
= TREE_OPERAND (ineq
, 1);
7135 if (TREE_TYPE (a1
) != typea
)
7138 if (POINTER_TYPE_P (typea
))
7140 /* Convert the pointer types into integer before taking the difference. */
7141 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
7142 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
7143 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
7146 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
7148 if (!diff
|| !integer_onep (diff
))
7151 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
7154 /* Fold a sum or difference of at least one multiplication.
7155 Returns the folded tree or NULL if no simplification could be made. */
7158 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
7159 tree arg0
, tree arg1
)
7161 tree arg00
, arg01
, arg10
, arg11
;
7162 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7164 /* (A * C) +- (B * C) -> (A+-B) * C.
7165 (A * C) +- A -> A * (C+-1).
7166 We are most concerned about the case where C is a constant,
7167 but other combinations show up during loop reduction. Since
7168 it is not difficult, try all four possibilities. */
7170 if (TREE_CODE (arg0
) == MULT_EXPR
)
7172 arg00
= TREE_OPERAND (arg0
, 0);
7173 arg01
= TREE_OPERAND (arg0
, 1);
7175 else if (TREE_CODE (arg0
) == INTEGER_CST
)
7177 arg00
= build_one_cst (type
);
7182 /* We cannot generate constant 1 for fract. */
7183 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7186 arg01
= build_one_cst (type
);
7188 if (TREE_CODE (arg1
) == MULT_EXPR
)
7190 arg10
= TREE_OPERAND (arg1
, 0);
7191 arg11
= TREE_OPERAND (arg1
, 1);
7193 else if (TREE_CODE (arg1
) == INTEGER_CST
)
7195 arg10
= build_one_cst (type
);
7196 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7197 the purpose of this canonicalization. */
7198 if (wi::neg_p (arg1
, TYPE_SIGN (TREE_TYPE (arg1
)))
7199 && negate_expr_p (arg1
)
7200 && code
== PLUS_EXPR
)
7202 arg11
= negate_expr (arg1
);
7210 /* We cannot generate constant 1 for fract. */
7211 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7214 arg11
= build_one_cst (type
);
7218 if (operand_equal_p (arg01
, arg11
, 0))
7219 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7220 else if (operand_equal_p (arg00
, arg10
, 0))
7221 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7222 else if (operand_equal_p (arg00
, arg11
, 0))
7223 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7224 else if (operand_equal_p (arg01
, arg10
, 0))
7225 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7227 /* No identical multiplicands; see if we can find a common
7228 power-of-two factor in non-power-of-two multiplies. This
7229 can help in multi-dimensional array access. */
7230 else if (tree_fits_shwi_p (arg01
)
7231 && tree_fits_shwi_p (arg11
))
7233 HOST_WIDE_INT int01
, int11
, tmp
;
7236 int01
= tree_to_shwi (arg01
);
7237 int11
= tree_to_shwi (arg11
);
7239 /* Move min of absolute values to int11. */
7240 if (absu_hwi (int01
) < absu_hwi (int11
))
7242 tmp
= int01
, int01
= int11
, int11
= tmp
;
7243 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7250 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
7251 /* The remainder should not be a constant, otherwise we
7252 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7253 increased the number of multiplications necessary. */
7254 && TREE_CODE (arg10
) != INTEGER_CST
)
7256 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7257 build_int_cst (TREE_TYPE (arg00
),
7262 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7267 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7268 fold_build2_loc (loc
, code
, type
,
7269 fold_convert_loc (loc
, type
, alt0
),
7270 fold_convert_loc (loc
, type
, alt1
)),
7271 fold_convert_loc (loc
, type
, same
));
7276 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7277 specified by EXPR into the buffer PTR of length LEN bytes.
7278 Return the number of bytes placed in the buffer, or zero
7282 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7284 tree type
= TREE_TYPE (expr
);
7285 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7286 int byte
, offset
, word
, words
;
7287 unsigned char value
;
7289 if ((off
== -1 && total_bytes
> len
)
7290 || off
>= total_bytes
)
7294 words
= total_bytes
/ UNITS_PER_WORD
;
7296 for (byte
= 0; byte
< total_bytes
; byte
++)
7298 int bitpos
= byte
* BITS_PER_UNIT
;
7299 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7301 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7303 if (total_bytes
> UNITS_PER_WORD
)
7305 word
= byte
/ UNITS_PER_WORD
;
7306 if (WORDS_BIG_ENDIAN
)
7307 word
= (words
- 1) - word
;
7308 offset
= word
* UNITS_PER_WORD
;
7309 if (BYTES_BIG_ENDIAN
)
7310 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7312 offset
+= byte
% UNITS_PER_WORD
;
7315 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7317 && offset
- off
< len
)
7318 ptr
[offset
- off
] = value
;
7320 return MIN (len
, total_bytes
- off
);
7324 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7325 specified by EXPR into the buffer PTR of length LEN bytes.
7326 Return the number of bytes placed in the buffer, or zero
7330 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7332 tree type
= TREE_TYPE (expr
);
7333 machine_mode mode
= TYPE_MODE (type
);
7334 int total_bytes
= GET_MODE_SIZE (mode
);
7335 FIXED_VALUE_TYPE value
;
7336 tree i_value
, i_type
;
7338 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7341 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7343 if (NULL_TREE
== i_type
7344 || TYPE_PRECISION (i_type
) != total_bytes
)
7347 value
= TREE_FIXED_CST (expr
);
7348 i_value
= double_int_to_tree (i_type
, value
.data
);
7350 return native_encode_int (i_value
, ptr
, len
, off
);
7354 /* Subroutine of native_encode_expr. Encode the REAL_CST
7355 specified by EXPR into the buffer PTR of length LEN bytes.
7356 Return the number of bytes placed in the buffer, or zero
7360 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7362 tree type
= TREE_TYPE (expr
);
7363 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7364 int byte
, offset
, word
, words
, bitpos
;
7365 unsigned char value
;
7367 /* There are always 32 bits in each long, no matter the size of
7368 the hosts long. We handle floating point representations with
7372 if ((off
== -1 && total_bytes
> len
)
7373 || off
>= total_bytes
)
7377 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7379 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7381 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7382 bitpos
+= BITS_PER_UNIT
)
7384 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7385 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7387 if (UNITS_PER_WORD
< 4)
7389 word
= byte
/ UNITS_PER_WORD
;
7390 if (WORDS_BIG_ENDIAN
)
7391 word
= (words
- 1) - word
;
7392 offset
= word
* UNITS_PER_WORD
;
7393 if (BYTES_BIG_ENDIAN
)
7394 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7396 offset
+= byte
% UNITS_PER_WORD
;
7399 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7400 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7402 && offset
- off
< len
)
7403 ptr
[offset
- off
] = value
;
7405 return MIN (len
, total_bytes
- off
);
7408 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7409 specified by EXPR into the buffer PTR of length LEN bytes.
7410 Return the number of bytes placed in the buffer, or zero
7414 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7419 part
= TREE_REALPART (expr
);
7420 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7424 part
= TREE_IMAGPART (expr
);
7426 off
= MAX (0, off
- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part
))));
7427 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
, off
);
7431 return rsize
+ isize
;
7435 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7436 specified by EXPR into the buffer PTR of length LEN bytes.
7437 Return the number of bytes placed in the buffer, or zero
7441 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7448 count
= VECTOR_CST_NELTS (expr
);
7449 itype
= TREE_TYPE (TREE_TYPE (expr
));
7450 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7451 for (i
= 0; i
< count
; i
++)
7458 elem
= VECTOR_CST_ELT (expr
, i
);
7459 int res
= native_encode_expr (elem
, ptr
+offset
, len
-offset
, off
);
7460 if ((off
== -1 && res
!= size
)
7473 /* Subroutine of native_encode_expr. Encode the STRING_CST
7474 specified by EXPR into the buffer PTR of length LEN bytes.
7475 Return the number of bytes placed in the buffer, or zero
7479 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7481 tree type
= TREE_TYPE (expr
);
7482 HOST_WIDE_INT total_bytes
;
7484 if (TREE_CODE (type
) != ARRAY_TYPE
7485 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7486 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7487 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7489 total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (type
));
7490 if ((off
== -1 && total_bytes
> len
)
7491 || off
>= total_bytes
)
7495 if (TREE_STRING_LENGTH (expr
) - off
< MIN (total_bytes
, len
))
7498 if (off
< TREE_STRING_LENGTH (expr
))
7500 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7501 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7503 memset (ptr
+ written
, 0,
7504 MIN (total_bytes
- written
, len
- written
));
7507 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, MIN (total_bytes
, len
));
7508 return MIN (total_bytes
- off
, len
);
7512 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7513 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7514 buffer PTR of length LEN bytes. If OFF is not -1 then start
7515 the encoding at byte offset OFF and encode at most LEN bytes.
7516 Return the number of bytes placed in the buffer, or zero upon failure. */
7519 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7521 switch (TREE_CODE (expr
))
7524 return native_encode_int (expr
, ptr
, len
, off
);
7527 return native_encode_real (expr
, ptr
, len
, off
);
7530 return native_encode_fixed (expr
, ptr
, len
, off
);
7533 return native_encode_complex (expr
, ptr
, len
, off
);
7536 return native_encode_vector (expr
, ptr
, len
, off
);
7539 return native_encode_string (expr
, ptr
, len
, off
);
7547 /* Subroutine of native_interpret_expr. Interpret the contents of
7548 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7549 If the buffer cannot be interpreted, return NULL_TREE. */
7552 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7554 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7556 if (total_bytes
> len
7557 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7560 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
7562 return wide_int_to_tree (type
, result
);
7566 /* Subroutine of native_interpret_expr. Interpret the contents of
7567 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7568 If the buffer cannot be interpreted, return NULL_TREE. */
7571 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
7573 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7575 FIXED_VALUE_TYPE fixed_value
;
7577 if (total_bytes
> len
7578 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7581 result
= double_int::from_buffer (ptr
, total_bytes
);
7582 fixed_value
= fixed_from_double_int (result
, TYPE_MODE (type
));
7584 return build_fixed (type
, fixed_value
);
7588 /* Subroutine of native_interpret_expr. Interpret the contents of
7589 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7590 If the buffer cannot be interpreted, return NULL_TREE. */
7593 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7595 machine_mode mode
= TYPE_MODE (type
);
7596 int total_bytes
= GET_MODE_SIZE (mode
);
7597 int byte
, offset
, word
, words
, bitpos
;
7598 unsigned char value
;
7599 /* There are always 32 bits in each long, no matter the size of
7600 the hosts long. We handle floating point representations with
7605 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7606 if (total_bytes
> len
|| total_bytes
> 24)
7608 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7610 memset (tmp
, 0, sizeof (tmp
));
7611 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7612 bitpos
+= BITS_PER_UNIT
)
7614 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7615 if (UNITS_PER_WORD
< 4)
7617 word
= byte
/ UNITS_PER_WORD
;
7618 if (WORDS_BIG_ENDIAN
)
7619 word
= (words
- 1) - word
;
7620 offset
= word
* UNITS_PER_WORD
;
7621 if (BYTES_BIG_ENDIAN
)
7622 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7624 offset
+= byte
% UNITS_PER_WORD
;
7627 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7628 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7630 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7633 real_from_target (&r
, tmp
, mode
);
7634 return build_real (type
, r
);
7638 /* Subroutine of native_interpret_expr. Interpret the contents of
7639 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7640 If the buffer cannot be interpreted, return NULL_TREE. */
7643 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7645 tree etype
, rpart
, ipart
;
7648 etype
= TREE_TYPE (type
);
7649 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7652 rpart
= native_interpret_expr (etype
, ptr
, size
);
7655 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7658 return build_complex (type
, rpart
, ipart
);
7662 /* Subroutine of native_interpret_expr. Interpret the contents of
7663 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7664 If the buffer cannot be interpreted, return NULL_TREE. */
7667 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7673 etype
= TREE_TYPE (type
);
7674 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7675 count
= TYPE_VECTOR_SUBPARTS (type
);
7676 if (size
* count
> len
)
7679 elements
= XALLOCAVEC (tree
, count
);
7680 for (i
= count
- 1; i
>= 0; i
--)
7682 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7687 return build_vector (type
, elements
);
7691 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7692 the buffer PTR of length LEN as a constant of type TYPE. For
7693 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7694 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7695 return NULL_TREE. */
7698 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7700 switch (TREE_CODE (type
))
7706 case REFERENCE_TYPE
:
7707 return native_interpret_int (type
, ptr
, len
);
7710 return native_interpret_real (type
, ptr
, len
);
7712 case FIXED_POINT_TYPE
:
7713 return native_interpret_fixed (type
, ptr
, len
);
7716 return native_interpret_complex (type
, ptr
, len
);
7719 return native_interpret_vector (type
, ptr
, len
);
7726 /* Returns true if we can interpret the contents of a native encoding
7730 can_native_interpret_type_p (tree type
)
7732 switch (TREE_CODE (type
))
7738 case REFERENCE_TYPE
:
7739 case FIXED_POINT_TYPE
:
7749 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7750 TYPE at compile-time. If we're unable to perform the conversion
7751 return NULL_TREE. */
7754 fold_view_convert_expr (tree type
, tree expr
)
7756 /* We support up to 512-bit values (for V8DFmode). */
7757 unsigned char buffer
[64];
7760 /* Check that the host and target are sane. */
7761 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7764 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7768 return native_interpret_expr (type
, buffer
, len
);
7771 /* Build an expression for the address of T. Folds away INDIRECT_REF
7772 to avoid confusing the gimplify process. */
7775 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7777 /* The size of the object is not relevant when talking about its address. */
7778 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7779 t
= TREE_OPERAND (t
, 0);
7781 if (TREE_CODE (t
) == INDIRECT_REF
)
7783 t
= TREE_OPERAND (t
, 0);
7785 if (TREE_TYPE (t
) != ptrtype
)
7786 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7788 else if (TREE_CODE (t
) == MEM_REF
7789 && integer_zerop (TREE_OPERAND (t
, 1)))
7790 return TREE_OPERAND (t
, 0);
7791 else if (TREE_CODE (t
) == MEM_REF
7792 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7793 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7794 TREE_OPERAND (t
, 0),
7795 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7796 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7798 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7800 if (TREE_TYPE (t
) != ptrtype
)
7801 t
= fold_convert_loc (loc
, ptrtype
, t
);
7804 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7809 /* Build an expression for the address of T. */
7812 build_fold_addr_expr_loc (location_t loc
, tree t
)
7814 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7816 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7819 /* Fold a unary expression of code CODE and type TYPE with operand
7820 OP0. Return the folded expression if folding is successful.
7821 Otherwise, return NULL_TREE. */
7824 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7828 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7830 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7831 && TREE_CODE_LENGTH (code
) == 1);
7836 if (CONVERT_EXPR_CODE_P (code
)
7837 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7839 /* Don't use STRIP_NOPS, because signedness of argument type
7841 STRIP_SIGN_NOPS (arg0
);
7845 /* Strip any conversions that don't change the mode. This
7846 is safe for every expression, except for a comparison
7847 expression because its signedness is derived from its
7850 Note that this is done as an internal manipulation within
7851 the constant folder, in order to find the simplest
7852 representation of the arguments so that their form can be
7853 studied. In any cases, the appropriate type conversions
7854 should be put back in the tree that will get out of the
7859 if (CONSTANT_CLASS_P (arg0
))
7861 tree tem
= const_unop (code
, type
, arg0
);
7864 if (TREE_TYPE (tem
) != type
)
7865 tem
= fold_convert_loc (loc
, type
, tem
);
7871 tem
= generic_simplify (loc
, code
, type
, op0
);
7875 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7877 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7878 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7879 fold_build1_loc (loc
, code
, type
,
7880 fold_convert_loc (loc
, TREE_TYPE (op0
),
7881 TREE_OPERAND (arg0
, 1))));
7882 else if (TREE_CODE (arg0
) == COND_EXPR
)
7884 tree arg01
= TREE_OPERAND (arg0
, 1);
7885 tree arg02
= TREE_OPERAND (arg0
, 2);
7886 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7887 arg01
= fold_build1_loc (loc
, code
, type
,
7888 fold_convert_loc (loc
,
7889 TREE_TYPE (op0
), arg01
));
7890 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7891 arg02
= fold_build1_loc (loc
, code
, type
,
7892 fold_convert_loc (loc
,
7893 TREE_TYPE (op0
), arg02
));
7894 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7897 /* If this was a conversion, and all we did was to move into
7898 inside the COND_EXPR, bring it back out. But leave it if
7899 it is a conversion from integer to integer and the
7900 result precision is no wider than a word since such a
7901 conversion is cheap and may be optimized away by combine,
7902 while it couldn't if it were outside the COND_EXPR. Then return
7903 so we don't get into an infinite recursion loop taking the
7904 conversion out and then back in. */
7906 if ((CONVERT_EXPR_CODE_P (code
)
7907 || code
== NON_LVALUE_EXPR
)
7908 && TREE_CODE (tem
) == COND_EXPR
7909 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7910 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7911 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7912 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7913 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7914 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7915 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7917 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7918 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7919 || flag_syntax_only
))
7920 tem
= build1_loc (loc
, code
, type
,
7922 TREE_TYPE (TREE_OPERAND
7923 (TREE_OPERAND (tem
, 1), 0)),
7924 TREE_OPERAND (tem
, 0),
7925 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7926 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7934 case NON_LVALUE_EXPR
:
7935 if (!maybe_lvalue_p (op0
))
7936 return fold_convert_loc (loc
, type
, op0
);
7941 case FIX_TRUNC_EXPR
:
7942 if (COMPARISON_CLASS_P (op0
))
7944 /* If we have (type) (a CMP b) and type is an integral type, return
7945 new expression involving the new type. Canonicalize
7946 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7948 Do not fold the result as that would not simplify further, also
7949 folding again results in recursions. */
7950 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7951 return build2_loc (loc
, TREE_CODE (op0
), type
,
7952 TREE_OPERAND (op0
, 0),
7953 TREE_OPERAND (op0
, 1));
7954 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7955 && TREE_CODE (type
) != VECTOR_TYPE
)
7956 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7957 constant_boolean_node (true, type
),
7958 constant_boolean_node (false, type
));
7961 /* Handle (T *)&A.B.C for A being of type T and B and C
7962 living at offset zero. This occurs frequently in
7963 C++ upcasting and then accessing the base. */
7964 if (TREE_CODE (op0
) == ADDR_EXPR
7965 && POINTER_TYPE_P (type
)
7966 && handled_component_p (TREE_OPERAND (op0
, 0)))
7968 HOST_WIDE_INT bitsize
, bitpos
;
7971 int unsignedp
, volatilep
;
7972 tree base
= TREE_OPERAND (op0
, 0);
7973 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7974 &mode
, &unsignedp
, &volatilep
, false);
7975 /* If the reference was to a (constant) zero offset, we can use
7976 the address of the base if it has the same base type
7977 as the result type and the pointer type is unqualified. */
7978 if (! offset
&& bitpos
== 0
7979 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7980 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7981 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7982 return fold_convert_loc (loc
, type
,
7983 build_fold_addr_expr_loc (loc
, base
));
7986 if (TREE_CODE (op0
) == MODIFY_EXPR
7987 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7988 /* Detect assigning a bitfield. */
7989 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7991 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7993 /* Don't leave an assignment inside a conversion
7994 unless assigning a bitfield. */
7995 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7996 /* First do the assignment, then return converted constant. */
7997 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7998 TREE_NO_WARNING (tem
) = 1;
7999 TREE_USED (tem
) = 1;
8003 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8004 constants (if x has signed type, the sign bit cannot be set
8005 in c). This folds extension into the BIT_AND_EXPR.
8006 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8007 very likely don't have maximal range for their precision and this
8008 transformation effectively doesn't preserve non-maximal ranges. */
8009 if (TREE_CODE (type
) == INTEGER_TYPE
8010 && TREE_CODE (op0
) == BIT_AND_EXPR
8011 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
8013 tree and_expr
= op0
;
8014 tree and0
= TREE_OPERAND (and_expr
, 0);
8015 tree and1
= TREE_OPERAND (and_expr
, 1);
8018 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
8019 || (TYPE_PRECISION (type
)
8020 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
8022 else if (TYPE_PRECISION (TREE_TYPE (and1
))
8023 <= HOST_BITS_PER_WIDE_INT
8024 && tree_fits_uhwi_p (and1
))
8026 unsigned HOST_WIDE_INT cst
;
8028 cst
= tree_to_uhwi (and1
);
8029 cst
&= HOST_WIDE_INT_M1U
8030 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
8031 change
= (cst
== 0);
8032 #ifdef LOAD_EXTEND_OP
8034 && !flag_syntax_only
8035 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
8038 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
8039 and0
= fold_convert_loc (loc
, uns
, and0
);
8040 and1
= fold_convert_loc (loc
, uns
, and1
);
8046 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
8047 TREE_OVERFLOW (and1
));
8048 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
8049 fold_convert_loc (loc
, type
, and0
), tem
);
8053 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8054 when one of the new casts will fold away. Conservatively we assume
8055 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8056 if (POINTER_TYPE_P (type
)
8057 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8058 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
8059 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8060 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
8061 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
8063 tree arg00
= TREE_OPERAND (arg0
, 0);
8064 tree arg01
= TREE_OPERAND (arg0
, 1);
8066 return fold_build_pointer_plus_loc
8067 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
8070 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8071 of the same precision, and X is an integer type not narrower than
8072 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8073 if (INTEGRAL_TYPE_P (type
)
8074 && TREE_CODE (op0
) == BIT_NOT_EXPR
8075 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8076 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
8077 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
8079 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
8080 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
8081 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
8082 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
8083 fold_convert_loc (loc
, type
, tem
));
8086 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8087 type of X and Y (integer types only). */
8088 if (INTEGRAL_TYPE_P (type
)
8089 && TREE_CODE (op0
) == MULT_EXPR
8090 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8091 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
8093 /* Be careful not to introduce new overflows. */
8095 if (TYPE_OVERFLOW_WRAPS (type
))
8098 mult_type
= unsigned_type_for (type
);
8100 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
8102 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
8103 fold_convert_loc (loc
, mult_type
,
8104 TREE_OPERAND (op0
, 0)),
8105 fold_convert_loc (loc
, mult_type
,
8106 TREE_OPERAND (op0
, 1)));
8107 return fold_convert_loc (loc
, type
, tem
);
8113 case VIEW_CONVERT_EXPR
:
8114 if (TREE_CODE (op0
) == MEM_REF
)
8115 return fold_build2_loc (loc
, MEM_REF
, type
,
8116 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
8121 tem
= fold_negate_expr (loc
, arg0
);
8123 return fold_convert_loc (loc
, type
, tem
);
8127 /* Convert fabs((double)float) into (double)fabsf(float). */
8128 if (TREE_CODE (arg0
) == NOP_EXPR
8129 && TREE_CODE (type
) == REAL_TYPE
)
8131 tree targ0
= strip_float_extensions (arg0
);
8133 return fold_convert_loc (loc
, type
,
8134 fold_build1_loc (loc
, ABS_EXPR
,
8138 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8139 else if (TREE_CODE (arg0
) == ABS_EXPR
)
8142 /* Strip sign ops from argument. */
8143 if (TREE_CODE (type
) == REAL_TYPE
)
8145 tem
= fold_strip_sign_ops (arg0
);
8147 return fold_build1_loc (loc
, ABS_EXPR
, type
,
8148 fold_convert_loc (loc
, type
, tem
));
8153 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8154 return fold_convert_loc (loc
, type
, arg0
);
8155 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8157 tree itype
= TREE_TYPE (type
);
8158 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
8159 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
8160 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
8161 negate_expr (ipart
));
8163 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8164 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8168 /* Convert ~ (-A) to A - 1. */
8169 if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
8170 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
8171 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0)),
8172 build_int_cst (type
, 1));
8173 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8174 else if (INTEGRAL_TYPE_P (type
)
8175 && ((TREE_CODE (arg0
) == MINUS_EXPR
8176 && integer_onep (TREE_OPERAND (arg0
, 1)))
8177 || (TREE_CODE (arg0
) == PLUS_EXPR
8178 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
8180 /* Perform the negation in ARG0's type and only then convert
8181 to TYPE as to avoid introducing undefined behavior. */
8182 tree t
= fold_build1_loc (loc
, NEGATE_EXPR
,
8183 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
8184 TREE_OPERAND (arg0
, 0));
8185 return fold_convert_loc (loc
, type
, t
);
8187 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8188 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8189 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8190 fold_convert_loc (loc
, type
,
8191 TREE_OPERAND (arg0
, 0)))))
8192 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
8193 fold_convert_loc (loc
, type
,
8194 TREE_OPERAND (arg0
, 1)));
8195 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8196 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8197 fold_convert_loc (loc
, type
,
8198 TREE_OPERAND (arg0
, 1)))))
8199 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
8200 fold_convert_loc (loc
, type
,
8201 TREE_OPERAND (arg0
, 0)), tem
);
8205 case TRUTH_NOT_EXPR
:
8206 /* Note that the operand of this must be an int
8207 and its values must be 0 or 1.
8208 ("true" is a fixed value perhaps depending on the language,
8209 but we don't handle values other than 1 correctly yet.) */
8210 tem
= fold_truth_not_expr (loc
, arg0
);
8213 return fold_convert_loc (loc
, type
, tem
);
8216 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8217 return fold_convert_loc (loc
, type
, arg0
);
8218 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8220 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8221 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8222 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8223 TREE_OPERAND (arg0
, 0)),
8224 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8225 TREE_OPERAND (arg0
, 1)));
8226 return fold_convert_loc (loc
, type
, tem
);
8228 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8230 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8231 tem
= fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8232 TREE_OPERAND (arg0
, 0));
8233 return fold_convert_loc (loc
, type
, tem
);
8235 if (TREE_CODE (arg0
) == CALL_EXPR
)
8237 tree fn
= get_callee_fndecl (arg0
);
8238 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8239 switch (DECL_FUNCTION_CODE (fn
))
8241 CASE_FLT_FN (BUILT_IN_CEXPI
):
8242 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
8244 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8254 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8255 return build_zero_cst (type
);
8256 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8258 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8259 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8260 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8261 TREE_OPERAND (arg0
, 0)),
8262 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8263 TREE_OPERAND (arg0
, 1)));
8264 return fold_convert_loc (loc
, type
, tem
);
8266 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8268 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8269 tem
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
8270 return fold_convert_loc (loc
, type
, negate_expr (tem
));
8272 if (TREE_CODE (arg0
) == CALL_EXPR
)
8274 tree fn
= get_callee_fndecl (arg0
);
8275 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8276 switch (DECL_FUNCTION_CODE (fn
))
8278 CASE_FLT_FN (BUILT_IN_CEXPI
):
8279 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
8281 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8291 /* Fold *&X to X if X is an lvalue. */
8292 if (TREE_CODE (op0
) == ADDR_EXPR
)
8294 tree op00
= TREE_OPERAND (op0
, 0);
8295 if ((TREE_CODE (op00
) == VAR_DECL
8296 || TREE_CODE (op00
) == PARM_DECL
8297 || TREE_CODE (op00
) == RESULT_DECL
)
8298 && !TREE_READONLY (op00
))
8305 } /* switch (code) */
8309 /* If the operation was a conversion do _not_ mark a resulting constant
8310 with TREE_OVERFLOW if the original constant was not. These conversions
8311 have implementation defined behavior and retaining the TREE_OVERFLOW
8312 flag here would confuse later passes such as VRP. */
8314 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
8315 tree type
, tree op0
)
8317 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
8319 && TREE_CODE (res
) == INTEGER_CST
8320 && TREE_CODE (op0
) == INTEGER_CST
8321 && CONVERT_EXPR_CODE_P (code
))
8322 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
8327 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8328 operands OP0 and OP1. LOC is the location of the resulting expression.
8329 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8330 Return the folded expression if folding is successful. Otherwise,
8331 return NULL_TREE. */
8333 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
8334 tree arg0
, tree arg1
, tree op0
, tree op1
)
8338 /* We only do these simplifications if we are optimizing. */
8342 /* Check for things like (A || B) && (A || C). We can convert this
8343 to A || (B && C). Note that either operator can be any of the four
8344 truth and/or operations and the transformation will still be
8345 valid. Also note that we only care about order for the
8346 ANDIF and ORIF operators. If B contains side effects, this
8347 might change the truth-value of A. */
8348 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8349 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8350 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8351 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8352 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8353 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8355 tree a00
= TREE_OPERAND (arg0
, 0);
8356 tree a01
= TREE_OPERAND (arg0
, 1);
8357 tree a10
= TREE_OPERAND (arg1
, 0);
8358 tree a11
= TREE_OPERAND (arg1
, 1);
8359 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8360 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8361 && (code
== TRUTH_AND_EXPR
8362 || code
== TRUTH_OR_EXPR
));
8364 if (operand_equal_p (a00
, a10
, 0))
8365 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8366 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8367 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8368 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8369 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8370 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8371 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8372 fold_build2_loc (loc
, code
, type
, a00
, a11
));
8374 /* This case if tricky because we must either have commutative
8375 operators or else A10 must not have side-effects. */
8377 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8378 && operand_equal_p (a01
, a11
, 0))
8379 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8380 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8384 /* See if we can build a range comparison. */
8385 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
8388 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8389 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8391 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8393 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8396 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8397 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8399 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8401 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
8404 /* Check for the possibility of merging component references. If our
8405 lhs is another similar operation, try to merge its rhs with our
8406 rhs. Then try to merge our lhs and rhs. */
8407 if (TREE_CODE (arg0
) == code
8408 && 0 != (tem
= fold_truth_andor_1 (loc
, code
, type
,
8409 TREE_OPERAND (arg0
, 1), arg1
)))
8410 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8412 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
8415 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8416 && (code
== TRUTH_AND_EXPR
8417 || code
== TRUTH_ANDIF_EXPR
8418 || code
== TRUTH_OR_EXPR
8419 || code
== TRUTH_ORIF_EXPR
))
8421 enum tree_code ncode
, icode
;
8423 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
8424 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
8425 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
8427 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8428 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8429 We don't want to pack more than two leafs to a non-IF AND/OR
8431 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8432 equal to IF-CODE, then we don't want to add right-hand operand.
8433 If the inner right-hand side of left-hand operand has
8434 side-effects, or isn't simple, then we can't add to it,
8435 as otherwise we might destroy if-sequence. */
8436 if (TREE_CODE (arg0
) == icode
8437 && simple_operand_p_2 (arg1
)
8438 /* Needed for sequence points to handle trappings, and
8440 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
8442 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
8444 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
8447 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8448 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8449 else if (TREE_CODE (arg1
) == icode
8450 && simple_operand_p_2 (arg0
)
8451 /* Needed for sequence points to handle trappings, and
8453 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
8455 tem
= fold_build2_loc (loc
, ncode
, type
,
8456 arg0
, TREE_OPERAND (arg1
, 0));
8457 return fold_build2_loc (loc
, icode
, type
, tem
,
8458 TREE_OPERAND (arg1
, 1));
8460 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8462 For sequence point consistancy, we need to check for trapping,
8463 and side-effects. */
8464 else if (code
== icode
&& simple_operand_p_2 (arg0
)
8465 && simple_operand_p_2 (arg1
))
8466 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
8472 /* Fold a binary expression of code CODE and type TYPE with operands
8473 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8474 Return the folded expression if folding is successful. Otherwise,
8475 return NULL_TREE. */
8478 fold_minmax (location_t loc
, enum tree_code code
, tree type
, tree op0
, tree op1
)
8480 enum tree_code compl_code
;
8482 if (code
== MIN_EXPR
)
8483 compl_code
= MAX_EXPR
;
8484 else if (code
== MAX_EXPR
)
8485 compl_code
= MIN_EXPR
;
8489 /* MIN (MAX (a, b), b) == b. */
8490 if (TREE_CODE (op0
) == compl_code
8491 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
8492 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 0));
8494 /* MIN (MAX (b, a), b) == b. */
8495 if (TREE_CODE (op0
) == compl_code
8496 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
8497 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
8498 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 1));
8500 /* MIN (a, MAX (a, b)) == a. */
8501 if (TREE_CODE (op1
) == compl_code
8502 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
8503 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
8504 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 1));
8506 /* MIN (a, MAX (b, a)) == a. */
8507 if (TREE_CODE (op1
) == compl_code
8508 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
8509 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
8510 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 0));
8515 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8516 by changing CODE to reduce the magnitude of constants involved in
8517 ARG0 of the comparison.
8518 Returns a canonicalized comparison tree if a simplification was
8519 possible, otherwise returns NULL_TREE.
8520 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8521 valid if signed overflow is undefined. */
8524 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8525 tree arg0
, tree arg1
,
8526 bool *strict_overflow_p
)
8528 enum tree_code code0
= TREE_CODE (arg0
);
8529 tree t
, cst0
= NULL_TREE
;
8533 /* Match A +- CST code arg1 and CST code arg1. We can change the
8534 first form only if overflow is undefined. */
8535 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8536 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
8537 /* In principle pointers also have undefined overflow behavior,
8538 but that causes problems elsewhere. */
8539 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8540 && (code0
== MINUS_EXPR
8541 || code0
== PLUS_EXPR
)
8542 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8543 || code0
== INTEGER_CST
))
8546 /* Identify the constant in arg0 and its sign. */
8547 if (code0
== INTEGER_CST
)
8550 cst0
= TREE_OPERAND (arg0
, 1);
8551 sgn0
= tree_int_cst_sgn (cst0
);
8553 /* Overflowed constants and zero will cause problems. */
8554 if (integer_zerop (cst0
)
8555 || TREE_OVERFLOW (cst0
))
8558 /* See if we can reduce the magnitude of the constant in
8559 arg0 by changing the comparison code. */
8560 if (code0
== INTEGER_CST
)
8562 /* CST <= arg1 -> CST-1 < arg1. */
8563 if (code
== LE_EXPR
&& sgn0
== 1)
8565 /* -CST < arg1 -> -CST-1 <= arg1. */
8566 else if (code
== LT_EXPR
&& sgn0
== -1)
8568 /* CST > arg1 -> CST-1 >= arg1. */
8569 else if (code
== GT_EXPR
&& sgn0
== 1)
8571 /* -CST >= arg1 -> -CST-1 > arg1. */
8572 else if (code
== GE_EXPR
&& sgn0
== -1)
8576 /* arg1 code' CST' might be more canonical. */
8581 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8583 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8585 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8586 else if (code
== GT_EXPR
8587 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8589 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8590 else if (code
== LE_EXPR
8591 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8593 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8594 else if (code
== GE_EXPR
8595 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8599 *strict_overflow_p
= true;
8602 /* Now build the constant reduced in magnitude. But not if that
8603 would produce one outside of its types range. */
8604 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8606 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8607 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8609 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8610 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8611 /* We cannot swap the comparison here as that would cause us to
8612 endlessly recurse. */
8615 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8616 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8617 if (code0
!= INTEGER_CST
)
8618 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8619 t
= fold_convert (TREE_TYPE (arg1
), t
);
8621 /* If swapping might yield to a more canonical form, do so. */
8623 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, arg1
, t
);
8625 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8628 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8629 overflow further. Try to decrease the magnitude of constants involved
8630 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8631 and put sole constants at the second argument position.
8632 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8635 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8636 tree arg0
, tree arg1
)
8639 bool strict_overflow_p
;
8640 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8641 "when reducing constant in comparison");
8643 /* Try canonicalization by simplifying arg0. */
8644 strict_overflow_p
= false;
8645 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8646 &strict_overflow_p
);
8649 if (strict_overflow_p
)
8650 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8654 /* Try canonicalization by simplifying arg1 using the swapped
8656 code
= swap_tree_comparison (code
);
8657 strict_overflow_p
= false;
8658 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8659 &strict_overflow_p
);
8660 if (t
&& strict_overflow_p
)
8661 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8665 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8666 space. This is used to avoid issuing overflow warnings for
8667 expressions like &p->x which can not wrap. */
8670 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8672 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8679 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
8680 if (offset
== NULL_TREE
)
8681 wi_offset
= wi::zero (precision
);
8682 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8688 wide_int units
= wi::shwi (bitpos
/ BITS_PER_UNIT
, precision
);
8689 wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
8693 if (!wi::fits_uhwi_p (total
))
8696 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8700 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8702 if (TREE_CODE (base
) == ADDR_EXPR
)
8704 HOST_WIDE_INT base_size
;
8706 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8707 if (base_size
> 0 && size
< base_size
)
8711 return total
.to_uhwi () > (unsigned HOST_WIDE_INT
) size
;
8714 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8715 kind INTEGER_CST. This makes sure to properly sign-extend the
8718 static HOST_WIDE_INT
8719 size_low_cst (const_tree t
)
8721 HOST_WIDE_INT w
= TREE_INT_CST_ELT (t
, 0);
8722 int prec
= TYPE_PRECISION (TREE_TYPE (t
));
8723 if (prec
< HOST_BITS_PER_WIDE_INT
)
8724 return sext_hwi (w
, prec
);
8728 /* Subroutine of fold_binary. This routine performs all of the
8729 transformations that are common to the equality/inequality
8730 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8731 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8732 fold_binary should call fold_binary. Fold a comparison with
8733 tree code CODE and type TYPE with operands OP0 and OP1. Return
8734 the folded comparison or NULL_TREE. */
8737 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8740 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
8741 tree arg0
, arg1
, tem
;
8746 STRIP_SIGN_NOPS (arg0
);
8747 STRIP_SIGN_NOPS (arg1
);
8749 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8750 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8752 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8753 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
8754 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8755 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8756 && TREE_CODE (arg1
) == INTEGER_CST
8757 && !TREE_OVERFLOW (arg1
))
8759 const enum tree_code
8760 reverse_op
= TREE_CODE (arg0
) == PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
;
8761 tree const1
= TREE_OPERAND (arg0
, 1);
8762 tree const2
= fold_convert_loc (loc
, TREE_TYPE (const1
), arg1
);
8763 tree variable
= TREE_OPERAND (arg0
, 0);
8764 tree new_const
= int_const_binop (reverse_op
, const2
, const1
);
8766 /* If the constant operation overflowed this can be
8767 simplified as a comparison against INT_MAX/INT_MIN. */
8768 if (TREE_OVERFLOW (new_const
)
8769 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
8771 int const1_sgn
= tree_int_cst_sgn (const1
);
8772 enum tree_code code2
= code
;
8774 /* Get the sign of the constant on the lhs if the
8775 operation were VARIABLE + CONST1. */
8776 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8777 const1_sgn
= -const1_sgn
;
8779 /* The sign of the constant determines if we overflowed
8780 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8781 Canonicalize to the INT_MIN overflow by swapping the comparison
8783 if (const1_sgn
== -1)
8784 code2
= swap_tree_comparison (code
);
8786 /* We now can look at the canonicalized case
8787 VARIABLE + 1 CODE2 INT_MIN
8788 and decide on the result. */
8795 omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8801 omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8810 fold_overflow_warning ("assuming signed overflow does not occur "
8811 "when changing X +- C1 cmp C2 to "
8813 WARN_STRICT_OVERFLOW_COMPARISON
);
8814 return fold_build2_loc (loc
, code
, type
, variable
, new_const
);
8818 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8819 if (TREE_CODE (arg0
) == MINUS_EXPR
8821 && integer_zerop (arg1
))
8823 /* ??? The transformation is valid for the other operators if overflow
8824 is undefined for the type, but performing it here badly interacts
8825 with the transformation in fold_cond_expr_with_comparison which
8826 attempts to synthetize ABS_EXPR. */
8828 fold_overflow_warning ("assuming signed overflow does not occur "
8829 "when changing X - Y cmp 0 to X cmp Y",
8830 WARN_STRICT_OVERFLOW_COMPARISON
);
8831 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
8832 TREE_OPERAND (arg0
, 1));
8835 /* For comparisons of pointers we can decompose it to a compile time
8836 comparison of the base objects and the offsets into the object.
8837 This requires at least one operand being an ADDR_EXPR or a
8838 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8839 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8840 && (TREE_CODE (arg0
) == ADDR_EXPR
8841 || TREE_CODE (arg1
) == ADDR_EXPR
8842 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8843 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8845 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8846 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8848 int volatilep
, unsignedp
;
8849 bool indirect_base0
= false, indirect_base1
= false;
8851 /* Get base and offset for the access. Strip ADDR_EXPR for
8852 get_inner_reference, but put it back by stripping INDIRECT_REF
8853 off the base object if possible. indirect_baseN will be true
8854 if baseN is not an address but refers to the object itself. */
8856 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8858 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8859 &bitsize
, &bitpos0
, &offset0
, &mode
,
8860 &unsignedp
, &volatilep
, false);
8861 if (TREE_CODE (base0
) == INDIRECT_REF
)
8862 base0
= TREE_OPERAND (base0
, 0);
8864 indirect_base0
= true;
8866 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8868 base0
= TREE_OPERAND (arg0
, 0);
8869 STRIP_SIGN_NOPS (base0
);
8870 if (TREE_CODE (base0
) == ADDR_EXPR
)
8872 base0
= TREE_OPERAND (base0
, 0);
8873 indirect_base0
= true;
8875 offset0
= TREE_OPERAND (arg0
, 1);
8876 if (tree_fits_shwi_p (offset0
))
8878 HOST_WIDE_INT off
= size_low_cst (offset0
);
8879 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8881 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8883 bitpos0
= off
* BITS_PER_UNIT
;
8884 offset0
= NULL_TREE
;
8890 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8892 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8893 &bitsize
, &bitpos1
, &offset1
, &mode
,
8894 &unsignedp
, &volatilep
, false);
8895 if (TREE_CODE (base1
) == INDIRECT_REF
)
8896 base1
= TREE_OPERAND (base1
, 0);
8898 indirect_base1
= true;
8900 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8902 base1
= TREE_OPERAND (arg1
, 0);
8903 STRIP_SIGN_NOPS (base1
);
8904 if (TREE_CODE (base1
) == ADDR_EXPR
)
8906 base1
= TREE_OPERAND (base1
, 0);
8907 indirect_base1
= true;
8909 offset1
= TREE_OPERAND (arg1
, 1);
8910 if (tree_fits_shwi_p (offset1
))
8912 HOST_WIDE_INT off
= size_low_cst (offset1
);
8913 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8915 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8917 bitpos1
= off
* BITS_PER_UNIT
;
8918 offset1
= NULL_TREE
;
8923 /* A local variable can never be pointed to by
8924 the default SSA name of an incoming parameter. */
8925 if ((TREE_CODE (arg0
) == ADDR_EXPR
8927 && TREE_CODE (base0
) == VAR_DECL
8928 && auto_var_in_fn_p (base0
, current_function_decl
)
8930 && TREE_CODE (base1
) == SSA_NAME
8931 && SSA_NAME_IS_DEFAULT_DEF (base1
)
8932 && TREE_CODE (SSA_NAME_VAR (base1
)) == PARM_DECL
)
8933 || (TREE_CODE (arg1
) == ADDR_EXPR
8935 && TREE_CODE (base1
) == VAR_DECL
8936 && auto_var_in_fn_p (base1
, current_function_decl
)
8938 && TREE_CODE (base0
) == SSA_NAME
8939 && SSA_NAME_IS_DEFAULT_DEF (base0
)
8940 && TREE_CODE (SSA_NAME_VAR (base0
)) == PARM_DECL
))
8942 if (code
== NE_EXPR
)
8943 return constant_boolean_node (1, type
);
8944 else if (code
== EQ_EXPR
)
8945 return constant_boolean_node (0, type
);
8947 /* If we have equivalent bases we might be able to simplify. */
8948 else if (indirect_base0
== indirect_base1
8949 && operand_equal_p (base0
, base1
, 0))
8951 /* We can fold this expression to a constant if the non-constant
8952 offset parts are equal. */
8953 if ((offset0
== offset1
8954 || (offset0
&& offset1
8955 && operand_equal_p (offset0
, offset1
, 0)))
8958 || (indirect_base0
&& DECL_P (base0
))
8959 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8963 && bitpos0
!= bitpos1
8964 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8965 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8966 fold_overflow_warning (("assuming pointer wraparound does not "
8967 "occur when comparing P +- C1 with "
8969 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8974 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8976 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8978 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8980 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8982 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8984 return constant_boolean_node (bitpos0
> bitpos1
, type
);
8988 /* We can simplify the comparison to a comparison of the variable
8989 offset parts if the constant offset parts are equal.
8990 Be careful to use signed sizetype here because otherwise we
8991 mess with array offsets in the wrong way. This is possible
8992 because pointer arithmetic is restricted to retain within an
8993 object and overflow on pointer differences is undefined as of
8994 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8995 else if (bitpos0
== bitpos1
8997 || (indirect_base0
&& DECL_P (base0
))
8998 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
9000 /* By converting to signed sizetype we cover middle-end pointer
9001 arithmetic which operates on unsigned pointer types of size
9002 type size and ARRAY_REF offsets which are properly sign or
9003 zero extended from their type in case it is narrower than
9005 if (offset0
== NULL_TREE
)
9006 offset0
= build_int_cst (ssizetype
, 0);
9008 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
9009 if (offset1
== NULL_TREE
)
9010 offset1
= build_int_cst (ssizetype
, 0);
9012 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
9015 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
9016 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
9017 fold_overflow_warning (("assuming pointer wraparound does not "
9018 "occur when comparing P +- C1 with "
9020 WARN_STRICT_OVERFLOW_COMPARISON
);
9022 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
9025 /* For non-equal bases we can simplify if they are addresses
9026 declarations with different addresses. */
9027 else if (indirect_base0
&& indirect_base1
9028 /* We know that !operand_equal_p (base0, base1, 0)
9029 because the if condition was false. But make
9030 sure two decls are not the same. */
9032 && TREE_CODE (arg0
) == ADDR_EXPR
9033 && TREE_CODE (arg1
) == ADDR_EXPR
9036 /* Watch for aliases. */
9037 && (!decl_in_symtab_p (base0
)
9038 || !decl_in_symtab_p (base1
)
9039 || !symtab_node::get_create (base0
)->equal_address_to
9040 (symtab_node::get_create (base1
))))
9042 if (code
== EQ_EXPR
)
9043 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
9045 else if (code
== NE_EXPR
)
9046 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
9049 /* For equal offsets we can simplify to a comparison of the
9051 else if (bitpos0
== bitpos1
9053 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
9055 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
9056 && ((offset0
== offset1
)
9057 || (offset0
&& offset1
9058 && operand_equal_p (offset0
, offset1
, 0))))
9061 base0
= build_fold_addr_expr_loc (loc
, base0
);
9063 base1
= build_fold_addr_expr_loc (loc
, base1
);
9064 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
9068 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9069 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9070 the resulting offset is smaller in absolute value than the
9071 original one and has the same sign. */
9072 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9073 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9074 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
9075 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9076 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9077 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
9078 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9079 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
9081 tree const1
= TREE_OPERAND (arg0
, 1);
9082 tree const2
= TREE_OPERAND (arg1
, 1);
9083 tree variable1
= TREE_OPERAND (arg0
, 0);
9084 tree variable2
= TREE_OPERAND (arg1
, 0);
9086 const char * const warnmsg
= G_("assuming signed overflow does not "
9087 "occur when combining constants around "
9090 /* Put the constant on the side where it doesn't overflow and is
9091 of lower absolute value and of same sign than before. */
9092 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9093 ? MINUS_EXPR
: PLUS_EXPR
,
9095 if (!TREE_OVERFLOW (cst
)
9096 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
9097 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
9099 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9100 return fold_build2_loc (loc
, code
, type
,
9102 fold_build2_loc (loc
, TREE_CODE (arg1
),
9107 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9108 ? MINUS_EXPR
: PLUS_EXPR
,
9110 if (!TREE_OVERFLOW (cst
)
9111 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
9112 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
9114 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9115 return fold_build2_loc (loc
, code
, type
,
9116 fold_build2_loc (loc
, TREE_CODE (arg0
),
9123 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9124 signed arithmetic case. That form is created by the compiler
9125 often enough for folding it to be of value. One example is in
9126 computing loop trip counts after Operator Strength Reduction. */
9127 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9128 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9129 && TREE_CODE (arg0
) == MULT_EXPR
9130 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9131 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9132 && integer_zerop (arg1
))
9134 tree const1
= TREE_OPERAND (arg0
, 1);
9135 tree const2
= arg1
; /* zero */
9136 tree variable1
= TREE_OPERAND (arg0
, 0);
9137 enum tree_code cmp_code
= code
;
9139 /* Handle unfolded multiplication by zero. */
9140 if (integer_zerop (const1
))
9141 return fold_build2_loc (loc
, cmp_code
, type
, const1
, const2
);
9143 fold_overflow_warning (("assuming signed overflow does not occur when "
9144 "eliminating multiplication in comparison "
9146 WARN_STRICT_OVERFLOW_COMPARISON
);
9148 /* If const1 is negative we swap the sense of the comparison. */
9149 if (tree_int_cst_sgn (const1
) < 0)
9150 cmp_code
= swap_tree_comparison (cmp_code
);
9152 return fold_build2_loc (loc
, cmp_code
, type
, variable1
, const2
);
9155 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
9159 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9161 tree targ0
= strip_float_extensions (arg0
);
9162 tree targ1
= strip_float_extensions (arg1
);
9163 tree newtype
= TREE_TYPE (targ0
);
9165 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9166 newtype
= TREE_TYPE (targ1
);
9168 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9169 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9170 return fold_build2_loc (loc
, code
, type
,
9171 fold_convert_loc (loc
, newtype
, targ0
),
9172 fold_convert_loc (loc
, newtype
, targ1
));
9174 if (TREE_CODE (arg1
) == REAL_CST
)
9176 REAL_VALUE_TYPE cst
;
9177 cst
= TREE_REAL_CST (arg1
);
9179 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9180 /* a CMP (-0) -> a CMP 0 */
9181 if (REAL_VALUE_MINUS_ZERO (cst
))
9182 return fold_build2_loc (loc
, code
, type
, arg0
,
9183 build_real (TREE_TYPE (arg1
), dconst0
));
9185 /* x != NaN is always true, other ops are always false. */
9186 if (REAL_VALUE_ISNAN (cst
)
9187 && ! HONOR_SNANS (arg1
))
9189 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
9190 return omit_one_operand_loc (loc
, type
, tem
, arg0
);
9193 /* Fold comparisons against infinity. */
9194 if (REAL_VALUE_ISINF (cst
)
9195 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
))))
9197 tem
= fold_inf_compare (loc
, code
, type
, arg0
, arg1
);
9198 if (tem
!= NULL_TREE
)
9203 /* If this is a comparison of a real constant with a PLUS_EXPR
9204 or a MINUS_EXPR of a real constant, we can convert it into a
9205 comparison with a revised real constant as long as no overflow
9206 occurs when unsafe_math_optimizations are enabled. */
9207 if (flag_unsafe_math_optimizations
9208 && TREE_CODE (arg1
) == REAL_CST
9209 && (TREE_CODE (arg0
) == PLUS_EXPR
9210 || TREE_CODE (arg0
) == MINUS_EXPR
)
9211 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
9212 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9213 ? MINUS_EXPR
: PLUS_EXPR
,
9214 arg1
, TREE_OPERAND (arg0
, 1)))
9215 && !TREE_OVERFLOW (tem
))
9216 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9218 /* Likewise, we can simplify a comparison of a real constant with
9219 a MINUS_EXPR whose first operand is also a real constant, i.e.
9220 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9221 floating-point types only if -fassociative-math is set. */
9222 if (flag_associative_math
9223 && TREE_CODE (arg1
) == REAL_CST
9224 && TREE_CODE (arg0
) == MINUS_EXPR
9225 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
9226 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
9228 && !TREE_OVERFLOW (tem
))
9229 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9230 TREE_OPERAND (arg0
, 1), tem
);
9232 /* Fold comparisons against built-in math functions. */
9233 if (TREE_CODE (arg1
) == REAL_CST
9234 && flag_unsafe_math_optimizations
9235 && ! flag_errno_math
)
9237 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
9239 if (fcode
!= END_BUILTINS
)
9241 tem
= fold_mathfn_compare (loc
, fcode
, code
, type
, arg0
, arg1
);
9242 if (tem
!= NULL_TREE
)
9248 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
9249 && CONVERT_EXPR_P (arg0
))
9251 /* If we are widening one operand of an integer comparison,
9252 see if the other operand is similarly being widened. Perhaps we
9253 can do the comparison in the narrower type. */
9254 tem
= fold_widened_comparison (loc
, code
, type
, arg0
, arg1
);
9258 /* Or if we are changing signedness. */
9259 tem
= fold_sign_changed_comparison (loc
, code
, type
, arg0
, arg1
);
9264 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9265 constant, we can simplify it. */
9266 if (TREE_CODE (arg1
) == INTEGER_CST
9267 && (TREE_CODE (arg0
) == MIN_EXPR
9268 || TREE_CODE (arg0
) == MAX_EXPR
)
9269 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9271 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
9276 /* Simplify comparison of something with itself. (For IEEE
9277 floating-point, we can only do some of these simplifications.) */
9278 if (operand_equal_p (arg0
, arg1
, 0))
9283 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9284 || ! HONOR_NANS (arg0
))
9285 return constant_boolean_node (1, type
);
9290 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9291 || ! HONOR_NANS (arg0
))
9292 return constant_boolean_node (1, type
);
9293 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg1
);
9296 /* For NE, we can only do this simplification if integer
9297 or we don't honor IEEE floating point NaNs. */
9298 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
9299 && HONOR_NANS (arg0
))
9301 /* ... fall through ... */
9304 return constant_boolean_node (0, type
);
9310 /* If we are comparing an expression that just has comparisons
9311 of two integer values, arithmetic expressions of those comparisons,
9312 and constants, we can simplify it. There are only three cases
9313 to check: the two values can either be equal, the first can be
9314 greater, or the second can be greater. Fold the expression for
9315 those three values. Since each value must be 0 or 1, we have
9316 eight possibilities, each of which corresponds to the constant 0
9317 or 1 or one of the six possible comparisons.
9319 This handles common cases like (a > b) == 0 but also handles
9320 expressions like ((x > y) - (y > x)) > 0, which supposedly
9321 occur in macroized code. */
9323 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
9325 tree cval1
= 0, cval2
= 0;
9328 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
9329 /* Don't handle degenerate cases here; they should already
9330 have been handled anyway. */
9331 && cval1
!= 0 && cval2
!= 0
9332 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
9333 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
9334 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
9335 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
9336 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
9337 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
9338 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
9340 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
9341 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
9343 /* We can't just pass T to eval_subst in case cval1 or cval2
9344 was the same as ARG1. */
9347 = fold_build2_loc (loc
, code
, type
,
9348 eval_subst (loc
, arg0
, cval1
, maxval
,
9352 = fold_build2_loc (loc
, code
, type
,
9353 eval_subst (loc
, arg0
, cval1
, maxval
,
9357 = fold_build2_loc (loc
, code
, type
,
9358 eval_subst (loc
, arg0
, cval1
, minval
,
9362 /* All three of these results should be 0 or 1. Confirm they are.
9363 Then use those values to select the proper code to use. */
9365 if (TREE_CODE (high_result
) == INTEGER_CST
9366 && TREE_CODE (equal_result
) == INTEGER_CST
9367 && TREE_CODE (low_result
) == INTEGER_CST
)
9369 /* Make a 3-bit mask with the high-order bit being the
9370 value for `>', the next for '=', and the low for '<'. */
9371 switch ((integer_onep (high_result
) * 4)
9372 + (integer_onep (equal_result
) * 2)
9373 + integer_onep (low_result
))
9377 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
9398 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
9403 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
9404 SET_EXPR_LOCATION (tem
, loc
);
9407 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
9412 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9413 into a single range test. */
9414 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
9415 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
9416 && TREE_CODE (arg1
) == INTEGER_CST
9417 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9418 && !integer_zerop (TREE_OPERAND (arg0
, 1))
9419 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
9420 && !TREE_OVERFLOW (arg1
))
9422 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
9423 if (tem
!= NULL_TREE
)
9427 /* Fold ~X op ~Y as Y op X. */
9428 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9429 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9431 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9432 return fold_build2_loc (loc
, code
, type
,
9433 fold_convert_loc (loc
, cmp_type
,
9434 TREE_OPERAND (arg1
, 0)),
9435 TREE_OPERAND (arg0
, 0));
9438 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9439 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9440 && (TREE_CODE (arg1
) == INTEGER_CST
|| TREE_CODE (arg1
) == VECTOR_CST
))
9442 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9443 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9444 TREE_OPERAND (arg0
, 0),
9445 fold_build1_loc (loc
, BIT_NOT_EXPR
, cmp_type
,
9446 fold_convert_loc (loc
, cmp_type
, arg1
)));
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  /* ITYPE is the scalar element type of the complex type TYPE.  */
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  /* Extract the real and imaginary parts.  When EXPR is already a
     COMPLEX_EXPR or COMPLEX_CST the parts are available directly;
     otherwise wrap EXPR in a SAVE_EXPR so it is evaluated only once
     and pull the parts out with REALPART_EXPR/IMAGPART_EXPR.  */
  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  /* z * conj(z) = (r*r + i*i) + 0i.  SAVE_EXPR each part because it is
     used twice in the product below.  */
  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      /* Address of an object: its known alignment gives the modulus and
	 the byte offset within the alignment gives the residue.  */
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      /* Recurse on the base pointer; the added offset then adjusts the
	 residue or, for a multiplied offset, may weaken the modulus.  */
      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  /* A constant offset simply shifts the residue.  */
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align., If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.
   ELTS must have room for TYPE_VECTOR_SUBPARTS (TREE_TYPE (ARG))
   entries; missing trailing CONSTRUCTOR elements are filled with zero.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      /* Reject CONSTRUCTORs with too many elements or with
	 vector-typed elements (those cannot be flattened here).  */
      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;

  /* Elements a CONSTRUCTOR leaves implicit are zero.  */
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  SEL holds NELTS indices into the concatenation
   of ARG0's and ARG1's elements.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  /* ELTS holds ARG0's elements (first NELTS), ARG1's (next NELTS) and
     the permuted result (final NELTS).  */
  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      /* Any non-constant selected element forces a CONSTRUCTOR result
	 rather than a VECTOR_CST.  */
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    return build_vector (type, &elts[2 * nelts]);
}
/* Try to fold a pointer difference of type TYPE two address expressions of
   array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  /* Difference of the bases; zero when the bases are equal.  */
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      /* &a[i] - &a[j] folds to base_offset + (i - j) * element_size.  */
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  For a VECTOR_CST, every
   element must have an exact inverse for the fold to succeed.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      /* exact_real_inverse succeeds only when 1/r is representable
	 exactly in TYPE's mode.  */
      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
	{
	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	  if (!exact_real_inverse (mode, &r))
	    return NULL_TREE;
	  elts[i] = build_real (unit_type, r);
	}

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
9719 /* Mask out the tz least significant bits of X of type TYPE where
9720 tz is the number of trailing zeroes in Y. */
9722 mask_with_tz (tree type
, const wide_int
&x
, const wide_int
&y
)
9724 int tz
= wi::ctz (y
);
9726 return wi::mask (tz
, true, TYPE_PRECISION (type
)) & x
;
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  /* First dispatch on the code class, then on individual codes the
     class dispatch does not cover.  */
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* The value of these is the value of the second operand.  */
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl) return false;
	/* A throwing operator new never returns NULL (unless
	   -fcheck-new or null-pointer checks are disabled).  */
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	/* Functions declared returns_nonnull are trusted.  */
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
				 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	/* alloca never returns NULL.  */
	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
9823 /* Return true when T is an address and is known to be nonzero.
9824 Handle warnings about undefined signed overflow. */
9827 tree_expr_nonzero_p (tree t
)
9829 bool ret
, strict_overflow_p
;
9831 strict_overflow_p
= false;
9832 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
9833 if (strict_overflow_p
)
9834 fold_overflow_warning (("assuming signed overflow does not occur when "
9835 "determining that expression is always "
9837 WARN_STRICT_OVERFLOW_MISC
);
9841 /* Fold a binary expression of code CODE and type TYPE with operands
9842 OP0 and OP1. LOC is the location of the resulting expression.
9843 Return the folded expression if folding is successful. Otherwise,
9844 return NULL_TREE. */
9847 fold_binary_loc (location_t loc
,
9848 enum tree_code code
, tree type
, tree op0
, tree op1
)
9850 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9851 tree arg0
, arg1
, tem
;
9852 tree t1
= NULL_TREE
;
9853 bool strict_overflow_p
;
9856 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9857 && TREE_CODE_LENGTH (code
) == 2
9859 && op1
!= NULL_TREE
);
9864 /* Strip any conversions that don't change the mode. This is
9865 safe for every expression, except for a comparison expression
9866 because its signedness is derived from its operands. So, in
9867 the latter case, only strip conversions that don't change the
9868 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9871 Note that this is done as an internal manipulation within the
9872 constant folder, in order to find the simplest representation
9873 of the arguments so that their form can be studied. In any
9874 cases, the appropriate type conversions should be put back in
9875 the tree that will get out of the constant folder. */
9877 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9879 STRIP_SIGN_NOPS (arg0
);
9880 STRIP_SIGN_NOPS (arg1
);
9888 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9889 constant but we can't do arithmetic on them. */
9890 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
9892 tem
= const_binop (code
, type
, arg0
, arg1
);
9893 if (tem
!= NULL_TREE
)
9895 if (TREE_TYPE (tem
) != type
)
9896 tem
= fold_convert_loc (loc
, type
, tem
);
9901 /* If this is a commutative operation, and ARG0 is a constant, move it
9902 to ARG1 to reduce the number of tests below. */
9903 if (commutative_tree_code (code
)
9904 && tree_swap_operands_p (arg0
, arg1
, true))
9905 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9907 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9908 to ARG1 to reduce the number of tests below. */
9909 if (kind
== tcc_comparison
9910 && tree_swap_operands_p (arg0
, arg1
, true))
9911 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9913 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9917 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9919 First check for cases where an arithmetic operation is applied to a
9920 compound, conditional, or comparison operation. Push the arithmetic
9921 operation inside the compound or conditional to see if any folding
9922 can then be done. Convert comparison to conditional for this purpose.
9923 The also optimizes non-constant cases that used to be done in
9926 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9927 one of the operands is a comparison and the other is a comparison, a
9928 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9929 code below would make the expression more complex. Change it to a
9930 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9931 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9933 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9934 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9935 && TREE_CODE (type
) != VECTOR_TYPE
9936 && ((truth_value_p (TREE_CODE (arg0
))
9937 && (truth_value_p (TREE_CODE (arg1
))
9938 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9939 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9940 || (truth_value_p (TREE_CODE (arg1
))
9941 && (truth_value_p (TREE_CODE (arg0
))
9942 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9943 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9945 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9946 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9949 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9950 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9952 if (code
== EQ_EXPR
)
9953 tem
= invert_truthvalue_loc (loc
, tem
);
9955 return fold_convert_loc (loc
, type
, tem
);
9958 if (TREE_CODE_CLASS (code
) == tcc_binary
9959 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9961 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9963 tem
= fold_build2_loc (loc
, code
, type
,
9964 fold_convert_loc (loc
, TREE_TYPE (op0
),
9965 TREE_OPERAND (arg0
, 1)), op1
);
9966 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9969 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9970 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9972 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9973 fold_convert_loc (loc
, TREE_TYPE (op1
),
9974 TREE_OPERAND (arg1
, 1)));
9975 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9979 if (TREE_CODE (arg0
) == COND_EXPR
9980 || TREE_CODE (arg0
) == VEC_COND_EXPR
9981 || COMPARISON_CLASS_P (arg0
))
9983 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9985 /*cond_first_p=*/1);
9986 if (tem
!= NULL_TREE
)
9990 if (TREE_CODE (arg1
) == COND_EXPR
9991 || TREE_CODE (arg1
) == VEC_COND_EXPR
9992 || COMPARISON_CLASS_P (arg1
))
9994 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9996 /*cond_first_p=*/0);
9997 if (tem
!= NULL_TREE
)
10005 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10006 if (TREE_CODE (arg0
) == ADDR_EXPR
10007 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
10009 tree iref
= TREE_OPERAND (arg0
, 0);
10010 return fold_build2 (MEM_REF
, type
,
10011 TREE_OPERAND (iref
, 0),
10012 int_const_binop (PLUS_EXPR
, arg1
,
10013 TREE_OPERAND (iref
, 1)));
10016 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10017 if (TREE_CODE (arg0
) == ADDR_EXPR
10018 && handled_component_p (TREE_OPERAND (arg0
, 0)))
10021 HOST_WIDE_INT coffset
;
10022 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
10026 return fold_build2 (MEM_REF
, type
,
10027 build_fold_addr_expr (base
),
10028 int_const_binop (PLUS_EXPR
, arg1
,
10029 size_int (coffset
)));
10034 case POINTER_PLUS_EXPR
:
10035 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10036 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10037 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
10038 return fold_convert_loc (loc
, type
,
10039 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
10040 fold_convert_loc (loc
, sizetype
,
10042 fold_convert_loc (loc
, sizetype
,
10048 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
10050 /* X + (X / CST) * -CST is X % CST. */
10051 if (TREE_CODE (arg1
) == MULT_EXPR
10052 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10053 && operand_equal_p (arg0
,
10054 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
10056 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
10057 tree cst1
= TREE_OPERAND (arg1
, 1);
10058 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
10060 if (sum
&& integer_zerop (sum
))
10061 return fold_convert_loc (loc
, type
,
10062 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
10063 TREE_TYPE (arg0
), arg0
,
10068 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10069 one. Make sure the type is not saturating and has the signedness of
10070 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10071 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10072 if ((TREE_CODE (arg0
) == MULT_EXPR
10073 || TREE_CODE (arg1
) == MULT_EXPR
)
10074 && !TYPE_SATURATING (type
)
10075 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10076 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10077 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10079 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10084 if (! FLOAT_TYPE_P (type
))
10086 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10087 with a constant, and the two constants have no bits in common,
10088 we should treat this as a BIT_IOR_EXPR since this may produce more
10089 simplifications. */
10090 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10091 && TREE_CODE (arg1
) == BIT_AND_EXPR
10092 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10093 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
10094 && wi::bit_and (TREE_OPERAND (arg0
, 1),
10095 TREE_OPERAND (arg1
, 1)) == 0)
10097 code
= BIT_IOR_EXPR
;
10101 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10102 (plus (plus (mult) (mult)) (foo)) so that we can
10103 take advantage of the factoring cases below. */
10104 if (ANY_INTEGRAL_TYPE_P (type
)
10105 && TYPE_OVERFLOW_WRAPS (type
)
10106 && (((TREE_CODE (arg0
) == PLUS_EXPR
10107 || TREE_CODE (arg0
) == MINUS_EXPR
)
10108 && TREE_CODE (arg1
) == MULT_EXPR
)
10109 || ((TREE_CODE (arg1
) == PLUS_EXPR
10110 || TREE_CODE (arg1
) == MINUS_EXPR
)
10111 && TREE_CODE (arg0
) == MULT_EXPR
)))
10113 tree parg0
, parg1
, parg
, marg
;
10114 enum tree_code pcode
;
10116 if (TREE_CODE (arg1
) == MULT_EXPR
)
10117 parg
= arg0
, marg
= arg1
;
10119 parg
= arg1
, marg
= arg0
;
10120 pcode
= TREE_CODE (parg
);
10121 parg0
= TREE_OPERAND (parg
, 0);
10122 parg1
= TREE_OPERAND (parg
, 1);
10123 STRIP_NOPS (parg0
);
10124 STRIP_NOPS (parg1
);
10126 if (TREE_CODE (parg0
) == MULT_EXPR
10127 && TREE_CODE (parg1
) != MULT_EXPR
)
10128 return fold_build2_loc (loc
, pcode
, type
,
10129 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10130 fold_convert_loc (loc
, type
,
10132 fold_convert_loc (loc
, type
,
10134 fold_convert_loc (loc
, type
, parg1
));
10135 if (TREE_CODE (parg0
) != MULT_EXPR
10136 && TREE_CODE (parg1
) == MULT_EXPR
)
10138 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10139 fold_convert_loc (loc
, type
, parg0
),
10140 fold_build2_loc (loc
, pcode
, type
,
10141 fold_convert_loc (loc
, type
, marg
),
10142 fold_convert_loc (loc
, type
,
10148 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10149 to __complex__ ( x, y ). This is not the same for SNaNs or
10150 if signed zeros are involved. */
10151 if (!HONOR_SNANS (element_mode (arg0
))
10152 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10153 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10155 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10156 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10157 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10158 bool arg0rz
= false, arg0iz
= false;
10159 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10160 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10162 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10163 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10164 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10166 tree rp
= arg1r
? arg1r
10167 : build1 (REALPART_EXPR
, rtype
, arg1
);
10168 tree ip
= arg0i
? arg0i
10169 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10170 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10172 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10174 tree rp
= arg0r
? arg0r
10175 : build1 (REALPART_EXPR
, rtype
, arg0
);
10176 tree ip
= arg1i
? arg1i
10177 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
10178 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10183 if (flag_unsafe_math_optimizations
10184 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10185 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10186 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10189 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10190 We associate floats only if the user has specified
10191 -fassociative-math. */
10192 if (flag_associative_math
10193 && TREE_CODE (arg1
) == PLUS_EXPR
10194 && TREE_CODE (arg0
) != MULT_EXPR
)
10196 tree tree10
= TREE_OPERAND (arg1
, 0);
10197 tree tree11
= TREE_OPERAND (arg1
, 1);
10198 if (TREE_CODE (tree11
) == MULT_EXPR
10199 && TREE_CODE (tree10
) == MULT_EXPR
)
10202 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
10203 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
10206 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10207 We associate floats only if the user has specified
10208 -fassociative-math. */
10209 if (flag_associative_math
10210 && TREE_CODE (arg0
) == PLUS_EXPR
10211 && TREE_CODE (arg1
) != MULT_EXPR
)
10213 tree tree00
= TREE_OPERAND (arg0
, 0);
10214 tree tree01
= TREE_OPERAND (arg0
, 1);
10215 if (TREE_CODE (tree01
) == MULT_EXPR
10216 && TREE_CODE (tree00
) == MULT_EXPR
)
10219 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
10220 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
10226 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10227 is a rotate of A by C1 bits. */
10228 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10229 is a rotate of A by B bits. */
10231 enum tree_code code0
, code1
;
10233 code0
= TREE_CODE (arg0
);
10234 code1
= TREE_CODE (arg1
);
10235 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
10236 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
10237 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10238 TREE_OPERAND (arg1
, 0), 0)
10239 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10240 TYPE_UNSIGNED (rtype
))
10241 /* Only create rotates in complete modes. Other cases are not
10242 expanded properly. */
10243 && (element_precision (rtype
)
10244 == element_precision (TYPE_MODE (rtype
))))
10246 tree tree01
, tree11
;
10247 enum tree_code code01
, code11
;
10249 tree01
= TREE_OPERAND (arg0
, 1);
10250 tree11
= TREE_OPERAND (arg1
, 1);
10251 STRIP_NOPS (tree01
);
10252 STRIP_NOPS (tree11
);
10253 code01
= TREE_CODE (tree01
);
10254 code11
= TREE_CODE (tree11
);
10255 if (code01
== INTEGER_CST
10256 && code11
== INTEGER_CST
10257 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
10258 == element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
10260 tem
= build2_loc (loc
, LROTATE_EXPR
,
10261 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10262 TREE_OPERAND (arg0
, 0),
10263 code0
== LSHIFT_EXPR
10264 ? TREE_OPERAND (arg0
, 1)
10265 : TREE_OPERAND (arg1
, 1));
10266 return fold_convert_loc (loc
, type
, tem
);
10268 else if (code11
== MINUS_EXPR
)
10270 tree tree110
, tree111
;
10271 tree110
= TREE_OPERAND (tree11
, 0);
10272 tree111
= TREE_OPERAND (tree11
, 1);
10273 STRIP_NOPS (tree110
);
10274 STRIP_NOPS (tree111
);
10275 if (TREE_CODE (tree110
) == INTEGER_CST
10276 && 0 == compare_tree_int (tree110
,
10278 (TREE_TYPE (TREE_OPERAND
10280 && operand_equal_p (tree01
, tree111
, 0))
10282 fold_convert_loc (loc
, type
,
10283 build2 ((code0
== LSHIFT_EXPR
10286 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10287 TREE_OPERAND (arg0
, 0),
10288 TREE_OPERAND (arg0
, 1)));
10290 else if (code01
== MINUS_EXPR
)
10292 tree tree010
, tree011
;
10293 tree010
= TREE_OPERAND (tree01
, 0);
10294 tree011
= TREE_OPERAND (tree01
, 1);
10295 STRIP_NOPS (tree010
);
10296 STRIP_NOPS (tree011
);
10297 if (TREE_CODE (tree010
) == INTEGER_CST
10298 && 0 == compare_tree_int (tree010
,
10300 (TREE_TYPE (TREE_OPERAND
10302 && operand_equal_p (tree11
, tree011
, 0))
10303 return fold_convert_loc
10305 build2 ((code0
!= LSHIFT_EXPR
10308 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10309 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1)));
10315 /* In most languages, can't associate operations on floats through
10316 parentheses. Rather than remember where the parentheses were, we
10317 don't associate floats at all, unless the user has specified
10318 -fassociative-math.
10319 And, we need to make sure type is not saturating. */
10321 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
10322 && !TYPE_SATURATING (type
))
10324 tree var0
, con0
, lit0
, minus_lit0
;
10325 tree var1
, con1
, lit1
, minus_lit1
;
10329 /* Split both trees into variables, constants, and literals. Then
10330 associate each group together, the constants with literals,
10331 then the result with variables. This increases the chances of
10332 literals being recombined later and of generating relocatable
10333 expressions for the sum of a constant and literal. */
10334 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
10335 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
10336 code
== MINUS_EXPR
);
10338 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10339 if (code
== MINUS_EXPR
)
10342 /* With undefined overflow prefer doing association in a type
10343 which wraps on overflow, if that is one of the operand types. */
10344 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10345 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
10347 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10348 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10349 atype
= TREE_TYPE (arg0
);
10350 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10351 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
10352 atype
= TREE_TYPE (arg1
);
10353 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
10356 /* With undefined overflow we can only associate constants with one
10357 variable, and constants whose association doesn't overflow. */
10358 if ((POINTER_TYPE_P (atype
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10359 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
10366 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
10367 tmp0
= TREE_OPERAND (tmp0
, 0);
10368 if (CONVERT_EXPR_P (tmp0
)
10369 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10370 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10371 <= TYPE_PRECISION (atype
)))
10372 tmp0
= TREE_OPERAND (tmp0
, 0);
10373 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
10374 tmp1
= TREE_OPERAND (tmp1
, 0);
10375 if (CONVERT_EXPR_P (tmp1
)
10376 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10377 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10378 <= TYPE_PRECISION (atype
)))
10379 tmp1
= TREE_OPERAND (tmp1
, 0);
10380 /* The only case we can still associate with two variables
10381 is if they are the same, modulo negation and bit-pattern
10382 preserving conversions. */
10383 if (!operand_equal_p (tmp0
, tmp1
, 0))
10388 /* Only do something if we found more than two objects. Otherwise,
10389 nothing has changed and we risk infinite recursion. */
10391 && (2 < ((var0
!= 0) + (var1
!= 0)
10392 + (con0
!= 0) + (con1
!= 0)
10393 + (lit0
!= 0) + (lit1
!= 0)
10394 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
10396 bool any_overflows
= false;
10397 if (lit0
) any_overflows
|= TREE_OVERFLOW (lit0
);
10398 if (lit1
) any_overflows
|= TREE_OVERFLOW (lit1
);
10399 if (minus_lit0
) any_overflows
|= TREE_OVERFLOW (minus_lit0
);
10400 if (minus_lit1
) any_overflows
|= TREE_OVERFLOW (minus_lit1
);
10401 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
10402 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
10403 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
10404 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
10407 /* Preserve the MINUS_EXPR if the negative part of the literal is
10408 greater than the positive part. Otherwise, the multiplicative
10409 folding code (i.e extract_muldiv) may be fooled in case
10410 unsigned constants are subtracted, like in the following
10411 example: ((X*2 + 4) - 8U)/2. */
10412 if (minus_lit0
&& lit0
)
10414 if (TREE_CODE (lit0
) == INTEGER_CST
10415 && TREE_CODE (minus_lit0
) == INTEGER_CST
10416 && tree_int_cst_lt (lit0
, minus_lit0
))
10418 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
10419 MINUS_EXPR
, atype
);
10424 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
10425 MINUS_EXPR
, atype
);
10430 /* Don't introduce overflows through reassociation. */
10432 && ((lit0
&& TREE_OVERFLOW_P (lit0
))
10433 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
))))
10440 fold_convert_loc (loc
, type
,
10441 associate_trees (loc
, var0
, minus_lit0
,
10442 MINUS_EXPR
, atype
));
10445 con0
= associate_trees (loc
, con0
, minus_lit0
,
10446 MINUS_EXPR
, atype
);
10448 fold_convert_loc (loc
, type
,
10449 associate_trees (loc
, var0
, con0
,
10450 PLUS_EXPR
, atype
));
10454 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
10456 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
10464 /* Pointer simplifications for subtraction, simple reassociations. */
10465 if (POINTER_TYPE_P (TREE_TYPE (arg1
)) && POINTER_TYPE_P (TREE_TYPE (arg0
)))
10467 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10468 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10469 && TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10471 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10472 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10473 tree arg10
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10474 tree arg11
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10475 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10476 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10478 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10481 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10482 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
10484 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10485 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10486 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
, arg00
,
10487 fold_convert_loc (loc
, type
, arg1
));
10489 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tmp
, arg01
);
10491 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10493 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10495 tree arg10
= fold_convert_loc (loc
, type
,
10496 TREE_OPERAND (arg1
, 0));
10497 tree arg11
= fold_convert_loc (loc
, type
,
10498 TREE_OPERAND (arg1
, 1));
10499 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
10500 fold_convert_loc (loc
, type
, arg0
),
10503 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tmp
, arg11
);
10506 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10507 if (TREE_CODE (arg0
) == NEGATE_EXPR
10508 && negate_expr_p (arg1
)
10509 && reorder_operands_p (arg0
, arg1
))
10510 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10511 fold_convert_loc (loc
, type
,
10512 negate_expr (arg1
)),
10513 fold_convert_loc (loc
, type
,
10514 TREE_OPERAND (arg0
, 0)));
10516 /* X - (X / Y) * Y is X % Y. */
10517 if ((INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
10518 && TREE_CODE (arg1
) == MULT_EXPR
10519 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10520 && operand_equal_p (arg0
,
10521 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0)
10522 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1),
10523 TREE_OPERAND (arg1
, 1), 0))
10525 fold_convert_loc (loc
, type
,
10526 fold_build2_loc (loc
, TRUNC_MOD_EXPR
, TREE_TYPE (arg0
),
10527 arg0
, TREE_OPERAND (arg1
, 1)));
10529 if (! FLOAT_TYPE_P (type
))
10531 /* Fold A - (A & B) into ~B & A. */
10532 if (!TREE_SIDE_EFFECTS (arg0
)
10533 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10535 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
10537 tree arg10
= fold_convert_loc (loc
, type
,
10538 TREE_OPERAND (arg1
, 0));
10539 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10540 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10542 fold_convert_loc (loc
, type
, arg0
));
10544 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10546 tree arg11
= fold_convert_loc (loc
,
10547 type
, TREE_OPERAND (arg1
, 1));
10548 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10549 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10551 fold_convert_loc (loc
, type
, arg0
));
10555 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10556 any power of 2 minus 1. */
10557 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10558 && TREE_CODE (arg1
) == BIT_AND_EXPR
10559 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10560 TREE_OPERAND (arg1
, 0), 0))
10562 tree mask0
= TREE_OPERAND (arg0
, 1);
10563 tree mask1
= TREE_OPERAND (arg1
, 1);
10564 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
10566 if (operand_equal_p (tem
, mask1
, 0))
10568 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
10569 TREE_OPERAND (arg0
, 0), mask1
);
10570 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
10575 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10576 __complex__ ( x, -y ). This is not the same for SNaNs or if
10577 signed zeros are involved. */
10578 if (!HONOR_SNANS (element_mode (arg0
))
10579 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10580 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10582 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10583 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10584 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10585 bool arg0rz
= false, arg0iz
= false;
10586 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10587 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10589 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10590 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10591 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10593 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10595 : build1 (REALPART_EXPR
, rtype
, arg1
));
10596 tree ip
= arg0i
? arg0i
10597 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10598 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10600 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10602 tree rp
= arg0r
? arg0r
10603 : build1 (REALPART_EXPR
, rtype
, arg0
);
10604 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10606 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10607 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10612 /* A - B -> A + (-B) if B is easily negatable. */
10613 if (negate_expr_p (arg1
)
10614 && !TYPE_OVERFLOW_SANITIZED (type
)
10615 && ((FLOAT_TYPE_P (type
)
10616 /* Avoid this transformation if B is a positive REAL_CST. */
10617 && (TREE_CODE (arg1
) != REAL_CST
10618 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
10619 || INTEGRAL_TYPE_P (type
)))
10620 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10621 fold_convert_loc (loc
, type
, arg0
),
10622 fold_convert_loc (loc
, type
,
10623 negate_expr (arg1
)));
10625 /* Try folding difference of addresses. */
10627 HOST_WIDE_INT diff
;
10629 if ((TREE_CODE (arg0
) == ADDR_EXPR
10630 || TREE_CODE (arg1
) == ADDR_EXPR
)
10631 && ptr_difference_const (arg0
, arg1
, &diff
))
10632 return build_int_cst_type (type
, diff
);
10635 /* Fold &a[i] - &a[j] to i-j. */
10636 if (TREE_CODE (arg0
) == ADDR_EXPR
10637 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10638 && TREE_CODE (arg1
) == ADDR_EXPR
10639 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10641 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
10642 TREE_OPERAND (arg0
, 0),
10643 TREE_OPERAND (arg1
, 0));
10648 if (FLOAT_TYPE_P (type
)
10649 && flag_unsafe_math_optimizations
10650 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10651 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10652 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10655 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10656 one. Make sure the type is not saturating and has the signedness of
10657 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10658 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10659 if ((TREE_CODE (arg0
) == MULT_EXPR
10660 || TREE_CODE (arg1
) == MULT_EXPR
)
10661 && !TYPE_SATURATING (type
)
10662 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10663 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10664 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10666 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10674 /* (-A) * (-B) -> A * B */
10675 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10676 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10677 fold_convert_loc (loc
, type
,
10678 TREE_OPERAND (arg0
, 0)),
10679 fold_convert_loc (loc
, type
,
10680 negate_expr (arg1
)));
10681 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10682 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10683 fold_convert_loc (loc
, type
,
10684 negate_expr (arg0
)),
10685 fold_convert_loc (loc
, type
,
10686 TREE_OPERAND (arg1
, 0)));
10688 if (! FLOAT_TYPE_P (type
))
10690 /* Transform x * -C into -x * C if x is easily negatable. */
10691 if (TREE_CODE (arg1
) == INTEGER_CST
10692 && tree_int_cst_sgn (arg1
) == -1
10693 && negate_expr_p (arg0
)
10694 && (tem
= negate_expr (arg1
)) != arg1
10695 && !TREE_OVERFLOW (tem
))
10696 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10697 fold_convert_loc (loc
, type
,
10698 negate_expr (arg0
)),
10701 /* (a * (1 << b)) is (a << b) */
10702 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10703 && integer_onep (TREE_OPERAND (arg1
, 0)))
10704 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op0
,
10705 TREE_OPERAND (arg1
, 1));
10706 if (TREE_CODE (arg0
) == LSHIFT_EXPR
10707 && integer_onep (TREE_OPERAND (arg0
, 0)))
10708 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op1
,
10709 TREE_OPERAND (arg0
, 1));
10711 /* (A + A) * C -> A * 2 * C */
10712 if (TREE_CODE (arg0
) == PLUS_EXPR
10713 && TREE_CODE (arg1
) == INTEGER_CST
10714 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10715 TREE_OPERAND (arg0
, 1), 0))
10716 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10717 omit_one_operand_loc (loc
, type
,
10718 TREE_OPERAND (arg0
, 0),
10719 TREE_OPERAND (arg0
, 1)),
10720 fold_build2_loc (loc
, MULT_EXPR
, type
,
10721 build_int_cst (type
, 2) , arg1
));
10723 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10724 sign-changing only. */
10725 if (TREE_CODE (arg1
) == INTEGER_CST
10726 && TREE_CODE (arg0
) == EXACT_DIV_EXPR
10727 && operand_equal_p (arg1
, TREE_OPERAND (arg0
, 1), 0))
10728 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10730 strict_overflow_p
= false;
10731 if (TREE_CODE (arg1
) == INTEGER_CST
10732 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10733 &strict_overflow_p
)))
10735 if (strict_overflow_p
)
10736 fold_overflow_warning (("assuming signed overflow does not "
10737 "occur when simplifying "
10739 WARN_STRICT_OVERFLOW_MISC
);
10740 return fold_convert_loc (loc
, type
, tem
);
10743 /* Optimize z * conj(z) for integer complex numbers. */
10744 if (TREE_CODE (arg0
) == CONJ_EXPR
10745 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10746 return fold_mult_zconjz (loc
, type
, arg1
);
10747 if (TREE_CODE (arg1
) == CONJ_EXPR
10748 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10749 return fold_mult_zconjz (loc
, type
, arg0
);
10753 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10754 the result for floating point types due to rounding so it is applied
10755 only if -fassociative-math was specify. */
10756 if (flag_associative_math
10757 && TREE_CODE (arg0
) == RDIV_EXPR
10758 && TREE_CODE (arg1
) == REAL_CST
10759 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
10761 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
10764 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10765 TREE_OPERAND (arg0
, 1));
10768 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10769 if (operand_equal_p (arg0
, arg1
, 0))
10771 tree tem
= fold_strip_sign_ops (arg0
);
10772 if (tem
!= NULL_TREE
)
10774 tem
= fold_convert_loc (loc
, type
, tem
);
10775 return fold_build2_loc (loc
, MULT_EXPR
, type
, tem
, tem
);
10779 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10780 This is not the same for NaNs or if signed zeros are
10782 if (!HONOR_NANS (arg0
)
10783 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10784 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10785 && TREE_CODE (arg1
) == COMPLEX_CST
10786 && real_zerop (TREE_REALPART (arg1
)))
10788 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10789 if (real_onep (TREE_IMAGPART (arg1
)))
10791 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10792 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
10794 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
10795 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
10797 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10798 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
10799 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
10803 /* Optimize z * conj(z) for floating point complex numbers.
10804 Guarded by flag_unsafe_math_optimizations as non-finite
10805 imaginary components don't produce scalar results. */
10806 if (flag_unsafe_math_optimizations
10807 && TREE_CODE (arg0
) == CONJ_EXPR
10808 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10809 return fold_mult_zconjz (loc
, type
, arg1
);
10810 if (flag_unsafe_math_optimizations
10811 && TREE_CODE (arg1
) == CONJ_EXPR
10812 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10813 return fold_mult_zconjz (loc
, type
, arg0
);
10815 if (flag_unsafe_math_optimizations
)
10817 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10818 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10820 /* Optimizations of root(...)*root(...). */
10821 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
10824 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10825 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10827 /* Optimize sqrt(x)*sqrt(x) as x. */
10828 if (BUILTIN_SQRT_P (fcode0
)
10829 && operand_equal_p (arg00
, arg10
, 0)
10830 && ! HONOR_SNANS (element_mode (type
)))
10833 /* Optimize root(x)*root(y) as root(x*y). */
10834 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10835 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg00
, arg10
);
10836 return build_call_expr_loc (loc
, rootfn
, 1, arg
);
10839 /* Optimize expN(x)*expN(y) as expN(x+y). */
10840 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
10842 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10843 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10844 CALL_EXPR_ARG (arg0
, 0),
10845 CALL_EXPR_ARG (arg1
, 0));
10846 return build_call_expr_loc (loc
, expfn
, 1, arg
);
10849 /* Optimizations of pow(...)*pow(...). */
10850 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
10851 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
10852 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
10854 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10855 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10856 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10857 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10859 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10860 if (operand_equal_p (arg01
, arg11
, 0))
10862 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10863 tree arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
10865 return build_call_expr_loc (loc
, powfn
, 2, arg
, arg01
);
10868 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10869 if (operand_equal_p (arg00
, arg10
, 0))
10871 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10872 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10874 return build_call_expr_loc (loc
, powfn
, 2, arg00
, arg
);
10878 /* Optimize tan(x)*cos(x) as sin(x). */
10879 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
10880 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
10881 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
10882 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
10883 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
10884 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
10885 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10886 CALL_EXPR_ARG (arg1
, 0), 0))
10888 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
10890 if (sinfn
!= NULL_TREE
)
10891 return build_call_expr_loc (loc
, sinfn
, 1,
10892 CALL_EXPR_ARG (arg0
, 0));
10895 /* Optimize x*pow(x,c) as pow(x,c+1). */
10896 if (fcode1
== BUILT_IN_POW
10897 || fcode1
== BUILT_IN_POWF
10898 || fcode1
== BUILT_IN_POWL
)
10900 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10901 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10902 if (TREE_CODE (arg11
) == REAL_CST
10903 && !TREE_OVERFLOW (arg11
)
10904 && operand_equal_p (arg0
, arg10
, 0))
10906 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10910 c
= TREE_REAL_CST (arg11
);
10911 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10912 arg
= build_real (type
, c
);
10913 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10917 /* Optimize pow(x,c)*x as pow(x,c+1). */
10918 if (fcode0
== BUILT_IN_POW
10919 || fcode0
== BUILT_IN_POWF
10920 || fcode0
== BUILT_IN_POWL
)
10922 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10923 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10924 if (TREE_CODE (arg01
) == REAL_CST
10925 && !TREE_OVERFLOW (arg01
)
10926 && operand_equal_p (arg1
, arg00
, 0))
10928 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10932 c
= TREE_REAL_CST (arg01
);
10933 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10934 arg
= build_real (type
, c
);
10935 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
10939 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10940 if (!in_gimple_form
10942 && operand_equal_p (arg0
, arg1
, 0))
10944 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
10948 tree arg
= build_real (type
, dconst2
);
10949 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10958 /* ~X | X is -1. */
10959 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10960 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10962 t1
= build_zero_cst (type
);
10963 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10964 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
10967 /* X | ~X is -1. */
10968 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
10969 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10971 t1
= build_zero_cst (type
);
10972 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10973 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
10976 /* Canonicalize (X & C1) | C2. */
10977 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10978 && TREE_CODE (arg1
) == INTEGER_CST
10979 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10981 int width
= TYPE_PRECISION (type
), w
;
10982 wide_int c1
= TREE_OPERAND (arg0
, 1);
10983 wide_int c2
= arg1
;
10985 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10986 if ((c1
& c2
) == c1
)
10987 return omit_one_operand_loc (loc
, type
, arg1
,
10988 TREE_OPERAND (arg0
, 0));
10990 wide_int msk
= wi::mask (width
, false,
10991 TYPE_PRECISION (TREE_TYPE (arg1
)));
10993 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10994 if (msk
.and_not (c1
| c2
) == 0)
10995 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
10996 TREE_OPERAND (arg0
, 0), arg1
);
10998 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10999 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11000 mode which allows further optimizations. */
11003 wide_int c3
= c1
.and_not (c2
);
11004 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
11006 wide_int mask
= wi::mask (w
, false,
11007 TYPE_PRECISION (type
));
11008 if (((c1
| c2
) & mask
) == mask
&& c1
.and_not (mask
) == 0)
11016 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
11017 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11018 TREE_OPERAND (arg0
, 0),
11019 wide_int_to_tree (type
,
11024 /* (X & ~Y) | (~X & Y) is X ^ Y */
11025 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11026 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
11028 tree a0
, a1
, l0
, l1
, n0
, n1
;
11030 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11031 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11033 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11034 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11036 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
11037 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
11039 if ((operand_equal_p (n0
, a0
, 0)
11040 && operand_equal_p (n1
, a1
, 0))
11041 || (operand_equal_p (n0
, a1
, 0)
11042 && operand_equal_p (n1
, a0
, 0)))
11043 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
11046 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11047 if (t1
!= NULL_TREE
)
11050 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11052 This results in more efficient code for machines without a NAND
11053 instruction. Combine will canonicalize to the first form
11054 which will allow use of NAND instructions provided by the
11055 backend if they exist. */
11056 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11057 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11060 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11061 build2 (BIT_AND_EXPR
, type
,
11062 fold_convert_loc (loc
, type
,
11063 TREE_OPERAND (arg0
, 0)),
11064 fold_convert_loc (loc
, type
,
11065 TREE_OPERAND (arg1
, 0))));
11068 /* See if this can be simplified into a rotate first. If that
11069 is unsuccessful continue in the association code. */
11073 /* ~X ^ X is -1. */
11074 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11075 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11077 t1
= build_zero_cst (type
);
11078 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11079 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
11082 /* X ^ ~X is -1. */
11083 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
11084 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11086 t1
= build_zero_cst (type
);
11087 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11088 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
11091 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11092 with a constant, and the two constants have no bits in common,
11093 we should treat this as a BIT_IOR_EXPR since this may produce more
11094 simplifications. */
11095 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11096 && TREE_CODE (arg1
) == BIT_AND_EXPR
11097 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11098 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
11099 && wi::bit_and (TREE_OPERAND (arg0
, 1),
11100 TREE_OPERAND (arg1
, 1)) == 0)
11102 code
= BIT_IOR_EXPR
;
11106 /* (X | Y) ^ X -> Y & ~ X*/
11107 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11108 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11110 tree t2
= TREE_OPERAND (arg0
, 1);
11111 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11113 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11114 fold_convert_loc (loc
, type
, t2
),
11115 fold_convert_loc (loc
, type
, t1
));
11119 /* (Y | X) ^ X -> Y & ~ X*/
11120 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11121 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11123 tree t2
= TREE_OPERAND (arg0
, 0);
11124 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11126 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11127 fold_convert_loc (loc
, type
, t2
),
11128 fold_convert_loc (loc
, type
, t1
));
11132 /* X ^ (X | Y) -> Y & ~ X*/
11133 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11134 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
11136 tree t2
= TREE_OPERAND (arg1
, 1);
11137 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11139 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11140 fold_convert_loc (loc
, type
, t2
),
11141 fold_convert_loc (loc
, type
, t1
));
11145 /* X ^ (Y | X) -> Y & ~ X*/
11146 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11147 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
11149 tree t2
= TREE_OPERAND (arg1
, 0);
11150 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11152 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11153 fold_convert_loc (loc
, type
, t2
),
11154 fold_convert_loc (loc
, type
, t1
));
11158 /* Convert ~X ^ ~Y to X ^ Y. */
11159 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11160 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11161 return fold_build2_loc (loc
, code
, type
,
11162 fold_convert_loc (loc
, type
,
11163 TREE_OPERAND (arg0
, 0)),
11164 fold_convert_loc (loc
, type
,
11165 TREE_OPERAND (arg1
, 0)));
11167 /* Convert ~X ^ C to X ^ ~C. */
11168 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11169 && TREE_CODE (arg1
) == INTEGER_CST
)
11170 return fold_build2_loc (loc
, code
, type
,
11171 fold_convert_loc (loc
, type
,
11172 TREE_OPERAND (arg0
, 0)),
11173 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, arg1
));
11175 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11176 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11177 && INTEGRAL_TYPE_P (type
)
11178 && integer_onep (TREE_OPERAND (arg0
, 1))
11179 && integer_onep (arg1
))
11180 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11181 build_zero_cst (TREE_TYPE (arg0
)));
11183 /* Fold (X & Y) ^ Y as ~X & Y. */
11184 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11185 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11187 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11188 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11189 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11190 fold_convert_loc (loc
, type
, arg1
));
11192 /* Fold (X & Y) ^ X as ~Y & X. */
11193 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11194 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11195 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11197 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11198 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11199 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11200 fold_convert_loc (loc
, type
, arg1
));
11202 /* Fold X ^ (X & Y) as X & ~Y. */
11203 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11204 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11206 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11207 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11208 fold_convert_loc (loc
, type
, arg0
),
11209 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11211 /* Fold X ^ (Y & X) as ~Y & X. */
11212 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11213 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11214 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11216 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11217 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11218 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11219 fold_convert_loc (loc
, type
, arg0
));
11222 /* See if this can be simplified into a rotate first. If that
11223 is unsuccessful continue in the association code. */
11227 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11228 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11229 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11230 || (TREE_CODE (arg0
) == EQ_EXPR
11231 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11232 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11233 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11235 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11236 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11237 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11238 || (TREE_CODE (arg1
) == EQ_EXPR
11239 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11240 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11241 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11243 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11244 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11245 && INTEGRAL_TYPE_P (type
)
11246 && integer_onep (TREE_OPERAND (arg0
, 1))
11247 && integer_onep (arg1
))
11250 tem
= TREE_OPERAND (arg0
, 0);
11251 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11252 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11254 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11255 build_zero_cst (TREE_TYPE (tem
)));
11257 /* Fold ~X & 1 as (X & 1) == 0. */
11258 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11259 && INTEGRAL_TYPE_P (type
)
11260 && integer_onep (arg1
))
11263 tem
= TREE_OPERAND (arg0
, 0);
11264 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11265 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11267 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11268 build_zero_cst (TREE_TYPE (tem
)));
11270 /* Fold !X & 1 as X == 0. */
11271 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11272 && integer_onep (arg1
))
11274 tem
= TREE_OPERAND (arg0
, 0);
11275 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11276 build_zero_cst (TREE_TYPE (tem
)));
11279 /* Fold (X ^ Y) & Y as ~X & Y. */
11280 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11281 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11283 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11284 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11285 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11286 fold_convert_loc (loc
, type
, arg1
));
11288 /* Fold (X ^ Y) & X as ~Y & X. */
11289 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11290 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11291 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11293 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11294 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11295 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11296 fold_convert_loc (loc
, type
, arg1
));
11298 /* Fold X & (X ^ Y) as X & ~Y. */
11299 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11300 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11302 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11303 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11304 fold_convert_loc (loc
, type
, arg0
),
11305 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11307 /* Fold X & (Y ^ X) as ~Y & X. */
11308 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11309 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11310 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11312 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11313 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11314 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11315 fold_convert_loc (loc
, type
, arg0
));
11318 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11319 multiple of 1 << CST. */
11320 if (TREE_CODE (arg1
) == INTEGER_CST
)
11322 wide_int cst1
= arg1
;
11323 wide_int ncst1
= -cst1
;
11324 if ((cst1
& ncst1
) == ncst1
11325 && multiple_of_p (type
, arg0
,
11326 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11327 return fold_convert_loc (loc
, type
, arg0
);
11330 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11332 if (TREE_CODE (arg1
) == INTEGER_CST
11333 && TREE_CODE (arg0
) == MULT_EXPR
11334 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11336 wide_int warg1
= arg1
;
11337 wide_int masked
= mask_with_tz (type
, warg1
, TREE_OPERAND (arg0
, 1));
11340 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
11342 else if (masked
!= warg1
)
11344 /* Avoid the transform if arg1 is a mask of some
11345 mode which allows further optimizations. */
11346 int pop
= wi::popcount (warg1
);
11347 if (!(pop
>= BITS_PER_UNIT
11348 && exact_log2 (pop
) != -1
11349 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
11350 return fold_build2_loc (loc
, code
, type
, op0
,
11351 wide_int_to_tree (type
, masked
));
11355 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11356 ((A & N) + B) & M -> (A + B) & M
11357 Similarly if (N & M) == 0,
11358 ((A | N) + B) & M -> (A + B) & M
11359 and for - instead of + (or unary - instead of +)
11360 and/or ^ instead of |.
11361 If B is constant and (B & M) == 0, fold into A & M. */
11362 if (TREE_CODE (arg1
) == INTEGER_CST
)
11364 wide_int cst1
= arg1
;
11365 if ((~cst1
!= 0) && (cst1
& (cst1
+ 1)) == 0
11366 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11367 && (TREE_CODE (arg0
) == PLUS_EXPR
11368 || TREE_CODE (arg0
) == MINUS_EXPR
11369 || TREE_CODE (arg0
) == NEGATE_EXPR
)
11370 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
11371 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
11377 /* Now we know that arg0 is (C + D) or (C - D) or
11378 -C and arg1 (M) is == (1LL << cst) - 1.
11379 Store C into PMOP[0] and D into PMOP[1]. */
11380 pmop
[0] = TREE_OPERAND (arg0
, 0);
11382 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
11384 pmop
[1] = TREE_OPERAND (arg0
, 1);
11388 if ((wi::max_value (TREE_TYPE (arg0
)) & cst1
) != cst1
)
11391 for (; which
>= 0; which
--)
11392 switch (TREE_CODE (pmop
[which
]))
11397 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
11400 cst0
= TREE_OPERAND (pmop
[which
], 1);
11402 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
11407 else if (cst0
!= 0)
11409 /* If C or D is of the form (A & N) where
11410 (N & M) == M, or of the form (A | N) or
11411 (A ^ N) where (N & M) == 0, replace it with A. */
11412 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
11415 /* If C or D is a N where (N & M) == 0, it can be
11416 omitted (assumed 0). */
11417 if ((TREE_CODE (arg0
) == PLUS_EXPR
11418 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
11419 && (cst1
& pmop
[which
]) == 0)
11420 pmop
[which
] = NULL
;
11426 /* Only build anything new if we optimized one or both arguments
11428 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
11429 || (TREE_CODE (arg0
) != NEGATE_EXPR
11430 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
11432 tree utype
= TREE_TYPE (arg0
);
11433 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
11435 /* Perform the operations in a type that has defined
11436 overflow behavior. */
11437 utype
= unsigned_type_for (TREE_TYPE (arg0
));
11438 if (pmop
[0] != NULL
)
11439 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
11440 if (pmop
[1] != NULL
)
11441 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
11444 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
11445 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
11446 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
11448 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
11449 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
11451 else if (pmop
[0] != NULL
)
11453 else if (pmop
[1] != NULL
)
11456 return build_int_cst (type
, 0);
11458 else if (pmop
[0] == NULL
)
11459 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
11461 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
11463 /* TEM is now the new binary +, - or unary - replacement. */
11464 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
11465 fold_convert_loc (loc
, utype
, arg1
));
11466 return fold_convert_loc (loc
, type
, tem
);
11471 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11472 if (t1
!= NULL_TREE
)
11474 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11475 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11476 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11478 prec
= element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11480 wide_int mask
= wide_int::from (arg1
, prec
, UNSIGNED
);
11483 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11486 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11488 This results in more efficient code for machines without a NOR
11489 instruction. Combine will canonicalize to the first form
11490 which will allow use of NOR instructions provided by the
11491 backend if they exist. */
11492 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11493 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11495 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11496 build2 (BIT_IOR_EXPR
, type
,
11497 fold_convert_loc (loc
, type
,
11498 TREE_OPERAND (arg0
, 0)),
11499 fold_convert_loc (loc
, type
,
11500 TREE_OPERAND (arg1
, 0))));
11503 /* If arg0 is derived from the address of an object or function, we may
11504 be able to fold this expression using the object or function's
11506 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && tree_fits_uhwi_p (arg1
))
11508 unsigned HOST_WIDE_INT modulus
, residue
;
11509 unsigned HOST_WIDE_INT low
= tree_to_uhwi (arg1
);
11511 modulus
= get_pointer_modulus_and_residue (arg0
, &residue
,
11512 integer_onep (arg1
));
11514 /* This works because modulus is a power of 2. If this weren't the
11515 case, we'd have to replace it by its greatest power-of-2
11516 divisor: modulus & -modulus. */
11518 return build_int_cst (type
, residue
& low
);
11521 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11522 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11523 if the new mask might be further optimized. */
11524 if ((TREE_CODE (arg0
) == LSHIFT_EXPR
11525 || TREE_CODE (arg0
) == RSHIFT_EXPR
)
11526 && TYPE_PRECISION (TREE_TYPE (arg0
)) <= HOST_BITS_PER_WIDE_INT
11527 && TREE_CODE (arg1
) == INTEGER_CST
11528 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11529 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) > 0
11530 && (tree_to_uhwi (TREE_OPERAND (arg0
, 1))
11531 < TYPE_PRECISION (TREE_TYPE (arg0
))))
11533 unsigned int shiftc
= tree_to_uhwi (TREE_OPERAND (arg0
, 1));
11534 unsigned HOST_WIDE_INT mask
= TREE_INT_CST_LOW (arg1
);
11535 unsigned HOST_WIDE_INT newmask
, zerobits
= 0;
11536 tree shift_type
= TREE_TYPE (arg0
);
11538 if (TREE_CODE (arg0
) == LSHIFT_EXPR
)
11539 zerobits
= ((((unsigned HOST_WIDE_INT
) 1) << shiftc
) - 1);
11540 else if (TREE_CODE (arg0
) == RSHIFT_EXPR
11541 && TYPE_PRECISION (TREE_TYPE (arg0
))
11542 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0
))))
11544 prec
= TYPE_PRECISION (TREE_TYPE (arg0
));
11545 tree arg00
= TREE_OPERAND (arg0
, 0);
11546 /* See if more bits can be proven as zero because of
11548 if (TREE_CODE (arg00
) == NOP_EXPR
11549 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00
, 0))))
11551 tree inner_type
= TREE_TYPE (TREE_OPERAND (arg00
, 0));
11552 if (TYPE_PRECISION (inner_type
)
11553 == GET_MODE_PRECISION (TYPE_MODE (inner_type
))
11554 && TYPE_PRECISION (inner_type
) < prec
)
11556 prec
= TYPE_PRECISION (inner_type
);
11557 /* See if we can shorten the right shift. */
11559 shift_type
= inner_type
;
11560 /* Otherwise X >> C1 is all zeros, so we'll optimize
11561 it into (X, 0) later on by making sure zerobits
11565 zerobits
= ~(unsigned HOST_WIDE_INT
) 0;
11568 zerobits
>>= HOST_BITS_PER_WIDE_INT
- shiftc
;
11569 zerobits
<<= prec
- shiftc
;
11571 /* For arithmetic shift if sign bit could be set, zerobits
11572 can contain actually sign bits, so no transformation is
11573 possible, unless MASK masks them all away. In that
11574 case the shift needs to be converted into logical shift. */
11575 if (!TYPE_UNSIGNED (TREE_TYPE (arg0
))
11576 && prec
== TYPE_PRECISION (TREE_TYPE (arg0
)))
11578 if ((mask
& zerobits
) == 0)
11579 shift_type
= unsigned_type_for (TREE_TYPE (arg0
));
11585 /* ((X << 16) & 0xff00) is (X, 0). */
11586 if ((mask
& zerobits
) == mask
)
11587 return omit_one_operand_loc (loc
, type
,
11588 build_int_cst (type
, 0), arg0
);
11590 newmask
= mask
| zerobits
;
11591 if (newmask
!= mask
&& (newmask
& (newmask
+ 1)) == 0)
11593 /* Only do the transformation if NEWMASK is some integer
11595 for (prec
= BITS_PER_UNIT
;
11596 prec
< HOST_BITS_PER_WIDE_INT
; prec
<<= 1)
11597 if (newmask
== (((unsigned HOST_WIDE_INT
) 1) << prec
) - 1)
11599 if (prec
< HOST_BITS_PER_WIDE_INT
11600 || newmask
== ~(unsigned HOST_WIDE_INT
) 0)
11604 if (shift_type
!= TREE_TYPE (arg0
))
11606 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), shift_type
,
11607 fold_convert_loc (loc
, shift_type
,
11608 TREE_OPERAND (arg0
, 0)),
11609 TREE_OPERAND (arg0
, 1));
11610 tem
= fold_convert_loc (loc
, type
, tem
);
11614 newmaskt
= build_int_cst_type (TREE_TYPE (op1
), newmask
);
11615 if (!tree_int_cst_equal (newmaskt
, arg1
))
11616 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
, newmaskt
);
11624 /* Don't touch a floating-point divide by zero unless the mode
11625 of the constant can represent infinity. */
11626 if (TREE_CODE (arg1
) == REAL_CST
11627 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11628 && real_zerop (arg1
))
11631 /* (-A) / (-B) -> A / B */
11632 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11633 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11634 TREE_OPERAND (arg0
, 0),
11635 negate_expr (arg1
));
11636 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11637 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11638 negate_expr (arg0
),
11639 TREE_OPERAND (arg1
, 0));
11641 /* Convert A/B/C to A/(B*C). */
11642 if (flag_reciprocal_math
11643 && TREE_CODE (arg0
) == RDIV_EXPR
)
11644 return fold_build2_loc (loc
, RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
11645 fold_build2_loc (loc
, MULT_EXPR
, type
,
11646 TREE_OPERAND (arg0
, 1), arg1
));
11648 /* Convert A/(B/C) to (A/B)*C. */
11649 if (flag_reciprocal_math
11650 && TREE_CODE (arg1
) == RDIV_EXPR
)
11651 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11652 fold_build2_loc (loc
, RDIV_EXPR
, type
, arg0
,
11653 TREE_OPERAND (arg1
, 0)),
11654 TREE_OPERAND (arg1
, 1));
11656 /* Convert C1/(X*C2) into (C1/C2)/X. */
11657 if (flag_reciprocal_math
11658 && TREE_CODE (arg1
) == MULT_EXPR
11659 && TREE_CODE (arg0
) == REAL_CST
11660 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
11662 tree tem
= const_binop (RDIV_EXPR
, arg0
,
11663 TREE_OPERAND (arg1
, 1));
11665 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
11666 TREE_OPERAND (arg1
, 0));
11669 if (flag_unsafe_math_optimizations
)
11671 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
11672 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
11674 /* Optimize sin(x)/cos(x) as tan(x). */
11675 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
11676 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
11677 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
11678 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11679 CALL_EXPR_ARG (arg1
, 0), 0))
11681 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11683 if (tanfn
!= NULL_TREE
)
11684 return build_call_expr_loc (loc
, tanfn
, 1, CALL_EXPR_ARG (arg0
, 0));
11687 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11688 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
11689 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
11690 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
11691 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11692 CALL_EXPR_ARG (arg1
, 0), 0))
11694 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11696 if (tanfn
!= NULL_TREE
)
11698 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
11699 CALL_EXPR_ARG (arg0
, 0));
11700 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11701 build_real (type
, dconst1
), tmp
);
11705 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11706 NaNs or Infinities. */
11707 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
11708 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
11709 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
11711 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11712 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11714 if (! HONOR_NANS (arg00
)
11715 && ! HONOR_INFINITIES (element_mode (arg00
))
11716 && operand_equal_p (arg00
, arg01
, 0))
11718 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11720 if (cosfn
!= NULL_TREE
)
11721 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11725 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11726 NaNs or Infinities. */
11727 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
11728 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
11729 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
11731 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11732 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11734 if (! HONOR_NANS (arg00
)
11735 && ! HONOR_INFINITIES (element_mode (arg00
))
11736 && operand_equal_p (arg00
, arg01
, 0))
11738 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11740 if (cosfn
!= NULL_TREE
)
11742 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11743 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11744 build_real (type
, dconst1
),
11750 /* Optimize pow(x,c)/x as pow(x,c-1). */
11751 if (fcode0
== BUILT_IN_POW
11752 || fcode0
== BUILT_IN_POWF
11753 || fcode0
== BUILT_IN_POWL
)
11755 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11756 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11757 if (TREE_CODE (arg01
) == REAL_CST
11758 && !TREE_OVERFLOW (arg01
)
11759 && operand_equal_p (arg1
, arg00
, 0))
11761 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11765 c
= TREE_REAL_CST (arg01
);
11766 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
11767 arg
= build_real (type
, c
);
11768 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
11772 /* Optimize a/root(b/c) into a*root(c/b). */
11773 if (BUILTIN_ROOT_P (fcode1
))
11775 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
11777 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
11779 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11780 tree b
= TREE_OPERAND (rootarg
, 0);
11781 tree c
= TREE_OPERAND (rootarg
, 1);
11783 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
11785 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
11786 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
11790 /* Optimize x/expN(y) into x*expN(-y). */
11791 if (BUILTIN_EXPONENT_P (fcode1
))
11793 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11794 tree arg
= negate_expr (CALL_EXPR_ARG (arg1
, 0));
11795 arg1
= build_call_expr_loc (loc
,
11797 fold_convert_loc (loc
, type
, arg
));
11798 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11801 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11802 if (fcode1
== BUILT_IN_POW
11803 || fcode1
== BUILT_IN_POWF
11804 || fcode1
== BUILT_IN_POWL
)
11806 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11807 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
11808 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
11809 tree neg11
= fold_convert_loc (loc
, type
,
11810 negate_expr (arg11
));
11811 arg1
= build_call_expr_loc (loc
, powfn
, 2, arg10
, neg11
);
11812 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11817 case TRUNC_DIV_EXPR
:
11818 /* Optimize (X & (-A)) / A where A is a power of 2,
11820 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11821 && !TYPE_UNSIGNED (type
) && TREE_CODE (arg1
) == INTEGER_CST
11822 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) > 0)
11824 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (arg1
),
11825 arg1
, TREE_OPERAND (arg0
, 1));
11826 if (sum
&& integer_zerop (sum
)) {
11827 tree pow2
= build_int_cst (integer_type_node
,
11828 wi::exact_log2 (arg1
));
11829 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11830 TREE_OPERAND (arg0
, 0), pow2
);
11836 case FLOOR_DIV_EXPR
:
11837 /* Simplify A / (B << N) where A and B are positive and B is
11838 a power of 2, to A >> (N + log2(B)). */
11839 strict_overflow_p
= false;
11840 if (TREE_CODE (arg1
) == LSHIFT_EXPR
11841 && (TYPE_UNSIGNED (type
)
11842 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11844 tree sval
= TREE_OPERAND (arg1
, 0);
11845 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
11847 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
11848 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
11849 wi::exact_log2 (sval
));
11851 if (strict_overflow_p
)
11852 fold_overflow_warning (("assuming signed overflow does not "
11853 "occur when simplifying A / (B << N)"),
11854 WARN_STRICT_OVERFLOW_MISC
);
11856 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
11858 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11859 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
11865 case ROUND_DIV_EXPR
:
11866 case CEIL_DIV_EXPR
:
11867 case EXACT_DIV_EXPR
:
11868 if (integer_zerop (arg1
))
11871 /* Convert -A / -B to A / B when the type is signed and overflow is
11873 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11874 && TREE_CODE (arg0
) == NEGATE_EXPR
11875 && negate_expr_p (arg1
))
11877 if (INTEGRAL_TYPE_P (type
))
11878 fold_overflow_warning (("assuming signed overflow does not occur "
11879 "when distributing negation across "
11881 WARN_STRICT_OVERFLOW_MISC
);
11882 return fold_build2_loc (loc
, code
, type
,
11883 fold_convert_loc (loc
, type
,
11884 TREE_OPERAND (arg0
, 0)),
11885 fold_convert_loc (loc
, type
,
11886 negate_expr (arg1
)));
11888 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11889 && TREE_CODE (arg1
) == NEGATE_EXPR
11890 && negate_expr_p (arg0
))
11892 if (INTEGRAL_TYPE_P (type
))
11893 fold_overflow_warning (("assuming signed overflow does not occur "
11894 "when distributing negation across "
11896 WARN_STRICT_OVERFLOW_MISC
);
11897 return fold_build2_loc (loc
, code
, type
,
11898 fold_convert_loc (loc
, type
,
11899 negate_expr (arg0
)),
11900 fold_convert_loc (loc
, type
,
11901 TREE_OPERAND (arg1
, 0)));
11904 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11905 operation, EXACT_DIV_EXPR.
11907 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11908 At one time others generated faster code, it's not clear if they do
11909 after the last round to changes to the DIV code in expmed.c. */
11910 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
11911 && multiple_of_p (type
, arg0
, arg1
))
11912 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
, arg0
, arg1
);
11914 strict_overflow_p
= false;
11915 if (TREE_CODE (arg1
) == INTEGER_CST
11916 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11917 &strict_overflow_p
)))
11919 if (strict_overflow_p
)
11920 fold_overflow_warning (("assuming signed overflow does not occur "
11921 "when simplifying division"),
11922 WARN_STRICT_OVERFLOW_MISC
);
11923 return fold_convert_loc (loc
, type
, tem
);
11928 case CEIL_MOD_EXPR
:
11929 case FLOOR_MOD_EXPR
:
11930 case ROUND_MOD_EXPR
:
11931 case TRUNC_MOD_EXPR
:
11932 strict_overflow_p
= false;
11933 if (TREE_CODE (arg1
) == INTEGER_CST
11934 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11935 &strict_overflow_p
)))
11937 if (strict_overflow_p
)
11938 fold_overflow_warning (("assuming signed overflow does not occur "
11939 "when simplifying modulus"),
11940 WARN_STRICT_OVERFLOW_MISC
);
11941 return fold_convert_loc (loc
, type
, tem
);
11950 /* Since negative shift count is not well-defined,
11951 don't try to compute it in the compiler. */
11952 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
11955 prec
= element_precision (type
);
11957 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11958 if (TREE_CODE (op0
) == code
&& tree_fits_uhwi_p (arg1
)
11959 && tree_to_uhwi (arg1
) < prec
11960 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11961 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
11963 unsigned int low
= (tree_to_uhwi (TREE_OPERAND (arg0
, 1))
11964 + tree_to_uhwi (arg1
));
11966 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11967 being well defined. */
11970 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
11972 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
11973 return omit_one_operand_loc (loc
, type
, build_zero_cst (type
),
11974 TREE_OPERAND (arg0
, 0));
11979 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
11980 build_int_cst (TREE_TYPE (arg1
), low
));
11983 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11984 into x & ((unsigned)-1 >> c) for unsigned types. */
11985 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
11986 || (TYPE_UNSIGNED (type
)
11987 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
11988 && tree_fits_uhwi_p (arg1
)
11989 && tree_to_uhwi (arg1
) < prec
11990 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11991 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
11993 HOST_WIDE_INT low0
= tree_to_uhwi (TREE_OPERAND (arg0
, 1));
11994 HOST_WIDE_INT low1
= tree_to_uhwi (arg1
);
12000 arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12002 lshift
= build_minus_one_cst (type
);
12003 lshift
= const_binop (code
, lshift
, arg1
);
12005 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, arg00
, lshift
);
12009 /* If we have a rotate of a bit operation with the rotate count and
12010 the second operand of the bit operation both constant,
12011 permute the two operations. */
12012 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12013 && (TREE_CODE (arg0
) == BIT_AND_EXPR
12014 || TREE_CODE (arg0
) == BIT_IOR_EXPR
12015 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12016 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12017 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
12018 fold_build2_loc (loc
, code
, type
,
12019 TREE_OPERAND (arg0
, 0), arg1
),
12020 fold_build2_loc (loc
, code
, type
,
12021 TREE_OPERAND (arg0
, 1), arg1
));
12023 /* Two consecutive rotates adding up to the some integer
12024 multiple of the precision of the type can be ignored. */
12025 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12026 && TREE_CODE (arg0
) == RROTATE_EXPR
12027 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12028 && wi::umod_trunc (wi::add (arg1
, TREE_OPERAND (arg0
, 1)),
12030 return TREE_OPERAND (arg0
, 0);
12032 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12033 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12034 if the latter can be further optimized. */
12035 if ((code
== LSHIFT_EXPR
|| code
== RSHIFT_EXPR
)
12036 && TREE_CODE (arg0
) == BIT_AND_EXPR
12037 && TREE_CODE (arg1
) == INTEGER_CST
12038 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12040 tree mask
= fold_build2_loc (loc
, code
, type
,
12041 fold_convert_loc (loc
, type
,
12042 TREE_OPERAND (arg0
, 1)),
12044 tree shift
= fold_build2_loc (loc
, code
, type
,
12045 fold_convert_loc (loc
, type
,
12046 TREE_OPERAND (arg0
, 0)),
12048 tem
= fold_binary_loc (loc
, BIT_AND_EXPR
, type
, shift
, mask
);
12056 tem
= fold_minmax (loc
, MIN_EXPR
, type
, arg0
, arg1
);
12062 tem
= fold_minmax (loc
, MAX_EXPR
, type
, arg0
, arg1
);
12067 case TRUTH_ANDIF_EXPR
:
12068 /* Note that the operands of this must be ints
12069 and their values must be 0 or 1.
12070 ("true" is a fixed value perhaps depending on the language.) */
12071 /* If first arg is constant zero, return it. */
12072 if (integer_zerop (arg0
))
12073 return fold_convert_loc (loc
, type
, arg0
);
12074 case TRUTH_AND_EXPR
:
12075 /* If either arg is constant true, drop it. */
12076 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12077 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12078 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
12079 /* Preserve sequence points. */
12080 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12081 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12082 /* If second arg is constant zero, result is zero, but first arg
12083 must be evaluated. */
12084 if (integer_zerop (arg1
))
12085 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12086 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12087 case will be handled here. */
12088 if (integer_zerop (arg0
))
12089 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12091 /* !X && X is always false. */
12092 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12093 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12094 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
12095 /* X && !X is always false. */
12096 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12097 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12098 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12100 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12101 means A >= Y && A != MAX, but in this case we know that
12104 if (!TREE_SIDE_EFFECTS (arg0
)
12105 && !TREE_SIDE_EFFECTS (arg1
))
12107 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
12108 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
12109 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
12111 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
12112 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
12113 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
12116 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12122 case TRUTH_ORIF_EXPR
:
12123 /* Note that the operands of this must be ints
12124 and their values must be 0 or true.
12125 ("true" is a fixed value perhaps depending on the language.) */
12126 /* If first arg is constant true, return it. */
12127 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12128 return fold_convert_loc (loc
, type
, arg0
);
12129 case TRUTH_OR_EXPR
:
12130 /* If either arg is constant zero, drop it. */
12131 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12132 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12133 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12134 /* Preserve sequence points. */
12135 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12136 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12137 /* If second arg is constant true, result is true, but we must
12138 evaluate first arg. */
12139 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12140 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12141 /* Likewise for first arg, but note this only occurs here for
12143 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12144 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12146 /* !X || X is always true. */
12147 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12148 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12149 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12150 /* X || !X is always true. */
12151 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12152 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12153 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12155 /* (X && !Y) || (!X && Y) is X ^ Y */
12156 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
12157 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
12159 tree a0
, a1
, l0
, l1
, n0
, n1
;
12161 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
12162 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
12164 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12165 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
12167 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
12168 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
12170 if ((operand_equal_p (n0
, a0
, 0)
12171 && operand_equal_p (n1
, a1
, 0))
12172 || (operand_equal_p (n0
, a1
, 0)
12173 && operand_equal_p (n1
, a0
, 0)))
12174 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
12177 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12183 case TRUTH_XOR_EXPR
:
12184 /* If the second arg is constant zero, drop it. */
12185 if (integer_zerop (arg1
))
12186 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12187 /* If the second arg is constant true, this is a logical inversion. */
12188 if (integer_onep (arg1
))
12190 tem
= invert_truthvalue_loc (loc
, arg0
);
12191 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
12193 /* Identical arguments cancel to zero. */
12194 if (operand_equal_p (arg0
, arg1
, 0))
12195 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12197 /* !X ^ X is always true. */
12198 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12199 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12200 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12202 /* X ^ !X is always true. */
12203 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12204 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12205 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12214 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12215 if (tem
!= NULL_TREE
)
12218 /* bool_var != 0 becomes bool_var. */
12219 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12220 && code
== NE_EXPR
)
12221 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12223 /* bool_var == 1 becomes bool_var. */
12224 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12225 && code
== EQ_EXPR
)
12226 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12228 /* bool_var != 1 becomes !bool_var. */
12229 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12230 && code
== NE_EXPR
)
12231 return fold_convert_loc (loc
, type
,
12232 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12233 TREE_TYPE (arg0
), arg0
));
12235 /* bool_var == 0 becomes !bool_var. */
12236 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12237 && code
== EQ_EXPR
)
12238 return fold_convert_loc (loc
, type
,
12239 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12240 TREE_TYPE (arg0
), arg0
));
12242 /* !exp != 0 becomes !exp */
12243 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
12244 && code
== NE_EXPR
)
12245 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12247 /* If this is an equality comparison of the address of two non-weak,
12248 unaliased symbols neither of which are extern (since we do not
12249 have access to attributes for externs), then we know the result. */
12250 if (TREE_CODE (arg0
) == ADDR_EXPR
12251 && DECL_P (TREE_OPERAND (arg0
, 0))
12252 && TREE_CODE (arg1
) == ADDR_EXPR
12253 && DECL_P (TREE_OPERAND (arg1
, 0)))
12257 if (decl_in_symtab_p (TREE_OPERAND (arg0
, 0))
12258 && decl_in_symtab_p (TREE_OPERAND (arg1
, 0)))
12259 equal
= symtab_node::get_create (TREE_OPERAND (arg0
, 0))
12260 ->equal_address_to (symtab_node::get_create
12261 (TREE_OPERAND (arg1
, 0)));
12263 equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
12265 return constant_boolean_node (equal
12266 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
12270 /* Similarly for a NEGATE_EXPR. */
12271 if (TREE_CODE (arg0
) == NEGATE_EXPR
12272 && TREE_CODE (arg1
) == INTEGER_CST
12273 && 0 != (tem
= negate_expr (fold_convert_loc (loc
, TREE_TYPE (arg0
),
12275 && TREE_CODE (tem
) == INTEGER_CST
12276 && !TREE_OVERFLOW (tem
))
12277 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12279 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12280 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12281 && TREE_CODE (arg1
) == INTEGER_CST
12282 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12283 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12284 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
12285 fold_convert_loc (loc
,
12288 TREE_OPERAND (arg0
, 1)));
12290 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12291 if ((TREE_CODE (arg0
) == PLUS_EXPR
12292 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
12293 || TREE_CODE (arg0
) == MINUS_EXPR
)
12294 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12297 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
12298 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
12300 tree val
= TREE_OPERAND (arg0
, 1);
12301 return omit_two_operands_loc (loc
, type
,
12302 fold_build2_loc (loc
, code
, type
,
12304 build_int_cst (TREE_TYPE (val
),
12306 TREE_OPERAND (arg0
, 0), arg1
);
12309 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12310 if (TREE_CODE (arg0
) == MINUS_EXPR
12311 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
12312 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12315 && wi::extract_uhwi (TREE_OPERAND (arg0
, 0), 0, 1) == 1)
12317 return omit_two_operands_loc (loc
, type
,
12319 ? boolean_true_node
: boolean_false_node
,
12320 TREE_OPERAND (arg0
, 1), arg1
);
12323 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12324 if (TREE_CODE (arg0
) == ABS_EXPR
12325 && (integer_zerop (arg1
) || real_zerop (arg1
)))
12326 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
12328 /* If this is an EQ or NE comparison with zero and ARG0 is
12329 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12330 two operations, but the latter can be done in one less insn
12331 on machines that have only two-operand insns or on which a
12332 constant cannot be the first operand. */
12333 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12334 && integer_zerop (arg1
))
12336 tree arg00
= TREE_OPERAND (arg0
, 0);
12337 tree arg01
= TREE_OPERAND (arg0
, 1);
12338 if (TREE_CODE (arg00
) == LSHIFT_EXPR
12339 && integer_onep (TREE_OPERAND (arg00
, 0)))
12341 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
12342 arg01
, TREE_OPERAND (arg00
, 1));
12343 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12344 build_int_cst (TREE_TYPE (arg0
), 1));
12345 return fold_build2_loc (loc
, code
, type
,
12346 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12349 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
12350 && integer_onep (TREE_OPERAND (arg01
, 0)))
12352 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
12353 arg00
, TREE_OPERAND (arg01
, 1));
12354 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12355 build_int_cst (TREE_TYPE (arg0
), 1));
12356 return fold_build2_loc (loc
, code
, type
,
12357 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12362 /* If this is an NE or EQ comparison of zero against the result of a
12363 signed MOD operation whose second operand is a power of 2, make
12364 the MOD operation unsigned since it is simpler and equivalent. */
12365 if (integer_zerop (arg1
)
12366 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
12367 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
12368 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
12369 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
12370 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
12371 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12373 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
12374 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
12375 fold_convert_loc (loc
, newtype
,
12376 TREE_OPERAND (arg0
, 0)),
12377 fold_convert_loc (loc
, newtype
,
12378 TREE_OPERAND (arg0
, 1)));
12380 return fold_build2_loc (loc
, code
, type
, newmod
,
12381 fold_convert_loc (loc
, newtype
, arg1
));
12384 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12385 C1 is a valid shift constant, and C2 is a power of two, i.e.
12387 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12388 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
12389 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
12391 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12392 && integer_zerop (arg1
))
12394 tree itype
= TREE_TYPE (arg0
);
12395 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
12396 prec
= TYPE_PRECISION (itype
);
12398 /* Check for a valid shift count. */
12399 if (wi::ltu_p (arg001
, prec
))
12401 tree arg01
= TREE_OPERAND (arg0
, 1);
12402 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12403 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
12404 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12405 can be rewritten as (X & (C2 << C1)) != 0. */
12406 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
12408 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
12409 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
12410 return fold_build2_loc (loc
, code
, type
, tem
,
12411 fold_convert_loc (loc
, itype
, arg1
));
12413 /* Otherwise, for signed (arithmetic) shifts,
12414 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12415 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12416 else if (!TYPE_UNSIGNED (itype
))
12417 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
12418 arg000
, build_int_cst (itype
, 0));
12419 /* Otherwise, of unsigned (logical) shifts,
12420 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12421 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12423 return omit_one_operand_loc (loc
, type
,
12424 code
== EQ_EXPR
? integer_one_node
12425 : integer_zero_node
,
12430 /* If we have (A & C) == C where C is a power of 2, convert this into
12431 (A & C) != 0. Similarly for NE_EXPR. */
12432 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12433 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12434 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12435 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12436 arg0
, fold_convert_loc (loc
, TREE_TYPE (arg0
),
12437 integer_zero_node
));
12439 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12440 bit, then fold the expression into A < 0 or A >= 0. */
12441 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
, type
);
12445 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12446 Similarly for NE_EXPR. */
12447 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12448 && TREE_CODE (arg1
) == INTEGER_CST
12449 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12451 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
12452 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
12453 TREE_OPERAND (arg0
, 1));
12455 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12456 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
12458 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12459 if (integer_nonzerop (dandnotc
))
12460 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12463 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12464 Similarly for NE_EXPR. */
12465 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
12466 && TREE_CODE (arg1
) == INTEGER_CST
12467 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12469 tree notd
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
12471 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12472 TREE_OPERAND (arg0
, 1),
12473 fold_convert_loc (loc
, TREE_TYPE (arg0
), notd
));
12474 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12475 if (integer_nonzerop (candnotd
))
12476 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12479 /* If this is a comparison of a field, we may be able to simplify it. */
12480 if ((TREE_CODE (arg0
) == COMPONENT_REF
12481 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
12482 /* Handle the constant case even without -O
12483 to make sure the warnings are given. */
12484 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
12486 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
12491 /* Optimize comparisons of strlen vs zero to a compare of the
12492 first character of the string vs zero. To wit,
12493 strlen(ptr) == 0 => *ptr == 0
12494 strlen(ptr) != 0 => *ptr != 0
12495 Other cases should reduce to one of these two (or a constant)
12496 due to the return value of strlen being unsigned. */
12497 if (TREE_CODE (arg0
) == CALL_EXPR
12498 && integer_zerop (arg1
))
12500 tree fndecl
= get_callee_fndecl (arg0
);
12503 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
12504 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
12505 && call_expr_nargs (arg0
) == 1
12506 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
12508 tree iref
= build_fold_indirect_ref_loc (loc
,
12509 CALL_EXPR_ARG (arg0
, 0));
12510 return fold_build2_loc (loc
, code
, type
, iref
,
12511 build_int_cst (TREE_TYPE (iref
), 0));
12515 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12516 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12517 if (TREE_CODE (arg0
) == RSHIFT_EXPR
12518 && integer_zerop (arg1
)
12519 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12521 tree arg00
= TREE_OPERAND (arg0
, 0);
12522 tree arg01
= TREE_OPERAND (arg0
, 1);
12523 tree itype
= TREE_TYPE (arg00
);
12524 if (wi::eq_p (arg01
, element_precision (itype
) - 1))
12526 if (TYPE_UNSIGNED (itype
))
12528 itype
= signed_type_for (itype
);
12529 arg00
= fold_convert_loc (loc
, itype
, arg00
);
12531 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
12532 type
, arg00
, build_zero_cst (itype
));
12536 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12537 if (integer_zerop (arg1
)
12538 && TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12539 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12540 TREE_OPERAND (arg0
, 1));
12542 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12543 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12544 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12545 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12546 build_zero_cst (TREE_TYPE (arg0
)));
12547 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12548 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12549 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12550 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
12551 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 1),
12552 build_zero_cst (TREE_TYPE (arg0
)));
12554 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12555 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12556 && TREE_CODE (arg1
) == INTEGER_CST
12557 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12558 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12559 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg1
),
12560 TREE_OPERAND (arg0
, 1), arg1
));
12562 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12563 (X & C) == 0 when C is a single bit. */
12564 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12565 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
12566 && integer_zerop (arg1
)
12567 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12569 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12570 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
12571 TREE_OPERAND (arg0
, 1));
12572 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
12574 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12578 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12579 constant C is a power of two, i.e. a single bit. */
12580 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12581 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12582 && integer_zerop (arg1
)
12583 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12584 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12585 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12587 tree arg00
= TREE_OPERAND (arg0
, 0);
12588 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12589 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
12592 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12593 when is C is a power of two, i.e. a single bit. */
12594 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12595 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
12596 && integer_zerop (arg1
)
12597 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12598 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12599 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12601 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12602 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
12603 arg000
, TREE_OPERAND (arg0
, 1));
12604 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12605 tem
, build_int_cst (TREE_TYPE (tem
), 0));
12608 if (integer_zerop (arg1
)
12609 && tree_expr_nonzero_p (arg0
))
12611 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
12612 return omit_one_operand_loc (loc
, type
, res
, arg0
);
12615 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12616 if (TREE_CODE (arg0
) == NEGATE_EXPR
12617 && TREE_CODE (arg1
) == NEGATE_EXPR
)
12618 return fold_build2_loc (loc
, code
, type
,
12619 TREE_OPERAND (arg0
, 0),
12620 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12621 TREE_OPERAND (arg1
, 0)));
12623 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12624 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12625 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
12627 tree arg00
= TREE_OPERAND (arg0
, 0);
12628 tree arg01
= TREE_OPERAND (arg0
, 1);
12629 tree arg10
= TREE_OPERAND (arg1
, 0);
12630 tree arg11
= TREE_OPERAND (arg1
, 1);
12631 tree itype
= TREE_TYPE (arg0
);
12633 if (operand_equal_p (arg01
, arg11
, 0))
12634 return fold_build2_loc (loc
, code
, type
,
12635 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12636 fold_build2_loc (loc
,
12637 BIT_XOR_EXPR
, itype
,
12640 build_zero_cst (itype
));
12642 if (operand_equal_p (arg01
, arg10
, 0))
12643 return fold_build2_loc (loc
, code
, type
,
12644 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12645 fold_build2_loc (loc
,
12646 BIT_XOR_EXPR
, itype
,
12649 build_zero_cst (itype
));
12651 if (operand_equal_p (arg00
, arg11
, 0))
12652 return fold_build2_loc (loc
, code
, type
,
12653 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12654 fold_build2_loc (loc
,
12655 BIT_XOR_EXPR
, itype
,
12658 build_zero_cst (itype
));
12660 if (operand_equal_p (arg00
, arg10
, 0))
12661 return fold_build2_loc (loc
, code
, type
,
12662 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12663 fold_build2_loc (loc
,
12664 BIT_XOR_EXPR
, itype
,
12667 build_zero_cst (itype
));
12670 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12671 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
12673 tree arg00
= TREE_OPERAND (arg0
, 0);
12674 tree arg01
= TREE_OPERAND (arg0
, 1);
12675 tree arg10
= TREE_OPERAND (arg1
, 0);
12676 tree arg11
= TREE_OPERAND (arg1
, 1);
12677 tree itype
= TREE_TYPE (arg0
);
12679 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12680 operand_equal_p guarantees no side-effects so we don't need
12681 to use omit_one_operand on Z. */
12682 if (operand_equal_p (arg01
, arg11
, 0))
12683 return fold_build2_loc (loc
, code
, type
, arg00
,
12684 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12686 if (operand_equal_p (arg01
, arg10
, 0))
12687 return fold_build2_loc (loc
, code
, type
, arg00
,
12688 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12690 if (operand_equal_p (arg00
, arg11
, 0))
12691 return fold_build2_loc (loc
, code
, type
, arg01
,
12692 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12694 if (operand_equal_p (arg00
, arg10
, 0))
12695 return fold_build2_loc (loc
, code
, type
, arg01
,
12696 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12699 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12700 if (TREE_CODE (arg01
) == INTEGER_CST
12701 && TREE_CODE (arg11
) == INTEGER_CST
)
12703 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
12704 fold_convert_loc (loc
, itype
, arg11
));
12705 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
12706 return fold_build2_loc (loc
, code
, type
, tem
,
12707 fold_convert_loc (loc
, itype
, arg10
));
12711 /* Attempt to simplify equality/inequality comparisons of complex
12712 values. Only lower the comparison if the result is known or
12713 can be simplified to a single scalar comparison. */
12714 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
12715 || TREE_CODE (arg0
) == COMPLEX_CST
)
12716 && (TREE_CODE (arg1
) == COMPLEX_EXPR
12717 || TREE_CODE (arg1
) == COMPLEX_CST
))
12719 tree real0
, imag0
, real1
, imag1
;
12722 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
12724 real0
= TREE_OPERAND (arg0
, 0);
12725 imag0
= TREE_OPERAND (arg0
, 1);
12729 real0
= TREE_REALPART (arg0
);
12730 imag0
= TREE_IMAGPART (arg0
);
12733 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
12735 real1
= TREE_OPERAND (arg1
, 0);
12736 imag1
= TREE_OPERAND (arg1
, 1);
12740 real1
= TREE_REALPART (arg1
);
12741 imag1
= TREE_IMAGPART (arg1
);
12744 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
12745 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
12747 if (integer_zerop (rcond
))
12749 if (code
== EQ_EXPR
)
12750 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12752 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
12756 if (code
== NE_EXPR
)
12757 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12759 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
12763 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
12764 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
12766 if (integer_zerop (icond
))
12768 if (code
== EQ_EXPR
)
12769 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12771 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
12775 if (code
== NE_EXPR
)
12776 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12778 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
12789 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12790 if (tem
!= NULL_TREE
)
12793 /* Transform comparisons of the form X +- C CMP X. */
12794 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
12795 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12796 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
12797 && !HONOR_SNANS (arg0
))
12798 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12799 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
12801 tree arg01
= TREE_OPERAND (arg0
, 1);
12802 enum tree_code code0
= TREE_CODE (arg0
);
12805 if (TREE_CODE (arg01
) == REAL_CST
)
12806 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
12808 is_positive
= tree_int_cst_sgn (arg01
);
12810 /* (X - c) > X becomes false. */
12811 if (code
== GT_EXPR
12812 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12813 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12815 if (TREE_CODE (arg01
) == INTEGER_CST
12816 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12817 fold_overflow_warning (("assuming signed overflow does not "
12818 "occur when assuming that (X - c) > X "
12819 "is always false"),
12820 WARN_STRICT_OVERFLOW_ALL
);
12821 return constant_boolean_node (0, type
);
12824 /* Likewise (X + c) < X becomes false. */
12825 if (code
== LT_EXPR
12826 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12827 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12829 if (TREE_CODE (arg01
) == INTEGER_CST
12830 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12831 fold_overflow_warning (("assuming signed overflow does not "
12832 "occur when assuming that "
12833 "(X + c) < X is always false"),
12834 WARN_STRICT_OVERFLOW_ALL
);
12835 return constant_boolean_node (0, type
);
12838 /* Convert (X - c) <= X to true. */
12839 if (!HONOR_NANS (arg1
)
12841 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12842 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12844 if (TREE_CODE (arg01
) == INTEGER_CST
12845 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12846 fold_overflow_warning (("assuming signed overflow does not "
12847 "occur when assuming that "
12848 "(X - c) <= X is always true"),
12849 WARN_STRICT_OVERFLOW_ALL
);
12850 return constant_boolean_node (1, type
);
12853 /* Convert (X + c) >= X to true. */
12854 if (!HONOR_NANS (arg1
)
12856 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12857 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12859 if (TREE_CODE (arg01
) == INTEGER_CST
12860 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12861 fold_overflow_warning (("assuming signed overflow does not "
12862 "occur when assuming that "
12863 "(X + c) >= X is always true"),
12864 WARN_STRICT_OVERFLOW_ALL
);
12865 return constant_boolean_node (1, type
);
12868 if (TREE_CODE (arg01
) == INTEGER_CST
)
12870 /* Convert X + c > X and X - c < X to true for integers. */
12871 if (code
== GT_EXPR
12872 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
12873 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
12875 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12876 fold_overflow_warning (("assuming signed overflow does "
12877 "not occur when assuming that "
12878 "(X + c) > X is always true"),
12879 WARN_STRICT_OVERFLOW_ALL
);
12880 return constant_boolean_node (1, type
);
12883 if (code
== LT_EXPR
12884 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
12885 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
12887 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12888 fold_overflow_warning (("assuming signed overflow does "
12889 "not occur when assuming that "
12890 "(X - c) < X is always true"),
12891 WARN_STRICT_OVERFLOW_ALL
);
12892 return constant_boolean_node (1, type
);
12895 /* Convert X + c <= X and X - c >= X to false for integers. */
12896 if (code
== LE_EXPR
12897 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
12898 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
12900 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12901 fold_overflow_warning (("assuming signed overflow does "
12902 "not occur when assuming that "
12903 "(X + c) <= X is always false"),
12904 WARN_STRICT_OVERFLOW_ALL
);
12905 return constant_boolean_node (0, type
);
12908 if (code
== GE_EXPR
12909 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
12910 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
12912 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12913 fold_overflow_warning (("assuming signed overflow does "
12914 "not occur when assuming that "
12915 "(X - c) >= X is always false"),
12916 WARN_STRICT_OVERFLOW_ALL
);
12917 return constant_boolean_node (0, type
);
12922 /* Comparisons with the highest or lowest possible integer of
12923 the specified precision will have known values. */
12925 tree arg1_type
= TREE_TYPE (arg1
);
12926 unsigned int prec
= TYPE_PRECISION (arg1_type
);
12928 if (TREE_CODE (arg1
) == INTEGER_CST
12929 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
12931 wide_int max
= wi::max_value (arg1_type
);
12932 wide_int signed_max
= wi::max_value (prec
, SIGNED
);
12933 wide_int min
= wi::min_value (arg1_type
);
12935 if (wi::eq_p (arg1
, max
))
12939 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12942 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
12945 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12948 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
12950 /* The GE_EXPR and LT_EXPR cases above are not normally
12951 reached because of previous transformations. */
12956 else if (wi::eq_p (arg1
, max
- 1))
12960 arg1
= const_binop (PLUS_EXPR
, arg1
,
12961 build_int_cst (TREE_TYPE (arg1
), 1));
12962 return fold_build2_loc (loc
, EQ_EXPR
, type
,
12963 fold_convert_loc (loc
,
12964 TREE_TYPE (arg1
), arg0
),
12967 arg1
= const_binop (PLUS_EXPR
, arg1
,
12968 build_int_cst (TREE_TYPE (arg1
), 1));
12969 return fold_build2_loc (loc
, NE_EXPR
, type
,
12970 fold_convert_loc (loc
, TREE_TYPE (arg1
),
12976 else if (wi::eq_p (arg1
, min
))
12980 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12983 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
12986 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12989 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
12994 else if (wi::eq_p (arg1
, min
+ 1))
12998 arg1
= const_binop (MINUS_EXPR
, arg1
,
12999 build_int_cst (TREE_TYPE (arg1
), 1));
13000 return fold_build2_loc (loc
, NE_EXPR
, type
,
13001 fold_convert_loc (loc
,
13002 TREE_TYPE (arg1
), arg0
),
13005 arg1
= const_binop (MINUS_EXPR
, arg1
,
13006 build_int_cst (TREE_TYPE (arg1
), 1));
13007 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13008 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13015 else if (wi::eq_p (arg1
, signed_max
)
13016 && TYPE_UNSIGNED (arg1_type
)
13017 /* We will flip the signedness of the comparison operator
13018 associated with the mode of arg1, so the sign bit is
13019 specified by this mode. Check that arg1 is the signed
13020 max associated with this sign bit. */
13021 && prec
== GET_MODE_PRECISION (TYPE_MODE (arg1_type
))
13022 /* signed_type does not work on pointer types. */
13023 && INTEGRAL_TYPE_P (arg1_type
))
13025 /* The following case also applies to X < signed_max+1
13026 and X >= signed_max+1 because previous transformations. */
13027 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13029 tree st
= signed_type_for (arg1_type
);
13030 return fold_build2_loc (loc
,
13031 code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
13032 type
, fold_convert_loc (loc
, st
, arg0
),
13033 build_int_cst (st
, 0));
13039 /* If we are comparing an ABS_EXPR with a constant, we can
13040 convert all the cases into explicit comparisons, but they may
13041 well not be faster than doing the ABS and one comparison.
13042 But ABS (X) <= C is a range comparison, which becomes a subtraction
13043 and a comparison, and is probably faster. */
13044 if (code
== LE_EXPR
13045 && TREE_CODE (arg1
) == INTEGER_CST
13046 && TREE_CODE (arg0
) == ABS_EXPR
13047 && ! TREE_SIDE_EFFECTS (arg0
)
13048 && (0 != (tem
= negate_expr (arg1
)))
13049 && TREE_CODE (tem
) == INTEGER_CST
13050 && !TREE_OVERFLOW (tem
))
13051 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13052 build2 (GE_EXPR
, type
,
13053 TREE_OPERAND (arg0
, 0), tem
),
13054 build2 (LE_EXPR
, type
,
13055 TREE_OPERAND (arg0
, 0), arg1
));
13057 /* Convert ABS_EXPR<x> >= 0 to true. */
13058 strict_overflow_p
= false;
13059 if (code
== GE_EXPR
13060 && (integer_zerop (arg1
)
13061 || (! HONOR_NANS (arg0
)
13062 && real_zerop (arg1
)))
13063 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13065 if (strict_overflow_p
)
13066 fold_overflow_warning (("assuming signed overflow does not occur "
13067 "when simplifying comparison of "
13068 "absolute value and zero"),
13069 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13070 return omit_one_operand_loc (loc
, type
,
13071 constant_boolean_node (true, type
),
13075 /* Convert ABS_EXPR<x> < 0 to false. */
13076 strict_overflow_p
= false;
13077 if (code
== LT_EXPR
13078 && (integer_zerop (arg1
) || real_zerop (arg1
))
13079 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13081 if (strict_overflow_p
)
13082 fold_overflow_warning (("assuming signed overflow does not occur "
13083 "when simplifying comparison of "
13084 "absolute value and zero"),
13085 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13086 return omit_one_operand_loc (loc
, type
,
13087 constant_boolean_node (false, type
),
13091 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13092 and similarly for >= into !=. */
13093 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13094 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13095 && TREE_CODE (arg1
) == LSHIFT_EXPR
13096 && integer_onep (TREE_OPERAND (arg1
, 0)))
13097 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13098 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13099 TREE_OPERAND (arg1
, 1)),
13100 build_zero_cst (TREE_TYPE (arg0
)));
13102 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13103 otherwise Y might be >= # of bits in X's type and thus e.g.
13104 (unsigned char) (1 << Y) for Y 15 might be 0.
13105 If the cast is widening, then 1 << Y should have unsigned type,
13106 otherwise if Y is number of bits in the signed shift type minus 1,
13107 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13108 31 might be 0xffffffff80000000. */
13109 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13110 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13111 && CONVERT_EXPR_P (arg1
)
13112 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
13113 && (element_precision (TREE_TYPE (arg1
))
13114 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
13115 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
13116 || (element_precision (TREE_TYPE (arg1
))
13117 == element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
13118 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
13120 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13121 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
13122 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13123 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
13124 build_zero_cst (TREE_TYPE (arg0
)));
13129 case UNORDERED_EXPR
:
13137 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
13139 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
13140 if (t1
!= NULL_TREE
)
13144 /* If the first operand is NaN, the result is constant. */
13145 if (TREE_CODE (arg0
) == REAL_CST
13146 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
13147 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13149 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13150 ? integer_zero_node
13151 : integer_one_node
;
13152 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
13155 /* If the second operand is NaN, the result is constant. */
13156 if (TREE_CODE (arg1
) == REAL_CST
13157 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
13158 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13160 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13161 ? integer_zero_node
13162 : integer_one_node
;
13163 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
13166 /* Simplify unordered comparison of something with itself. */
13167 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
13168 && operand_equal_p (arg0
, arg1
, 0))
13169 return constant_boolean_node (1, type
);
13171 if (code
== LTGT_EXPR
13172 && !flag_trapping_math
13173 && operand_equal_p (arg0
, arg1
, 0))
13174 return constant_boolean_node (0, type
);
13176 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13178 tree targ0
= strip_float_extensions (arg0
);
13179 tree targ1
= strip_float_extensions (arg1
);
13180 tree newtype
= TREE_TYPE (targ0
);
13182 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
13183 newtype
= TREE_TYPE (targ1
);
13185 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
13186 return fold_build2_loc (loc
, code
, type
,
13187 fold_convert_loc (loc
, newtype
, targ0
),
13188 fold_convert_loc (loc
, newtype
, targ1
));
13193 case COMPOUND_EXPR
:
13194 /* When pedantic, a compound expression can be neither an lvalue
13195 nor an integer constant expression. */
13196 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
13198 /* Don't let (0, 0) be null pointer constant. */
13199 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
13200 : fold_convert_loc (loc
, type
, arg1
);
13201 return pedantic_non_lvalue_loc (loc
, tem
);
13204 /* An ASSERT_EXPR should never be passed to fold_binary. */
13205 gcc_unreachable ();
13209 } /* switch (code) */
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */
13217 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
13219 switch (TREE_CODE (*tp
))
13225 *walk_subtrees
= 0;
13227 /* ... fall through ... */
13234 /* Return whether the sub-tree ST contains a label which is accessible from
13235 outside the sub-tree. */
13238 contains_label_p (tree st
)
13241 (walk_tree_without_duplicates (&st
, contains_label_1
, NULL
) != NULL_TREE
);
13244 /* Fold a ternary expression of code CODE and type TYPE with operands
13245 OP0, OP1, and OP2. Return the folded expression if folding is
13246 successful. Otherwise, return NULL_TREE. */
13249 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
13250 tree op0
, tree op1
, tree op2
)
13253 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
13254 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13256 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
13257 && TREE_CODE_LENGTH (code
) == 3);
13259 /* If this is a commutative operation, and OP0 is a constant, move it
13260 to OP1 to reduce the number of tests below. */
13261 if (commutative_ternary_tree_code (code
)
13262 && tree_swap_operands_p (op0
, op1
, true))
13263 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
13265 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
13269 /* Strip any conversions that don't change the mode. This is safe
13270 for every expression, except for a comparison expression because
13271 its signedness is derived from its operands. So, in the latter
13272 case, only strip conversions that don't change the signedness.
13274 Note that this is done as an internal manipulation within the
13275 constant folder, in order to find the simplest representation of
13276 the arguments so that their form can be studied. In any cases,
13277 the appropriate type conversions should be put back in the tree
13278 that will get out of the constant folder. */
13299 case COMPONENT_REF
:
13300 if (TREE_CODE (arg0
) == CONSTRUCTOR
13301 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
13303 unsigned HOST_WIDE_INT idx
;
13305 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
13312 case VEC_COND_EXPR
:
13313 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13314 so all simple results must be passed through pedantic_non_lvalue. */
13315 if (TREE_CODE (arg0
) == INTEGER_CST
)
13317 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
13318 tem
= integer_zerop (arg0
) ? op2
: op1
;
13319 /* Only optimize constant conditions when the selected branch
13320 has the same type as the COND_EXPR. This avoids optimizing
13321 away "c ? x : throw", where the throw has a void type.
13322 Avoid throwing away that operand which contains label. */
13323 if ((!TREE_SIDE_EFFECTS (unused_op
)
13324 || !contains_label_p (unused_op
))
13325 && (! VOID_TYPE_P (TREE_TYPE (tem
))
13326 || VOID_TYPE_P (type
)))
13327 return pedantic_non_lvalue_loc (loc
, tem
);
13330 else if (TREE_CODE (arg0
) == VECTOR_CST
)
13332 if ((TREE_CODE (arg1
) == VECTOR_CST
13333 || TREE_CODE (arg1
) == CONSTRUCTOR
)
13334 && (TREE_CODE (arg2
) == VECTOR_CST
13335 || TREE_CODE (arg2
) == CONSTRUCTOR
))
13337 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13338 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
13339 gcc_assert (nelts
== VECTOR_CST_NELTS (arg0
));
13340 for (i
= 0; i
< nelts
; i
++)
13342 tree val
= VECTOR_CST_ELT (arg0
, i
);
13343 if (integer_all_onesp (val
))
13345 else if (integer_zerop (val
))
13346 sel
[i
] = nelts
+ i
;
13347 else /* Currently unreachable. */
13350 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
13351 if (t
!= NULL_TREE
)
13356 /* If we have A op B ? A : C, we may be able to convert this to a
13357 simpler expression, depending on the operation and the values
13358 of B and C. Signed zeros prevent all of these transformations,
13359 for reasons given above each one.
13361 Also try swapping the arguments and inverting the conditional. */
13362 if (COMPARISON_CLASS_P (arg0
)
13363 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13364 arg1
, TREE_OPERAND (arg0
, 1))
13365 && !HONOR_SIGNED_ZEROS (element_mode (arg1
)))
13367 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
13372 if (COMPARISON_CLASS_P (arg0
)
13373 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13375 TREE_OPERAND (arg0
, 1))
13376 && !HONOR_SIGNED_ZEROS (element_mode (op2
)))
13378 location_t loc0
= expr_location_or (arg0
, loc
);
13379 tem
= fold_invert_truthvalue (loc0
, arg0
);
13380 if (tem
&& COMPARISON_CLASS_P (tem
))
13382 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
13388 /* If the second operand is simpler than the third, swap them
13389 since that produces better jump optimization results. */
13390 if (truth_value_p (TREE_CODE (arg0
))
13391 && tree_swap_operands_p (op1
, op2
, false))
13393 location_t loc0
= expr_location_or (arg0
, loc
);
13394 /* See if this can be inverted. If it can't, possibly because
13395 it was a floating-point inequality comparison, don't do
13397 tem
= fold_invert_truthvalue (loc0
, arg0
);
13399 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
13402 /* Convert A ? 1 : 0 to simply A. */
13403 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
13404 : (integer_onep (op1
)
13405 && !VECTOR_TYPE_P (type
)))
13406 && integer_zerop (op2
)
13407 /* If we try to convert OP0 to our type, the
13408 call to fold will try to move the conversion inside
13409 a COND, which will recurse. In that case, the COND_EXPR
13410 is probably the best choice, so leave it alone. */
13411 && type
== TREE_TYPE (arg0
))
13412 return pedantic_non_lvalue_loc (loc
, arg0
);
13414 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13415 over COND_EXPR in cases such as floating point comparisons. */
13416 if (integer_zerop (op1
)
13417 && (code
== VEC_COND_EXPR
? integer_all_onesp (op2
)
13418 : (integer_onep (op2
)
13419 && !VECTOR_TYPE_P (type
)))
13420 && truth_value_p (TREE_CODE (arg0
)))
13421 return pedantic_non_lvalue_loc (loc
,
13422 fold_convert_loc (loc
, type
,
13423 invert_truthvalue_loc (loc
,
13426 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13427 if (TREE_CODE (arg0
) == LT_EXPR
13428 && integer_zerop (TREE_OPERAND (arg0
, 1))
13429 && integer_zerop (op2
)
13430 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
13432 /* sign_bit_p looks through both zero and sign extensions,
13433 but for this optimization only sign extensions are
13435 tree tem2
= TREE_OPERAND (arg0
, 0);
13436 while (tem
!= tem2
)
13438 if (TREE_CODE (tem2
) != NOP_EXPR
13439 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
13444 tem2
= TREE_OPERAND (tem2
, 0);
13446 /* sign_bit_p only checks ARG1 bits within A's precision.
13447 If <sign bit of A> has wider type than A, bits outside
13448 of A's precision in <sign bit of A> need to be checked.
13449 If they are all 0, this optimization needs to be done
13450 in unsigned A's type, if they are all 1 in signed A's type,
13451 otherwise this can't be done. */
13453 && TYPE_PRECISION (TREE_TYPE (tem
))
13454 < TYPE_PRECISION (TREE_TYPE (arg1
))
13455 && TYPE_PRECISION (TREE_TYPE (tem
))
13456 < TYPE_PRECISION (type
))
13458 int inner_width
, outer_width
;
13461 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
13462 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
13463 if (outer_width
> TYPE_PRECISION (type
))
13464 outer_width
= TYPE_PRECISION (type
);
13466 wide_int mask
= wi::shifted_mask
13467 (inner_width
, outer_width
- inner_width
, false,
13468 TYPE_PRECISION (TREE_TYPE (arg1
)));
13470 wide_int common
= mask
& arg1
;
13471 if (common
== mask
)
13473 tem_type
= signed_type_for (TREE_TYPE (tem
));
13474 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13476 else if (common
== 0)
13478 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
13479 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13487 fold_convert_loc (loc
, type
,
13488 fold_build2_loc (loc
, BIT_AND_EXPR
,
13489 TREE_TYPE (tem
), tem
,
13490 fold_convert_loc (loc
,
13495 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13496 already handled above. */
13497 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13498 && integer_onep (TREE_OPERAND (arg0
, 1))
13499 && integer_zerop (op2
)
13500 && integer_pow2p (arg1
))
13502 tree tem
= TREE_OPERAND (arg0
, 0);
13504 if (TREE_CODE (tem
) == RSHIFT_EXPR
13505 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
13506 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
13507 tree_to_uhwi (TREE_OPERAND (tem
, 1)))
13508 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
13509 TREE_OPERAND (tem
, 0), arg1
);
13512 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13513 is probably obsolete because the first operand should be a
13514 truth value (that's why we have the two cases above), but let's
13515 leave it in until we can confirm this for all front-ends. */
13516 if (integer_zerop (op2
)
13517 && TREE_CODE (arg0
) == NE_EXPR
13518 && integer_zerop (TREE_OPERAND (arg0
, 1))
13519 && integer_pow2p (arg1
)
13520 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
13521 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
13522 arg1
, OEP_ONLY_CONST
))
13523 return pedantic_non_lvalue_loc (loc
,
13524 fold_convert_loc (loc
, type
,
13525 TREE_OPERAND (arg0
, 0)));
13527 /* Disable the transformations below for vectors, since
13528 fold_binary_op_with_conditional_arg may undo them immediately,
13529 yielding an infinite loop. */
13530 if (code
== VEC_COND_EXPR
)
13533 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13534 if (integer_zerop (op2
)
13535 && truth_value_p (TREE_CODE (arg0
))
13536 && truth_value_p (TREE_CODE (arg1
))
13537 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13538 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
13539 : TRUTH_ANDIF_EXPR
,
13540 type
, fold_convert_loc (loc
, type
, arg0
), arg1
);
13542 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13543 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
13544 && truth_value_p (TREE_CODE (arg0
))
13545 && truth_value_p (TREE_CODE (arg1
))
13546 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13548 location_t loc0
= expr_location_or (arg0
, loc
);
13549 /* Only perform transformation if ARG0 is easily inverted. */
13550 tem
= fold_invert_truthvalue (loc0
, arg0
);
13552 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13555 type
, fold_convert_loc (loc
, type
, tem
),
13559 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13560 if (integer_zerop (arg1
)
13561 && truth_value_p (TREE_CODE (arg0
))
13562 && truth_value_p (TREE_CODE (op2
))
13563 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13565 location_t loc0
= expr_location_or (arg0
, loc
);
13566 /* Only perform transformation if ARG0 is easily inverted. */
13567 tem
= fold_invert_truthvalue (loc0
, arg0
);
13569 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13570 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
13571 type
, fold_convert_loc (loc
, type
, tem
),
13575 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13576 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
13577 && truth_value_p (TREE_CODE (arg0
))
13578 && truth_value_p (TREE_CODE (op2
))
13579 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13580 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13581 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
13582 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
13587 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13588 of fold_ternary on them. */
13589 gcc_unreachable ();
13591 case BIT_FIELD_REF
:
13592 if ((TREE_CODE (arg0
) == VECTOR_CST
13593 || (TREE_CODE (arg0
) == CONSTRUCTOR
13594 && TREE_CODE (TREE_TYPE (arg0
)) == VECTOR_TYPE
))
13595 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
13596 || (TREE_CODE (type
) == VECTOR_TYPE
13597 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
13599 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
13600 unsigned HOST_WIDE_INT width
= tree_to_uhwi (TYPE_SIZE (eltype
));
13601 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
13602 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
13605 && (idx
% width
) == 0
13606 && (n
% width
) == 0
13607 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
13612 if (TREE_CODE (arg0
) == VECTOR_CST
)
13615 return VECTOR_CST_ELT (arg0
, idx
);
13617 tree
*vals
= XALLOCAVEC (tree
, n
);
13618 for (unsigned i
= 0; i
< n
; ++i
)
13619 vals
[i
] = VECTOR_CST_ELT (arg0
, idx
+ i
);
13620 return build_vector (type
, vals
);
13623 /* Constructor elements can be subvectors. */
13624 unsigned HOST_WIDE_INT k
= 1;
13625 if (CONSTRUCTOR_NELTS (arg0
) != 0)
13627 tree cons_elem
= TREE_TYPE (CONSTRUCTOR_ELT (arg0
, 0)->value
);
13628 if (TREE_CODE (cons_elem
) == VECTOR_TYPE
)
13629 k
= TYPE_VECTOR_SUBPARTS (cons_elem
);
13632 /* We keep an exact subset of the constructor elements. */
13633 if ((idx
% k
) == 0 && (n
% k
) == 0)
13635 if (CONSTRUCTOR_NELTS (arg0
) == 0)
13636 return build_constructor (type
, NULL
);
13641 if (idx
< CONSTRUCTOR_NELTS (arg0
))
13642 return CONSTRUCTOR_ELT (arg0
, idx
)->value
;
13643 return build_zero_cst (type
);
13646 vec
<constructor_elt
, va_gc
> *vals
;
13647 vec_alloc (vals
, n
);
13648 for (unsigned i
= 0;
13649 i
< n
&& idx
+ i
< CONSTRUCTOR_NELTS (arg0
);
13651 CONSTRUCTOR_APPEND_ELT (vals
, NULL_TREE
,
13653 (arg0
, idx
+ i
)->value
);
13654 return build_constructor (type
, vals
);
13656 /* The bitfield references a single constructor element. */
13657 else if (idx
+ n
<= (idx
/ k
+ 1) * k
)
13659 if (CONSTRUCTOR_NELTS (arg0
) <= idx
/ k
)
13660 return build_zero_cst (type
);
13662 return CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
;
13664 return fold_build3_loc (loc
, code
, type
,
13665 CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
, op1
,
13666 build_int_cst (TREE_TYPE (op2
), (idx
% k
) * width
));
13671 /* A bit-field-ref that referenced the full argument can be stripped. */
13672 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
13673 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_to_uhwi (arg1
)
13674 && integer_zerop (op2
))
13675 return fold_convert_loc (loc
, type
, arg0
);
13677 /* On constants we can use native encode/interpret to constant
13678 fold (nearly) all BIT_FIELD_REFs. */
13679 if (CONSTANT_CLASS_P (arg0
)
13680 && can_native_interpret_type_p (type
)
13681 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)))
13682 /* This limitation should not be necessary, we just need to
13683 round this up to mode size. */
13684 && tree_to_uhwi (op1
) % BITS_PER_UNIT
== 0
13685 /* Need bit-shifting of the buffer to relax the following. */
13686 && tree_to_uhwi (op2
) % BITS_PER_UNIT
== 0)
13688 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
13689 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
13690 unsigned HOST_WIDE_INT clen
;
13691 clen
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)));
13692 /* ??? We cannot tell native_encode_expr to start at
13693 some random byte only. So limit us to a reasonable amount
13697 unsigned char *b
= XALLOCAVEC (unsigned char, clen
);
13698 unsigned HOST_WIDE_INT len
= native_encode_expr (arg0
, b
, clen
);
13700 && len
* BITS_PER_UNIT
>= bitpos
+ bitsize
)
13702 tree v
= native_interpret_expr (type
,
13703 b
+ bitpos
/ BITS_PER_UNIT
,
13704 bitsize
/ BITS_PER_UNIT
);
13714 /* For integers we can decompose the FMA if possible. */
13715 if (TREE_CODE (arg0
) == INTEGER_CST
13716 && TREE_CODE (arg1
) == INTEGER_CST
)
13717 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
13718 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
13719 if (integer_zerop (arg2
))
13720 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
13722 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
13724 case VEC_PERM_EXPR
:
13725 if (TREE_CODE (arg2
) == VECTOR_CST
)
13727 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
, mask
, mask2
;
13728 unsigned char *sel
= XALLOCAVEC (unsigned char, 2 * nelts
);
13729 unsigned char *sel2
= sel
+ nelts
;
13730 bool need_mask_canon
= false;
13731 bool need_mask_canon2
= false;
13732 bool all_in_vec0
= true;
13733 bool all_in_vec1
= true;
13734 bool maybe_identity
= true;
13735 bool single_arg
= (op0
== op1
);
13736 bool changed
= false;
13738 mask2
= 2 * nelts
- 1;
13739 mask
= single_arg
? (nelts
- 1) : mask2
;
13740 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
13741 for (i
= 0; i
< nelts
; i
++)
13743 tree val
= VECTOR_CST_ELT (arg2
, i
);
13744 if (TREE_CODE (val
) != INTEGER_CST
)
13747 /* Make sure that the perm value is in an acceptable
13750 need_mask_canon
|= wi::gtu_p (t
, mask
);
13751 need_mask_canon2
|= wi::gtu_p (t
, mask2
);
13752 sel
[i
] = t
.to_uhwi () & mask
;
13753 sel2
[i
] = t
.to_uhwi () & mask2
;
13755 if (sel
[i
] < nelts
)
13756 all_in_vec1
= false;
13758 all_in_vec0
= false;
13760 if ((sel
[i
] & (nelts
-1)) != i
)
13761 maybe_identity
= false;
13764 if (maybe_identity
)
13774 else if (all_in_vec1
)
13777 for (i
= 0; i
< nelts
; i
++)
13779 need_mask_canon
= true;
13782 if ((TREE_CODE (op0
) == VECTOR_CST
13783 || TREE_CODE (op0
) == CONSTRUCTOR
)
13784 && (TREE_CODE (op1
) == VECTOR_CST
13785 || TREE_CODE (op1
) == CONSTRUCTOR
))
13787 tree t
= fold_vec_perm (type
, op0
, op1
, sel
);
13788 if (t
!= NULL_TREE
)
13792 if (op0
== op1
&& !single_arg
)
13795 /* Some targets are deficient and fail to expand a single
13796 argument permutation while still allowing an equivalent
13797 2-argument version. */
13798 if (need_mask_canon
&& arg2
== op2
13799 && !can_vec_perm_p (TYPE_MODE (type
), false, sel
)
13800 && can_vec_perm_p (TYPE_MODE (type
), false, sel2
))
13802 need_mask_canon
= need_mask_canon2
;
13806 if (need_mask_canon
&& arg2
== op2
)
13808 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
13809 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
13810 for (i
= 0; i
< nelts
; i
++)
13811 tsel
[i
] = build_int_cst (eltype
, sel
[i
]);
13812 op2
= build_vector (TREE_TYPE (arg2
), tsel
);
13817 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
13823 } /* switch (code) */
13826 /* Perform constant folding and related simplification of EXPR.
13827 The related simplifications include x*1 => x, x*0 => 0, etc.,
13828 and application of the associative law.
13829 NOP_EXPR conversions may be removed freely (as long as we
13830 are careful not to change the type of the overall expression).
13831 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13832 but we can constant-fold them if they have constant operands. */
13834 #ifdef ENABLE_FOLD_CHECKING
13835 # define fold(x) fold_1 (x)
13836 static tree
fold_1 (tree
);
13842 const tree t
= expr
;
13843 enum tree_code code
= TREE_CODE (t
);
13844 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13846 location_t loc
= EXPR_LOCATION (expr
);
13848 /* Return right away if a constant. */
13849 if (kind
== tcc_constant
)
13852 /* CALL_EXPR-like objects with variable numbers of operands are
13853 treated specially. */
13854 if (kind
== tcc_vl_exp
)
13856 if (code
== CALL_EXPR
)
13858 tem
= fold_call_expr (loc
, expr
, false);
13859 return tem
? tem
: expr
;
13864 if (IS_EXPR_CODE_CLASS (kind
))
13866 tree type
= TREE_TYPE (t
);
13867 tree op0
, op1
, op2
;
13869 switch (TREE_CODE_LENGTH (code
))
13872 op0
= TREE_OPERAND (t
, 0);
13873 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13874 return tem
? tem
: expr
;
13876 op0
= TREE_OPERAND (t
, 0);
13877 op1
= TREE_OPERAND (t
, 1);
13878 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13879 return tem
? tem
: expr
;
13881 op0
= TREE_OPERAND (t
, 0);
13882 op1
= TREE_OPERAND (t
, 1);
13883 op2
= TREE_OPERAND (t
, 2);
13884 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13885 return tem
? tem
: expr
;
13895 tree op0
= TREE_OPERAND (t
, 0);
13896 tree op1
= TREE_OPERAND (t
, 1);
13898 if (TREE_CODE (op1
) == INTEGER_CST
13899 && TREE_CODE (op0
) == CONSTRUCTOR
13900 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
13902 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (op0
);
13903 unsigned HOST_WIDE_INT end
= vec_safe_length (elts
);
13904 unsigned HOST_WIDE_INT begin
= 0;
13906 /* Find a matching index by means of a binary search. */
13907 while (begin
!= end
)
13909 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
13910 tree index
= (*elts
)[middle
].index
;
13912 if (TREE_CODE (index
) == INTEGER_CST
13913 && tree_int_cst_lt (index
, op1
))
13914 begin
= middle
+ 1;
13915 else if (TREE_CODE (index
) == INTEGER_CST
13916 && tree_int_cst_lt (op1
, index
))
13918 else if (TREE_CODE (index
) == RANGE_EXPR
13919 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
13920 begin
= middle
+ 1;
13921 else if (TREE_CODE (index
) == RANGE_EXPR
13922 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
13925 return (*elts
)[middle
].value
;
13932 /* Return a VECTOR_CST if possible. */
13935 tree type
= TREE_TYPE (t
);
13936 if (TREE_CODE (type
) != VECTOR_TYPE
)
13939 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
13940 unsigned HOST_WIDE_INT idx
, pos
= 0;
13943 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), idx
, value
)
13945 if (!CONSTANT_CLASS_P (value
))
13947 if (TREE_CODE (value
) == VECTOR_CST
)
13949 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
13950 vec
[pos
++] = VECTOR_CST_ELT (value
, i
);
13953 vec
[pos
++] = value
;
13955 for (; pos
< TYPE_VECTOR_SUBPARTS (type
); ++pos
)
13956 vec
[pos
] = build_zero_cst (TREE_TYPE (type
));
13958 return build_vector (type
, vec
);
13962 return fold (DECL_INITIAL (t
));
13966 } /* switch (code) */
13969 #ifdef ENABLE_FOLD_CHECKING
13972 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
13973 hash_table
<pointer_hash
<const tree_node
> > *);
13974 static void fold_check_failed (const_tree
, const_tree
);
13975 void print_fold_checksum (const_tree
);
13977 /* When --enable-checking=fold, compute a digest of expr before
13978 and after actual fold call to see if fold did not accidentally
13979 change original expr. */
13985 struct md5_ctx ctx
;
13986 unsigned char checksum_before
[16], checksum_after
[16];
13987 hash_table
<pointer_hash
<const tree_node
> > ht (32);
13989 md5_init_ctx (&ctx
);
13990 fold_checksum_tree (expr
, &ctx
, &ht
);
13991 md5_finish_ctx (&ctx
, checksum_before
);
13994 ret
= fold_1 (expr
);
13996 md5_init_ctx (&ctx
);
13997 fold_checksum_tree (expr
, &ctx
, &ht
);
13998 md5_finish_ctx (&ctx
, checksum_after
);
14000 if (memcmp (checksum_before
, checksum_after
, 16))
14001 fold_check_failed (expr
, ret
);
14007 print_fold_checksum (const_tree expr
)
14009 struct md5_ctx ctx
;
14010 unsigned char checksum
[16], cnt
;
14011 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14013 md5_init_ctx (&ctx
);
14014 fold_checksum_tree (expr
, &ctx
, &ht
);
14015 md5_finish_ctx (&ctx
, checksum
);
14016 for (cnt
= 0; cnt
< 16; ++cnt
)
14017 fprintf (stderr
, "%02x", checksum
[cnt
]);
14018 putc ('\n', stderr
);
14022 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
14024 internal_error ("fold check: original tree changed by fold");
14028 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
14029 hash_table
<pointer_hash
<const tree_node
> > *ht
)
14031 const tree_node
**slot
;
14032 enum tree_code code
;
14033 union tree_node buf
;
14039 slot
= ht
->find_slot (expr
, INSERT
);
14043 code
= TREE_CODE (expr
);
14044 if (TREE_CODE_CLASS (code
) == tcc_declaration
14045 && HAS_DECL_ASSEMBLER_NAME_P (expr
))
14047 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
14048 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14049 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
14050 buf
.decl_with_vis
.symtab_node
= NULL
;
14051 expr
= (tree
) &buf
;
14053 else if (TREE_CODE_CLASS (code
) == tcc_type
14054 && (TYPE_POINTER_TO (expr
)
14055 || TYPE_REFERENCE_TO (expr
)
14056 || TYPE_CACHED_VALUES_P (expr
)
14057 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
14058 || TYPE_NEXT_VARIANT (expr
)))
14060 /* Allow these fields to be modified. */
14062 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14063 expr
= tmp
= (tree
) &buf
;
14064 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
14065 TYPE_POINTER_TO (tmp
) = NULL
;
14066 TYPE_REFERENCE_TO (tmp
) = NULL
;
14067 TYPE_NEXT_VARIANT (tmp
) = NULL
;
14068 if (TYPE_CACHED_VALUES_P (tmp
))
14070 TYPE_CACHED_VALUES_P (tmp
) = 0;
14071 TYPE_CACHED_VALUES (tmp
) = NULL
;
14074 md5_process_bytes (expr
, tree_size (expr
), ctx
);
14075 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
14076 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
14077 if (TREE_CODE_CLASS (code
) != tcc_type
14078 && TREE_CODE_CLASS (code
) != tcc_declaration
14079 && code
!= TREE_LIST
14080 && code
!= SSA_NAME
14081 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
14082 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
14083 switch (TREE_CODE_CLASS (code
))
14089 md5_process_bytes (TREE_STRING_POINTER (expr
),
14090 TREE_STRING_LENGTH (expr
), ctx
);
14093 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
14094 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
14097 for (i
= 0; i
< (int) VECTOR_CST_NELTS (expr
); ++i
)
14098 fold_checksum_tree (VECTOR_CST_ELT (expr
, i
), ctx
, ht
);
14104 case tcc_exceptional
:
14108 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
14109 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
14110 expr
= TREE_CHAIN (expr
);
14111 goto recursive_label
;
14114 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
14115 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
14121 case tcc_expression
:
14122 case tcc_reference
:
14123 case tcc_comparison
:
14126 case tcc_statement
:
14128 len
= TREE_OPERAND_LENGTH (expr
);
14129 for (i
= 0; i
< len
; ++i
)
14130 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
14132 case tcc_declaration
:
14133 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
14134 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
14135 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
14137 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
14138 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
14139 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
14140 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
14141 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
14144 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
14146 if (TREE_CODE (expr
) == FUNCTION_DECL
)
14148 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
14149 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
14151 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
14155 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
14156 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
14157 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
14158 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
14159 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
14160 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
14161 if (INTEGRAL_TYPE_P (expr
)
14162 || SCALAR_FLOAT_TYPE_P (expr
))
14164 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
14165 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
14167 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
14168 if (TREE_CODE (expr
) == RECORD_TYPE
14169 || TREE_CODE (expr
) == UNION_TYPE
14170 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
14171 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
14172 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
14179 /* Helper function for outputting the checksum of a tree T. When
14180 debugging with gdb, you can "define mynext" to be "next" followed
14181 by "call debug_fold_checksum (op0)", then just trace down till the
14184 DEBUG_FUNCTION
void
14185 debug_fold_checksum (const_tree t
)
14188 unsigned char checksum
[16];
14189 struct md5_ctx ctx
;
14190 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14192 md5_init_ctx (&ctx
);
14193 fold_checksum_tree (t
, &ctx
, &ht
);
14194 md5_finish_ctx (&ctx
, checksum
);
14197 for (i
= 0; i
< 16; i
++)
14198 fprintf (stderr
, "%d ", checksum
[i
]);
14200 fprintf (stderr
, "\n");
14205 /* Fold a unary tree expression with code CODE of type TYPE with an
14206 operand OP0. LOC is the location of the resulting expression.
14207 Return a folded expression if successful. Otherwise, return a tree
14208 expression with code CODE of type TYPE with an operand OP0. */
14211 fold_build1_stat_loc (location_t loc
,
14212 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
14215 #ifdef ENABLE_FOLD_CHECKING
14216 unsigned char checksum_before
[16], checksum_after
[16];
14217 struct md5_ctx ctx
;
14218 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14220 md5_init_ctx (&ctx
);
14221 fold_checksum_tree (op0
, &ctx
, &ht
);
14222 md5_finish_ctx (&ctx
, checksum_before
);
14226 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14228 tem
= build1_stat_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
14230 #ifdef ENABLE_FOLD_CHECKING
14231 md5_init_ctx (&ctx
);
14232 fold_checksum_tree (op0
, &ctx
, &ht
);
14233 md5_finish_ctx (&ctx
, checksum_after
);
14235 if (memcmp (checksum_before
, checksum_after
, 16))
14236 fold_check_failed (op0
, tem
);
14241 /* Fold a binary tree expression with code CODE of type TYPE with
14242 operands OP0 and OP1. LOC is the location of the resulting
14243 expression. Return a folded expression if successful. Otherwise,
14244 return a tree expression with code CODE of type TYPE with operands
14248 fold_build2_stat_loc (location_t loc
,
14249 enum tree_code code
, tree type
, tree op0
, tree op1
14253 #ifdef ENABLE_FOLD_CHECKING
14254 unsigned char checksum_before_op0
[16],
14255 checksum_before_op1
[16],
14256 checksum_after_op0
[16],
14257 checksum_after_op1
[16];
14258 struct md5_ctx ctx
;
14259 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14261 md5_init_ctx (&ctx
);
14262 fold_checksum_tree (op0
, &ctx
, &ht
);
14263 md5_finish_ctx (&ctx
, checksum_before_op0
);
14266 md5_init_ctx (&ctx
);
14267 fold_checksum_tree (op1
, &ctx
, &ht
);
14268 md5_finish_ctx (&ctx
, checksum_before_op1
);
14272 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14274 tem
= build2_stat_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
14276 #ifdef ENABLE_FOLD_CHECKING
14277 md5_init_ctx (&ctx
);
14278 fold_checksum_tree (op0
, &ctx
, &ht
);
14279 md5_finish_ctx (&ctx
, checksum_after_op0
);
14282 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14283 fold_check_failed (op0
, tem
);
14285 md5_init_ctx (&ctx
);
14286 fold_checksum_tree (op1
, &ctx
, &ht
);
14287 md5_finish_ctx (&ctx
, checksum_after_op1
);
14289 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14290 fold_check_failed (op1
, tem
);
14295 /* Fold a ternary tree expression with code CODE of type TYPE with
14296 operands OP0, OP1, and OP2. Return a folded expression if
14297 successful. Otherwise, return a tree expression with code CODE of
14298 type TYPE with operands OP0, OP1, and OP2. */
14301 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
14302 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
14305 #ifdef ENABLE_FOLD_CHECKING
14306 unsigned char checksum_before_op0
[16],
14307 checksum_before_op1
[16],
14308 checksum_before_op2
[16],
14309 checksum_after_op0
[16],
14310 checksum_after_op1
[16],
14311 checksum_after_op2
[16];
14312 struct md5_ctx ctx
;
14313 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14315 md5_init_ctx (&ctx
);
14316 fold_checksum_tree (op0
, &ctx
, &ht
);
14317 md5_finish_ctx (&ctx
, checksum_before_op0
);
14320 md5_init_ctx (&ctx
);
14321 fold_checksum_tree (op1
, &ctx
, &ht
);
14322 md5_finish_ctx (&ctx
, checksum_before_op1
);
14325 md5_init_ctx (&ctx
);
14326 fold_checksum_tree (op2
, &ctx
, &ht
);
14327 md5_finish_ctx (&ctx
, checksum_before_op2
);
14331 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
14332 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14334 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
14336 #ifdef ENABLE_FOLD_CHECKING
14337 md5_init_ctx (&ctx
);
14338 fold_checksum_tree (op0
, &ctx
, &ht
);
14339 md5_finish_ctx (&ctx
, checksum_after_op0
);
14342 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14343 fold_check_failed (op0
, tem
);
14345 md5_init_ctx (&ctx
);
14346 fold_checksum_tree (op1
, &ctx
, &ht
);
14347 md5_finish_ctx (&ctx
, checksum_after_op1
);
14350 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14351 fold_check_failed (op1
, tem
);
14353 md5_init_ctx (&ctx
);
14354 fold_checksum_tree (op2
, &ctx
, &ht
);
14355 md5_finish_ctx (&ctx
, checksum_after_op2
);
14357 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
14358 fold_check_failed (op2
, tem
);
14363 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14364 arguments in ARGARRAY, and a null static chain.
14365 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14366 of type TYPE from the given operands as constructed by build_call_array. */
14369 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
14370 int nargs
, tree
*argarray
)
14373 #ifdef ENABLE_FOLD_CHECKING
14374 unsigned char checksum_before_fn
[16],
14375 checksum_before_arglist
[16],
14376 checksum_after_fn
[16],
14377 checksum_after_arglist
[16];
14378 struct md5_ctx ctx
;
14379 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14382 md5_init_ctx (&ctx
);
14383 fold_checksum_tree (fn
, &ctx
, &ht
);
14384 md5_finish_ctx (&ctx
, checksum_before_fn
);
14387 md5_init_ctx (&ctx
);
14388 for (i
= 0; i
< nargs
; i
++)
14389 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
14390 md5_finish_ctx (&ctx
, checksum_before_arglist
);
14394 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
14396 tem
= build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
14398 #ifdef ENABLE_FOLD_CHECKING
14399 md5_init_ctx (&ctx
);
14400 fold_checksum_tree (fn
, &ctx
, &ht
);
14401 md5_finish_ctx (&ctx
, checksum_after_fn
);
14404 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
14405 fold_check_failed (fn
, tem
);
14407 md5_init_ctx (&ctx
);
14408 for (i
= 0; i
< nargs
; i
++)
14409 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
14410 md5_finish_ctx (&ctx
, checksum_after_arglist
);
14412 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
14413 fold_check_failed (NULL_TREE
, tem
);
14418 /* Perform constant folding and related simplification of initializer
14419 expression EXPR. These behave identically to "fold_buildN" but ignore
14420 potential run-time traps and exceptions that fold must preserve. */
14422 #define START_FOLD_INIT \
14423 int saved_signaling_nans = flag_signaling_nans;\
14424 int saved_trapping_math = flag_trapping_math;\
14425 int saved_rounding_math = flag_rounding_math;\
14426 int saved_trapv = flag_trapv;\
14427 int saved_folding_initializer = folding_initializer;\
14428 flag_signaling_nans = 0;\
14429 flag_trapping_math = 0;\
14430 flag_rounding_math = 0;\
14432 folding_initializer = 1;
14434 #define END_FOLD_INIT \
14435 flag_signaling_nans = saved_signaling_nans;\
14436 flag_trapping_math = saved_trapping_math;\
14437 flag_rounding_math = saved_rounding_math;\
14438 flag_trapv = saved_trapv;\
14439 folding_initializer = saved_folding_initializer;
14442 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
14443 tree type
, tree op
)
14448 result
= fold_build1_loc (loc
, code
, type
, op
);
14455 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
14456 tree type
, tree op0
, tree op1
)
14461 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
14468 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
14469 int nargs
, tree
*argarray
)
14474 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
14480 #undef START_FOLD_INIT
14481 #undef END_FOLD_INIT
14483 /* Determine if first argument is a multiple of second argument. Return 0 if
14484 it is not, or we cannot easily determined it to be.
14486 An example of the sort of thing we care about (at this point; this routine
14487 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14488 fold cases do now) is discovering that
14490 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14496 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14498 This code also handles discovering that
14500 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14502 is a multiple of 8 so we don't have to worry about dealing with a
14503 possible remainder.
14505 Note that we *look* inside a SAVE_EXPR only to determine how it was
14506 calculated; it is not safe for fold to do much of anything else with the
14507 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14508 at run time. For example, the latter example above *cannot* be implemented
14509 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14510 evaluation time of the original SAVE_EXPR is not necessarily the same at
14511 the time the new expression is evaluated. The only optimization of this
14512 sort that would be valid is changing
14514 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14518 SAVE_EXPR (I) * SAVE_EXPR (J)
14520 (where the same SAVE_EXPR (J) is used in the original and the
14521 transformed version). */
14524 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
14526 if (operand_equal_p (top
, bottom
, 0))
14529 if (TREE_CODE (type
) != INTEGER_TYPE
)
14532 switch (TREE_CODE (top
))
14535 /* Bitwise and provides a power of two multiple. If the mask is
14536 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14537 if (!integer_pow2p (bottom
))
14542 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14543 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
14547 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14548 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
14551 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
14555 op1
= TREE_OPERAND (top
, 1);
14556 /* const_binop may not detect overflow correctly,
14557 so check for it explicitly here. */
14558 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
14559 && 0 != (t1
= fold_convert (type
,
14560 const_binop (LSHIFT_EXPR
,
14563 && !TREE_OVERFLOW (t1
))
14564 return multiple_of_p (type
, t1
, bottom
);
14569 /* Can't handle conversions from non-integral or wider integral type. */
14570 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
14571 || (TYPE_PRECISION (type
)
14572 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
14575 /* .. fall through ... */
14578 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
14581 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
14582 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
14585 if (TREE_CODE (bottom
) != INTEGER_CST
14586 || integer_zerop (bottom
)
14587 || (TYPE_UNSIGNED (type
)
14588 && (tree_int_cst_sgn (top
) < 0
14589 || tree_int_cst_sgn (bottom
) < 0)))
14591 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
14599 /* Return true if CODE or TYPE is known to be non-negative. */
14602 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
14604 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
14605 && truth_value_p (code
))
14606 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14607 have a signed:1 type (where the value is -1 and 0). */
14612 /* Return true if (CODE OP0) is known to be non-negative. If the return
14613 value is based on the assumption that signed overflow is undefined,
14614 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14615 *STRICT_OVERFLOW_P. */
14618 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14619 bool *strict_overflow_p
)
14621 if (TYPE_UNSIGNED (type
))
14627 /* We can't return 1 if flag_wrapv is set because
14628 ABS_EXPR<INT_MIN> = INT_MIN. */
14629 if (!ANY_INTEGRAL_TYPE_P (type
))
14631 if (TYPE_OVERFLOW_UNDEFINED (type
))
14633 *strict_overflow_p
= true;
14638 case NON_LVALUE_EXPR
:
14640 case FIX_TRUNC_EXPR
:
14641 return tree_expr_nonnegative_warnv_p (op0
,
14642 strict_overflow_p
);
14646 tree inner_type
= TREE_TYPE (op0
);
14647 tree outer_type
= type
;
14649 if (TREE_CODE (outer_type
) == REAL_TYPE
)
14651 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14652 return tree_expr_nonnegative_warnv_p (op0
,
14653 strict_overflow_p
);
14654 if (INTEGRAL_TYPE_P (inner_type
))
14656 if (TYPE_UNSIGNED (inner_type
))
14658 return tree_expr_nonnegative_warnv_p (op0
,
14659 strict_overflow_p
);
14662 else if (INTEGRAL_TYPE_P (outer_type
))
14664 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14665 return tree_expr_nonnegative_warnv_p (op0
,
14666 strict_overflow_p
);
14667 if (INTEGRAL_TYPE_P (inner_type
))
14668 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
14669 && TYPE_UNSIGNED (inner_type
);
14675 return tree_simple_nonnegative_warnv_p (code
, type
);
14678 /* We don't know sign of `t', so be conservative and return false. */
14682 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14683 value is based on the assumption that signed overflow is undefined,
14684 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14685 *STRICT_OVERFLOW_P. */
14688 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14689 tree op1
, bool *strict_overflow_p
)
14691 if (TYPE_UNSIGNED (type
))
14696 case POINTER_PLUS_EXPR
:
14698 if (FLOAT_TYPE_P (type
))
14699 return (tree_expr_nonnegative_warnv_p (op0
,
14701 && tree_expr_nonnegative_warnv_p (op1
,
14702 strict_overflow_p
));
14704 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14705 both unsigned and at least 2 bits shorter than the result. */
14706 if (TREE_CODE (type
) == INTEGER_TYPE
14707 && TREE_CODE (op0
) == NOP_EXPR
14708 && TREE_CODE (op1
) == NOP_EXPR
)
14710 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
14711 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
14712 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
14713 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
14715 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
14716 TYPE_PRECISION (inner2
)) + 1;
14717 return prec
< TYPE_PRECISION (type
);
14723 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
14725 /* x * x is always non-negative for floating point x
14726 or without overflow. */
14727 if (operand_equal_p (op0
, op1
, 0)
14728 || (tree_expr_nonnegative_warnv_p (op0
, strict_overflow_p
)
14729 && tree_expr_nonnegative_warnv_p (op1
, strict_overflow_p
)))
14731 if (ANY_INTEGRAL_TYPE_P (type
)
14732 && TYPE_OVERFLOW_UNDEFINED (type
))
14733 *strict_overflow_p
= true;
14738 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14739 both unsigned and their total bits is shorter than the result. */
14740 if (TREE_CODE (type
) == INTEGER_TYPE
14741 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
14742 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
14744 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
14745 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
14747 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
14748 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
14751 bool unsigned0
= TYPE_UNSIGNED (inner0
);
14752 bool unsigned1
= TYPE_UNSIGNED (inner1
);
14754 if (TREE_CODE (op0
) == INTEGER_CST
)
14755 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
14757 if (TREE_CODE (op1
) == INTEGER_CST
)
14758 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
14760 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
14761 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
14763 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
14764 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
14765 : TYPE_PRECISION (inner0
);
14767 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
14768 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
14769 : TYPE_PRECISION (inner1
);
14771 return precision0
+ precision1
< TYPE_PRECISION (type
);
14778 return (tree_expr_nonnegative_warnv_p (op0
,
14780 || tree_expr_nonnegative_warnv_p (op1
,
14781 strict_overflow_p
));
14787 case TRUNC_DIV_EXPR
:
14788 case CEIL_DIV_EXPR
:
14789 case FLOOR_DIV_EXPR
:
14790 case ROUND_DIV_EXPR
:
14791 return (tree_expr_nonnegative_warnv_p (op0
,
14793 && tree_expr_nonnegative_warnv_p (op1
,
14794 strict_overflow_p
));
14796 case TRUNC_MOD_EXPR
:
14797 case CEIL_MOD_EXPR
:
14798 case FLOOR_MOD_EXPR
:
14799 case ROUND_MOD_EXPR
:
14800 return tree_expr_nonnegative_warnv_p (op0
,
14801 strict_overflow_p
);
14803 return tree_simple_nonnegative_warnv_p (code
, type
);
14806 /* We don't know sign of `t', so be conservative and return false. */
14810 /* Return true if T is known to be non-negative. If the return
14811 value is based on the assumption that signed overflow is undefined,
14812 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14813 *STRICT_OVERFLOW_P. */
14816 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14818 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14821 switch (TREE_CODE (t
))
14824 return tree_int_cst_sgn (t
) >= 0;
14827 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
14830 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
14833 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
14835 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 2),
14836 strict_overflow_p
));
14838 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
14841 /* We don't know sign of `t', so be conservative and return false. */
14845 /* Return true if T is known to be non-negative. If the return
14846 value is based on the assumption that signed overflow is undefined,
14847 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14848 *STRICT_OVERFLOW_P. */
14851 tree_call_nonnegative_warnv_p (tree type
, tree fndecl
,
14852 tree arg0
, tree arg1
, bool *strict_overflow_p
)
14854 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
14855 switch (DECL_FUNCTION_CODE (fndecl
))
14857 CASE_FLT_FN (BUILT_IN_ACOS
):
14858 CASE_FLT_FN (BUILT_IN_ACOSH
):
14859 CASE_FLT_FN (BUILT_IN_CABS
):
14860 CASE_FLT_FN (BUILT_IN_COSH
):
14861 CASE_FLT_FN (BUILT_IN_ERFC
):
14862 CASE_FLT_FN (BUILT_IN_EXP
):
14863 CASE_FLT_FN (BUILT_IN_EXP10
):
14864 CASE_FLT_FN (BUILT_IN_EXP2
):
14865 CASE_FLT_FN (BUILT_IN_FABS
):
14866 CASE_FLT_FN (BUILT_IN_FDIM
):
14867 CASE_FLT_FN (BUILT_IN_HYPOT
):
14868 CASE_FLT_FN (BUILT_IN_POW10
):
14869 CASE_INT_FN (BUILT_IN_FFS
):
14870 CASE_INT_FN (BUILT_IN_PARITY
):
14871 CASE_INT_FN (BUILT_IN_POPCOUNT
):
14872 CASE_INT_FN (BUILT_IN_CLZ
):
14873 CASE_INT_FN (BUILT_IN_CLRSB
):
14874 case BUILT_IN_BSWAP32
:
14875 case BUILT_IN_BSWAP64
:
14879 CASE_FLT_FN (BUILT_IN_SQRT
):
14880 /* sqrt(-0.0) is -0.0. */
14881 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
14883 return tree_expr_nonnegative_warnv_p (arg0
,
14884 strict_overflow_p
);
14886 CASE_FLT_FN (BUILT_IN_ASINH
):
14887 CASE_FLT_FN (BUILT_IN_ATAN
):
14888 CASE_FLT_FN (BUILT_IN_ATANH
):
14889 CASE_FLT_FN (BUILT_IN_CBRT
):
14890 CASE_FLT_FN (BUILT_IN_CEIL
):
14891 CASE_FLT_FN (BUILT_IN_ERF
):
14892 CASE_FLT_FN (BUILT_IN_EXPM1
):
14893 CASE_FLT_FN (BUILT_IN_FLOOR
):
14894 CASE_FLT_FN (BUILT_IN_FMOD
):
14895 CASE_FLT_FN (BUILT_IN_FREXP
):
14896 CASE_FLT_FN (BUILT_IN_ICEIL
):
14897 CASE_FLT_FN (BUILT_IN_IFLOOR
):
14898 CASE_FLT_FN (BUILT_IN_IRINT
):
14899 CASE_FLT_FN (BUILT_IN_IROUND
):
14900 CASE_FLT_FN (BUILT_IN_LCEIL
):
14901 CASE_FLT_FN (BUILT_IN_LDEXP
):
14902 CASE_FLT_FN (BUILT_IN_LFLOOR
):
14903 CASE_FLT_FN (BUILT_IN_LLCEIL
):
14904 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
14905 CASE_FLT_FN (BUILT_IN_LLRINT
):
14906 CASE_FLT_FN (BUILT_IN_LLROUND
):
14907 CASE_FLT_FN (BUILT_IN_LRINT
):
14908 CASE_FLT_FN (BUILT_IN_LROUND
):
14909 CASE_FLT_FN (BUILT_IN_MODF
):
14910 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
14911 CASE_FLT_FN (BUILT_IN_RINT
):
14912 CASE_FLT_FN (BUILT_IN_ROUND
):
14913 CASE_FLT_FN (BUILT_IN_SCALB
):
14914 CASE_FLT_FN (BUILT_IN_SCALBLN
):
14915 CASE_FLT_FN (BUILT_IN_SCALBN
):
14916 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
14917 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
14918 CASE_FLT_FN (BUILT_IN_SINH
):
14919 CASE_FLT_FN (BUILT_IN_TANH
):
14920 CASE_FLT_FN (BUILT_IN_TRUNC
):
14921 /* True if the 1st argument is nonnegative. */
14922 return tree_expr_nonnegative_warnv_p (arg0
,
14923 strict_overflow_p
);
14925 CASE_FLT_FN (BUILT_IN_FMAX
):
14926 /* True if the 1st OR 2nd arguments are nonnegative. */
14927 return (tree_expr_nonnegative_warnv_p (arg0
,
14929 || (tree_expr_nonnegative_warnv_p (arg1
,
14930 strict_overflow_p
)));
14932 CASE_FLT_FN (BUILT_IN_FMIN
):
14933 /* True if the 1st AND 2nd arguments are nonnegative. */
14934 return (tree_expr_nonnegative_warnv_p (arg0
,
14936 && (tree_expr_nonnegative_warnv_p (arg1
,
14937 strict_overflow_p
)));
14939 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
14940 /* True if the 2nd argument is nonnegative. */
14941 return tree_expr_nonnegative_warnv_p (arg1
,
14942 strict_overflow_p
);
14944 CASE_FLT_FN (BUILT_IN_POWI
):
14945 /* True if the 1st argument is nonnegative or the second
14946 argument is an even integer. */
14947 if (TREE_CODE (arg1
) == INTEGER_CST
14948 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
14950 return tree_expr_nonnegative_warnv_p (arg0
,
14951 strict_overflow_p
);
14953 CASE_FLT_FN (BUILT_IN_POW
):
14954 /* True if the 1st argument is nonnegative or the second
14955 argument is an even integer valued real. */
14956 if (TREE_CODE (arg1
) == REAL_CST
)
14961 c
= TREE_REAL_CST (arg1
);
14962 n
= real_to_integer (&c
);
14965 REAL_VALUE_TYPE cint
;
14966 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
14967 if (real_identical (&c
, &cint
))
14971 return tree_expr_nonnegative_warnv_p (arg0
,
14972 strict_overflow_p
);
14977 return tree_simple_nonnegative_warnv_p (CALL_EXPR
,
14981 /* Return true if T is known to be non-negative. If the return
14982 value is based on the assumption that signed overflow is undefined,
14983 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14984 *STRICT_OVERFLOW_P. */
14987 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14989 enum tree_code code
= TREE_CODE (t
);
14990 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14997 tree temp
= TARGET_EXPR_SLOT (t
);
14998 t
= TARGET_EXPR_INITIAL (t
);
15000 /* If the initializer is non-void, then it's a normal expression
15001 that will be assigned to the slot. */
15002 if (!VOID_TYPE_P (t
))
15003 return tree_expr_nonnegative_warnv_p (t
, strict_overflow_p
);
15005 /* Otherwise, the initializer sets the slot in some way. One common
15006 way is an assignment statement at the end of the initializer. */
15009 if (TREE_CODE (t
) == BIND_EXPR
)
15010 t
= expr_last (BIND_EXPR_BODY (t
));
15011 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
15012 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
15013 t
= expr_last (TREE_OPERAND (t
, 0));
15014 else if (TREE_CODE (t
) == STATEMENT_LIST
)
15019 if (TREE_CODE (t
) == MODIFY_EXPR
15020 && TREE_OPERAND (t
, 0) == temp
)
15021 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15022 strict_overflow_p
);
15029 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
15030 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
15032 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
15033 get_callee_fndecl (t
),
15036 strict_overflow_p
);
15038 case COMPOUND_EXPR
:
15040 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15041 strict_overflow_p
);
15043 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t
, 1)),
15044 strict_overflow_p
);
15046 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
15047 strict_overflow_p
);
15050 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
15054 /* We don't know sign of `t', so be conservative and return false. */
15058 /* Return true if T is known to be non-negative. If the return
15059 value is based on the assumption that signed overflow is undefined,
15060 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15061 *STRICT_OVERFLOW_P. */
15064 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15066 enum tree_code code
;
15067 if (t
== error_mark_node
)
15070 code
= TREE_CODE (t
);
15071 switch (TREE_CODE_CLASS (code
))
15074 case tcc_comparison
:
15075 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15077 TREE_OPERAND (t
, 0),
15078 TREE_OPERAND (t
, 1),
15079 strict_overflow_p
);
15082 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15084 TREE_OPERAND (t
, 0),
15085 strict_overflow_p
);
15088 case tcc_declaration
:
15089 case tcc_reference
:
15090 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15098 case TRUTH_AND_EXPR
:
15099 case TRUTH_OR_EXPR
:
15100 case TRUTH_XOR_EXPR
:
15101 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15103 TREE_OPERAND (t
, 0),
15104 TREE_OPERAND (t
, 1),
15105 strict_overflow_p
);
15106 case TRUTH_NOT_EXPR
:
15107 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15109 TREE_OPERAND (t
, 0),
15110 strict_overflow_p
);
15117 case WITH_SIZE_EXPR
:
15119 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15122 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
);
15126 /* Return true if `t' is known to be non-negative. Handle warnings
15127 about undefined signed overflow. */
15130 tree_expr_nonnegative_p (tree t
)
15132 bool ret
, strict_overflow_p
;
15134 strict_overflow_p
= false;
15135 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
15136 if (strict_overflow_p
)
15137 fold_overflow_warning (("assuming signed overflow does not occur when "
15138 "determining that expression is always "
15140 WARN_STRICT_OVERFLOW_MISC
);
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces/case labels).  Comments added only; code bytes untouched.  */
15145 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15146 For floating point we further ensure that T is not denormal.
15147 Similar logic is present in nonzero_address in rtlanal.h.
15149 If the return value is based on the assumption that signed overflow
15150 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15151 change *STRICT_OVERFLOW_P. */
15154 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
15155 bool *strict_overflow_p
)
/* First arm: nonzero-ness passes straight through to the operand.
   (The case labels for this switch are missing from the listing.)  */
15160 return tree_expr_nonzero_warnv_p (op0
,
15161 strict_overflow_p
);
/* Conversion arm: a widening (or equal-width) conversion of a nonzero
   value stays nonzero.  */
15165 tree inner_type
= TREE_TYPE (op0
);
15166 tree outer_type
= type
;
15168 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
15169 && tree_expr_nonzero_warnv_p (op0
,
15170 strict_overflow_p
));
15174 case NON_LVALUE_EXPR
:
15175 return tree_expr_nonzero_warnv_p (op0
,
15176 strict_overflow_p
);
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces/case labels).  Comments added only; code bytes untouched.  */
15185 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15186 For floating point we further ensure that T is not denormal.
15187 Similar logic is present in nonzero_address in rtlanal.h.
15189 If the return value is based on the assumption that signed overflow
15190 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15191 change *STRICT_OVERFLOW_P. */
15194 tree_binary_nonzero_warnv_p (enum tree_code code
,
15197 tree op1
, bool *strict_overflow_p
)
15199 bool sub_strict_overflow_p
;
15202 case POINTER_PLUS_EXPR
:
/* PLUS-like arm: with undefined overflow, two nonnegative operands (one
   of them nonzero) make a nonzero sum.  */
15204 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_UNDEFINED (type
))
15206 /* With the presence of negative values it is hard
15207 to say something. */
15208 sub_strict_overflow_p
= false;
15209 if (!tree_expr_nonnegative_warnv_p (op0
,
15210 &sub_strict_overflow_p
)
15211 || !tree_expr_nonnegative_warnv_p (op1
,
15212 &sub_strict_overflow_p
))
15214 /* One of operands must be positive and the other non-negative. */
15215 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15216 overflows, on a twos-complement machine the sum of two
15217 nonnegative numbers can never be zero. */
15218 return (tree_expr_nonzero_warnv_p (op0
,
15220 || tree_expr_nonzero_warnv_p (op1
,
15221 strict_overflow_p
));
/* MULT-like arm (label missing): nonzero * nonzero is nonzero only when
   overflow is undefined, so record that assumption.  */
15226 if (TYPE_OVERFLOW_UNDEFINED (type
))
15228 if (tree_expr_nonzero_warnv_p (op0
,
15230 && tree_expr_nonzero_warnv_p (op1
,
15231 strict_overflow_p
))
15233 *strict_overflow_p
= true;
/* MIN-like arm (label missing): both operands must be nonzero.  */
15240 sub_strict_overflow_p
= false;
15241 if (tree_expr_nonzero_warnv_p (op0
,
15242 &sub_strict_overflow_p
)
15243 && tree_expr_nonzero_warnv_p (op1
,
15244 &sub_strict_overflow_p
))
15246 if (sub_strict_overflow_p
)
15247 *strict_overflow_p
= true;
/* MAX-like arm (label missing): several sufficient conditions tried in
   turn.  */
15252 sub_strict_overflow_p
= false;
15253 if (tree_expr_nonzero_warnv_p (op0
,
15254 &sub_strict_overflow_p
))
15256 if (sub_strict_overflow_p
)
15257 *strict_overflow_p
= true;
15259 /* When both operands are nonzero, then MAX must be too. */
15260 if (tree_expr_nonzero_warnv_p (op1
,
15261 strict_overflow_p
))
15264 /* MAX where operand 0 is positive is positive. */
15265 return tree_expr_nonnegative_warnv_p (op0
,
15266 strict_overflow_p
);
15268 /* MAX where operand 1 is positive is positive. */
15269 else if (tree_expr_nonzero_warnv_p (op1
,
15270 &sub_strict_overflow_p
)
15271 && tree_expr_nonnegative_warnv_p (op1
,
15272 &sub_strict_overflow_p
))
15274 if (sub_strict_overflow_p
)
15275 *strict_overflow_p
= true;
/* Bit-or-like arm (label missing): either operand nonzero suffices.  */
15281 return (tree_expr_nonzero_warnv_p (op1
,
15283 || tree_expr_nonzero_warnv_p (op0
,
15284 strict_overflow_p
));
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces/case labels).  Comments added only; code bytes untouched.  */
15293 /* Return true when T is an address and is known to be nonzero.
15294 For floating point we further ensure that T is not denormal.
15295 Similar logic is present in nonzero_address in rtlanal.h.
15297 If the return value is based on the assumption that signed overflow
15298 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15299 change *STRICT_OVERFLOW_P. */
15302 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15304 bool sub_strict_overflow_p
;
15305 switch (TREE_CODE (t
))
/* Constant arm (label missing): a constant is nonzero iff it is not the
   integer zero.  */
15308 return !integer_zerop (t
);
/* ADDR_EXPR arm (label missing): decide whether the address of BASE can
   be null.  */
15312 tree base
= TREE_OPERAND (t
, 0);
15314 if (!DECL_P (base
))
15315 base
= get_base_address (base
);
15320 /* For objects in symbol table check if we know they are non-zero.
15321 Don't do anything for variables and functions before symtab is built;
15322 it is quite possible that they will be declared weak later. */
15323 if (DECL_P (base
) && decl_in_symtab_p (base
))
15325 struct symtab_node
*symbol
;
15327 symbol
= symtab_node::get_create (base
);
15329 return symbol
->nonzero_address ();
15334 /* Function local objects are never NULL. */
15336 && (DECL_CONTEXT (base
)
15337 && TREE_CODE (DECL_CONTEXT (base
)) == FUNCTION_DECL
15338 && auto_var_in_fn_p (base
, DECL_CONTEXT (base
))))
15341 /* Constants are never weak. */
15342 if (CONSTANT_CLASS_P (base
))
/* COND_EXPR-like arm (label missing): both selected operands must be
   nonzero.  */
15349 sub_strict_overflow_p
= false;
15350 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15351 &sub_strict_overflow_p
)
15352 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
15353 &sub_strict_overflow_p
))
15355 if (sub_strict_overflow_p
)
15356 *strict_overflow_p
= true;
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces).  Comments added only; code bytes untouched.  */
15367 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15368 attempt to fold the expression to a constant without modifying TYPE,
15371 If the expression could be simplified to a constant, then return
15372 the constant. If the expression would not be simplified to a
15373 constant, then return NULL_TREE. */
15376 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
/* Fold, then accept the result only when it is a TREE_CONSTANT node.  */
15378 tree tem
= fold_binary (code
, type
, op0
, op1
);
15379 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces).  Comments added only; code bytes untouched.  */
15382 /* Given the components of a unary expression CODE, TYPE and OP0,
15383 attempt to fold the expression to a constant without modifying
15386 If the expression could be simplified to a constant, then return
15387 the constant. If the expression would not be simplified to a
15388 constant, then return NULL_TREE. */
15391 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
/* Same filter as fold_binary_to_constant, for the unary folder.  */
15393 tree tem
= fold_unary (code
, type
, op0
);
15394 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces and some statements).  Comments added only; code bytes
   untouched.  */
15397 /* If EXP represents referencing an element in a constant string
15398 (either via pointer arithmetic or array indexing), return the
15399 tree representing the value accessed, otherwise return NULL. */
15402 fold_read_from_constant_string (tree exp
)
15404 if ((TREE_CODE (exp
) == INDIRECT_REF
15405 || TREE_CODE (exp
) == ARRAY_REF
)
15406 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
15408 tree exp1
= TREE_OPERAND (exp
, 0);
15411 location_t loc
= EXPR_LOCATION (exp
);
/* For *p find the string and index from pointer arithmetic; for
   array[i] take the index operand directly (the else-branch header is
   missing from the listing).  */
15413 if (TREE_CODE (exp
) == INDIRECT_REF
)
15414 string
= string_constant (exp1
, &index
);
15417 tree low_bound
= array_ref_low_bound (exp
);
15418 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
15420 /* Optimize the special-case of a zero lower bound.
15422 We convert the low_bound to sizetype to avoid some problems
15423 with constant folding. (E.g. suppose the lower bound is 1,
15424 and its mode is QI. Without the conversion,l (ARRAY
15425 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15426 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15427 if (! integer_zerop (low_bound
))
15428 index
= size_diffop_loc (loc
, index
,
15429 fold_convert_loc (loc
, sizetype
, low_bound
))
/* Guard: only fold when STRING really is a STRING_CST of single-byte
   integer elements and INDEX is a constant inside the string.  */;
15435 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
15436 && TREE_CODE (string
) == STRING_CST
15437 && TREE_CODE (index
) == INTEGER_CST
15438 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
15439 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
15441 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
15442 return build_int_cst_type (TREE_TYPE (exp
),
15443 (TREE_STRING_POINTER (string
)
15444 [TREE_INT_CST_LOW (index
)]));
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces/case labels).  Comments added only; code bytes untouched.  */
15449 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15450 an integer constant, real, or fixed-point constant.
15452 TYPE is the type of the result. */
15455 fold_negate_const (tree arg0
, tree type
)
15457 tree t
= NULL_TREE
;
15459 switch (TREE_CODE (arg0
))
/* INTEGER_CST arm (label missing): negate via wide_int and refit,
   propagating overflow for signed types.  */
15464 wide_int val
= wi::neg (arg0
, &overflow
);
15465 t
= force_fit_type (type
, val
, 1,
15466 (overflow
| TREE_OVERFLOW (arg0
))
15467 && !TYPE_UNSIGNED (type
));
/* REAL_CST arm (label missing).  */
15472 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
/* FIXED_CST arm (label missing): fixed-point negate with saturation.  */
15477 FIXED_VALUE_TYPE f
;
15478 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
15479 &(TREE_FIXED_CST (arg0
)), NULL
,
15480 TYPE_SATURATING (type
));
15481 t
= build_fixed (type
, f
);
15482 /* Propagate overflow flags. */
15483 if (overflow_p
| TREE_OVERFLOW (arg0
))
15484 TREE_OVERFLOW (t
) = 1;
15489 gcc_unreachable ();
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces/case labels).  Comments added only; code bytes untouched.  */
15495 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15496 an integer constant or real constant.
15498 TYPE is the type of the result. */
15501 fold_abs_const (tree arg0
, tree type
)
15503 tree t
= NULL_TREE
;
15505 switch (TREE_CODE (arg0
))
/* INTEGER_CST arm (label missing).  */
15509 /* If the value is unsigned or non-negative, then the absolute value
15510 is the same as the ordinary value. */
15511 if (!wi::neg_p (arg0
, TYPE_SIGN (type
)))
15514 /* If the value is negative, then the absolute value is
15519 wide_int val
= wi::neg (arg0
, &overflow
);
15520 t
= force_fit_type (type
, val
, -1,
15521 overflow
| TREE_OVERFLOW (arg0
));
/* REAL_CST arm (label missing): flip the sign only when negative.  */
15527 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
15528 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15534 gcc_unreachable ();
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces).  Comments added only; code bytes untouched.  */
15540 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15541 constant. TYPE is the type of the result. */
15544 fold_not_const (const_tree arg0
, tree type
)
15546 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
/* Bitwise complement via wide_int, refit to TYPE, keeping ARG0's
   overflow flag.  */
15548 return force_fit_type (type
, wi::bit_not (arg0
), 0, TREE_OVERFLOW (arg0
));
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces/case labels and some statements).  Comments added only;
   code bytes untouched.  */
15551 /* Given CODE, a relational operator, the target type, TYPE and two
15552 constant operands OP0 and OP1, return the result of the
15553 relational operation. If the result is not a compile time
15554 constant, then return NULL_TREE. */
15557 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
15559 int result
, invert
;
15561 /* From here on, the only cases we handle are when the result is
15562 known to be a constant. */
/* --- REAL_CST vs REAL_CST --- */
15564 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
15566 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
15567 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
15569 /* Handle the cases where either operand is a NaN. */
15570 if (real_isnan (c0
) || real_isnan (c1
))
15580 case UNORDERED_EXPR
:
/* Ordered comparisons on a NaN trap under -ftrapping-math, so do not
   fold them (the surrounding switch arms are missing here).  */
15594 if (flag_trapping_math
)
15600 gcc_unreachable ();
15603 return constant_boolean_node (result
, type
);
15606 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
/* --- FIXED_CST vs FIXED_CST --- */
15609 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
15611 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
15612 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
15613 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
15616 /* Handle equality/inequality of complex constants. */
15617 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
15619 tree rcond
= fold_relational_const (code
, type
,
15620 TREE_REALPART (op0
),
15621 TREE_REALPART (op1
));
15622 tree icond
= fold_relational_const (code
, type
,
15623 TREE_IMAGPART (op0
),
15624 TREE_IMAGPART (op1
));
15625 if (code
== EQ_EXPR
)
15626 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
15627 else if (code
== NE_EXPR
)
15628 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
/* --- VECTOR_CST vs VECTOR_CST: fold element-wise into a 0/-1 mask --- */
15633 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
15635 unsigned count
= VECTOR_CST_NELTS (op0
);
15636 tree
*elts
= XALLOCAVEC (tree
, count
);
15637 gcc_assert (VECTOR_CST_NELTS (op1
) == count
15638 && TYPE_VECTOR_SUBPARTS (type
) == count
);
15640 for (unsigned i
= 0; i
< count
; i
++)
15642 tree elem_type
= TREE_TYPE (type
);
15643 tree elem0
= VECTOR_CST_ELT (op0
, i
);
15644 tree elem1
= VECTOR_CST_ELT (op1
, i
);
15646 tree tem
= fold_relational_const (code
, elem_type
,
15649 if (tem
== NULL_TREE
)
15652 elts
[i
] = build_int_cst (elem_type
, integer_zerop (tem
) ? 0 : -1);
15655 return build_vector (type
, elts
);
15658 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15660 To compute GT, swap the arguments and do LT.
15661 To compute GE, do LT and invert the result.
15662 To compute LE, swap the arguments, do LT and invert the result.
15663 To compute NE, do EQ and invert the result.
15665 Therefore, the code below must handle only EQ and LT. */
15667 if (code
== LE_EXPR
|| code
== GT_EXPR
)
15672 code
= swap_tree_comparison (code
);
15675 /* Note that it is safe to invert for real values here because we
15676 have already handled the one case that it matters. */
15679 if (code
== NE_EXPR
|| code
== GE_EXPR
)
15682 code
= invert_tree_comparison (code
, false);
15685 /* Compute a result for LT or EQ if args permit;
15686 Otherwise return T. */
15687 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
15689 if (code
== EQ_EXPR
)
15690 result
= tree_int_cst_equal (op0
, op1
);
15692 result
= tree_int_cst_lt (op0
, op1
);
15699 return constant_boolean_node (result
, type
);
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces/returns).  Comments added only; code bytes untouched.  */
15702 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15703 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15707 fold_build_cleanup_point_expr (tree type
, tree expr
)
15709 /* If the expression does not have side effects then we don't have to wrap
15710 it with a cleanup point expression. */
15711 if (!TREE_SIDE_EFFECTS (expr
))
15714 /* If the expression is a return, check to see if the expression inside the
15715 return has no side effects or the right hand side of the modify expression
15716 inside the return. If either don't have side effects set we don't need to
15717 wrap the expression in a cleanup point expression. Note we don't check the
15718 left hand side of the modify because it should always be a return decl. */
15719 if (TREE_CODE (expr
) == RETURN_EXPR
)
15721 tree op
= TREE_OPERAND (expr
, 0);
15722 if (!op
|| !TREE_SIDE_EFFECTS (op
))
15724 op
= TREE_OPERAND (op
, 1);
15725 if (!TREE_SIDE_EFFECTS (op
))
/* Otherwise a cleanup point is needed.  */
15729 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces and several statements, e.g. the STRIP_NOPS of SUB).
   Comments added only; code bytes untouched.  */
15732 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15733 of an indirection through OP0, or NULL_TREE if no simplification is
15737 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
15743 subtype
= TREE_TYPE (sub
);
15744 if (!POINTER_TYPE_P (subtype
))
/* --- *&X forms --- */
15747 if (TREE_CODE (sub
) == ADDR_EXPR
)
15749 tree op
= TREE_OPERAND (sub
, 0);
15750 tree optype
= TREE_TYPE (op
);
15751 /* *&CONST_DECL -> to the value of the const decl. */
15752 if (TREE_CODE (op
) == CONST_DECL
)
15753 return DECL_INITIAL (op
);
15754 /* *&p => p; make sure to handle *&"str"[cst] here. */
15755 if (type
== optype
)
15757 tree fop
= fold_read_from_constant_string (op
);
15763 /* *(foo *)&fooarray => fooarray[0] */
15764 else if (TREE_CODE (optype
) == ARRAY_TYPE
15765 && type
== TREE_TYPE (optype
)
15766 && (!in_gimple_form
15767 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15769 tree type_domain
= TYPE_DOMAIN (optype
);
15770 tree min_val
= size_zero_node
;
15771 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15772 min_val
= TYPE_MIN_VALUE (type_domain
);
15774 && TREE_CODE (min_val
) != INTEGER_CST
)
15776 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
15777 NULL_TREE
, NULL_TREE
);
15779 /* *(foo *)&complexfoo => __real__ complexfoo */
15780 else if (TREE_CODE (optype
) == COMPLEX_TYPE
15781 && type
== TREE_TYPE (optype
))
15782 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
15783 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15784 else if (TREE_CODE (optype
) == VECTOR_TYPE
15785 && type
== TREE_TYPE (optype
))
15787 tree part_width
= TYPE_SIZE (type
);
15788 tree index
= bitsize_int (0);
15789 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
/* --- *(&X + CST) forms --- */
15793 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
15794 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
15796 tree op00
= TREE_OPERAND (sub
, 0);
15797 tree op01
= TREE_OPERAND (sub
, 1);
15800 if (TREE_CODE (op00
) == ADDR_EXPR
)
15803 op00
= TREE_OPERAND (op00
, 0);
15804 op00type
= TREE_TYPE (op00
);
15806 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15807 if (TREE_CODE (op00type
) == VECTOR_TYPE
15808 && type
== TREE_TYPE (op00type
))
15810 HOST_WIDE_INT offset
= tree_to_shwi (op01
);
15811 tree part_width
= TYPE_SIZE (type
);
15812 unsigned HOST_WIDE_INT part_widthi
= tree_to_shwi (part_width
)/BITS_PER_UNIT
;
15813 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
15814 tree index
= bitsize_int (indexi
);
15816 if (offset
/ part_widthi
< TYPE_VECTOR_SUBPARTS (op00type
))
15817 return fold_build3_loc (loc
,
15818 BIT_FIELD_REF
, type
, op00
,
15819 part_width
, index
);
15822 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15823 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
15824 && type
== TREE_TYPE (op00type
))
15826 tree size
= TYPE_SIZE_UNIT (type
);
15827 if (tree_int_cst_equal (size
, op01
))
15828 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
15830 /* ((foo *)&fooarray)[1] => fooarray[1] */
15831 else if (TREE_CODE (op00type
) == ARRAY_TYPE
15832 && type
== TREE_TYPE (op00type
))
15834 tree type_domain
= TYPE_DOMAIN (op00type
);
15835 tree min_val
= size_zero_node
;
15836 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15837 min_val
= TYPE_MIN_VALUE (type_domain
);
15838 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
15839 TYPE_SIZE_UNIT (type
));
15840 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
15841 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
15842 NULL_TREE
, NULL_TREE
);
15847 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15848 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
15849 && type
== TREE_TYPE (TREE_TYPE (subtype
))
15850 && (!in_gimple_form
15851 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15854 tree min_val
= size_zero_node
;
15855 sub
= build_fold_indirect_ref_loc (loc
, sub
);
15856 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
15857 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15858 min_val
= TYPE_MIN_VALUE (type_domain
);
15860 && TREE_CODE (min_val
) != INTEGER_CST
)
15862 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces and the "if (sub) return sub;" step).  Comments added
   only; code bytes untouched.  */
15869 /* Builds an expression for an indirection through T, simplifying some
15873 build_fold_indirect_ref_loc (location_t loc
, tree t
)
15875 tree type
= TREE_TYPE (TREE_TYPE (t
));
15876 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
/* Fall back to a plain INDIRECT_REF when no simplification applied.  */
15881 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces and the trailing return).  Comments added only; code
   bytes untouched.  */
15884 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15887 fold_indirect_ref_loc (location_t loc
, tree t
)
15889 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces/loop header).  Comments added only; code bytes untouched.  */
15897 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15898 whose result is ignored. The type of the returned tree need not be
15899 the same as the original expression. */
15902 fold_ignored_result (tree t
)
15904 if (!TREE_SIDE_EFFECTS (t
))
15905 return integer_zero_node
;
/* Peel wrappers repeatedly (the enclosing loop header is missing from
   the listing), descending into whichever operand carries the side
   effects.  */
15908 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
15911 t
= TREE_OPERAND (t
, 0);
15915 case tcc_comparison
:
15916 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15917 t
= TREE_OPERAND (t
, 0);
15918 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
15919 t
= TREE_OPERAND (t
, 1);
15924 case tcc_expression
:
15925 switch (TREE_CODE (t
))
15927 case COMPOUND_EXPR
:
15928 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15930 t
= TREE_OPERAND (t
, 0);
/* COND_EXPR-like arm (label missing): only strip when neither selected
   operand has side effects.  */
15934 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
15935 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
15937 t
= TREE_OPERAND (t
, 0);
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces and early-return statements).  Comments added only; code
   bytes untouched.  */
15950 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15953 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
15955 tree div
= NULL_TREE
;
15960 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15961 have to do anything. Only do this when we are not given a const,
15962 because in that case, this check is more expensive than just
15964 if (TREE_CODE (value
) != INTEGER_CST
)
15966 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15968 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
15972 /* If divisor is a power of two, simplify this to bit manipulation. */
15973 if (divisor
== (divisor
& -divisor
))
/* Power-of-two, constant VALUE: round via wide_int arithmetic.  */
15975 if (TREE_CODE (value
) == INTEGER_CST
)
15977 wide_int val
= value
;
15980 if ((val
& (divisor
- 1)) == 0)
15983 overflow_p
= TREE_OVERFLOW (value
);
15984 val
+= divisor
- 1;
15985 val
&= - (int) divisor
;
15989 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
/* Power-of-two, non-constant VALUE: (value + divisor-1) & -divisor.  */
15995 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
15996 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
15997 t
= build_int_cst (TREE_TYPE (value
), - (int) divisor
);
15998 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
/* General divisor: ceil-divide then multiply back.  */
16004 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16005 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
16006 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces and early-return statements).  Comments added only; code
   bytes untouched.  */
16012 /* Likewise, but round down. */
16015 round_down_loc (location_t loc
, tree value
, int divisor
)
16017 tree div
= NULL_TREE
;
/* Unlike round_up_loc, DIVISOR is signed here; require it positive.  */
16019 gcc_assert (divisor
> 0);
16023 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16024 have to do anything. Only do this when we are not given a const,
16025 because in that case, this check is more expensive than just
16027 if (TREE_CODE (value
) != INTEGER_CST
)
16029 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16031 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16035 /* If divisor is a power of two, simplify this to bit manipulation. */
16036 if (divisor
== (divisor
& -divisor
))
/* Power-of-two: value & -divisor.  */
16040 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16041 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
/* General divisor: floor-divide then multiply back.  */
16046 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16047 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
16048 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces and the non-ADDR_EXPR fallthrough path).  Comments added
   only; code bytes untouched.  */
16054 /* Returns the pointer to the base of the object addressed by EXP and
16055 extracts the information about the offset of the access, storing it
16056 to PBITPOS and POFFSET. */
16059 split_address_to_core_and_offset (tree exp
,
16060 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
16064 int unsignedp
, volatilep
;
16065 HOST_WIDE_INT bitsize
;
16066 location_t loc
= EXPR_LOCATION (exp
);
/* For &X, peel the reference with get_inner_reference and re-take the
   address of the inner core.  */
16068 if (TREE_CODE (exp
) == ADDR_EXPR
)
16070 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
16071 poffset
, &mode
, &unsignedp
, &volatilep
,
16073 core
= build_fold_addr_expr_loc (loc
, core
);
/* Fallback arm (statements missing): no variable offset.  */
16079 *poffset
= NULL_TREE
;
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces/returns).  Comments added only; code bytes untouched.  */
16085 /* Returns true if addresses of E1 and E2 differ by a constant, false
16086 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16089 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
16092 HOST_WIDE_INT bitpos1
, bitpos2
;
16093 tree toffset1
, toffset2
, tdiff
, type
;
16095 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
16096 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
/* Both addresses must share the same base object and byte-aligned bit
   positions.  */
16098 if (bitpos1
% BITS_PER_UNIT
!= 0
16099 || bitpos2
% BITS_PER_UNIT
!= 0
16100 || !operand_equal_p (core1
, core2
, 0))
16103 if (toffset1
&& toffset2
)
16105 type
= TREE_TYPE (toffset1
);
16106 if (type
!= TREE_TYPE (toffset2
))
16107 toffset2
= fold_convert (type
, toffset2
);
16109 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
16110 if (!cst_and_fits_in_hwi (tdiff
))
16113 *diff
= int_cst_value (tdiff
);
16115 else if (toffset1
|| toffset2
)
16117 /* If only one of the offsets is non-constant, the difference cannot
/* Finally fold the bit-position delta into the byte difference.  */
16124 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces/case labels).  Comments added only; code bytes untouched.  */
16128 /* Simplify the floating point expression EXP when the sign of the
16129 result is not significant. Return NULL_TREE if no simplification
16133 fold_strip_sign_ops (tree exp
)
16136 location_t loc
= EXPR_LOCATION (exp
);
16138 switch (TREE_CODE (exp
))
/* Sign-only unary op arm (labels missing): drop it, recursing into the
   operand.  */
16142 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16143 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
/* MULT/RDIV-like arm (labels missing): signs of factors don't matter
   unless sign-dependent rounding is honored.  */
16147 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp
)))
16149 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16150 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16151 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
16152 return fold_build2_loc (loc
, TREE_CODE (exp
), TREE_TYPE (exp
),
16153 arg0
? arg0
: TREE_OPERAND (exp
, 0),
16154 arg1
? arg1
: TREE_OPERAND (exp
, 1));
16157 case COMPOUND_EXPR
:
16158 arg0
= TREE_OPERAND (exp
, 0);
16159 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16161 return fold_build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (exp
), arg0
, arg1
);
/* COND_EXPR arm (label missing): strip signs in both branches.  */
16165 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16166 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 2));
16168 return fold_build3_loc (loc
,
16169 COND_EXPR
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
16170 arg0
? arg0
: TREE_OPERAND (exp
, 1),
16171 arg1
? arg1
: TREE_OPERAND (exp
, 2));
/* CALL_EXPR arm (label missing): handle math builtins.  */
16176 const enum built_in_function fcode
= builtin_mathfn_code (exp
);
16179 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
16180 /* Strip copysign function call, return the 1st argument. */
16181 arg0
= CALL_EXPR_ARG (exp
, 0);
16182 arg1
= CALL_EXPR_ARG (exp
, 1);
16183 return omit_one_operand_loc (loc
, TREE_TYPE (exp
), arg0
, arg1
);
16186 /* Strip sign ops from the argument of "odd" math functions. */
16187 if (negate_mathfn_p (fcode
))
16189 arg0
= fold_strip_sign_ops (CALL_EXPR_ARG (exp
, 0));
16191 return build_call_expr_loc (loc
, get_callee_fndecl (exp
), 1, arg0
);
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces).  Comments added only; code bytes untouched.  */
16204 /* Return OFF converted to a pointer offset type suitable as offset for
16205 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16207 convert_to_ptrofftype_loc (location_t loc
, tree off
)
16209 return fold_convert_loc (loc
, sizetype
, off
);
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces).  Comments added only; code bytes untouched.  */
16212 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16214 fold_build_pointer_plus_loc (location_t loc
, tree ptr
, tree off
)
/* OFF is first normalized to sizetype as POINTER_PLUS_EXPR requires.  */
16216 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
16217 ptr
, convert_to_ptrofftype_loc (loc
, off
));
/* NOTE(review): corrupted listing (split lines, embedded line numbers,
   missing braces).  Comments added only; code bytes untouched.  */
16220 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16222 fold_build_pointer_plus_hwi_loc (location_t loc
, tree ptr
, HOST_WIDE_INT off
)
/* Same as fold_build_pointer_plus_loc but takes a host-wide integer,
   converted through size_int.  */
16224 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
16225 ptr
, size_int (off
));