/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
51 #include "double-int.h"
58 #include "fold-const.h"
59 #include "stor-layout.h"
61 #include "tree-iterator.h"
65 #include "hard-reg-set.h"
67 #include "statistics.h"
69 #include "fixed-value.h"
70 #include "insn-config.h"
80 #include "diagnostic-core.h"
82 #include "langhooks.h"
85 #include "basic-block.h"
86 #include "tree-ssa-alias.h"
87 #include "internal-fn.h"
89 #include "gimple-expr.h"
94 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
97 #include "plugin-api.h"
100 #include "generic-match.h"
103 /* Nonzero if we are folding constants inside an initializer; zero
105 int folding_initializer
= 0;
107 /* The following constants represent a bit based encoding of GCC's
108 comparison operators. This encoding simplifies transformations
109 on relational comparison operators, such as AND and OR. */
110 enum comparison_code
{
129 static bool negate_mathfn_p (enum built_in_function
);
130 static bool negate_expr_p (tree
);
131 static tree
negate_expr (tree
);
132 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
133 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
134 static enum comparison_code
comparison_to_compcode (enum tree_code
);
135 static enum tree_code
compcode_to_comparison (enum comparison_code
);
136 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
137 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
138 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
139 static tree
distribute_bit_expr (location_t
, enum tree_code
, tree
, tree
, tree
);
140 static tree
make_bit_field_ref (location_t
, tree
, tree
,
141 HOST_WIDE_INT
, HOST_WIDE_INT
, int);
142 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
144 static tree
decode_field_reference (location_t
, tree
, HOST_WIDE_INT
*,
146 machine_mode
*, int *, int *,
148 static int simple_operand_p (const_tree
);
149 static bool simple_operand_p_2 (tree
);
150 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
151 static tree
range_predecessor (tree
);
152 static tree
range_successor (tree
);
153 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
154 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
155 static tree
unextend (tree
, int, int, tree
);
156 static tree
optimize_minmax_comparison (location_t
, enum tree_code
,
158 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
159 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
160 static tree
fold_binary_op_with_conditional_arg (location_t
,
161 enum tree_code
, tree
,
164 static tree
fold_mathfn_compare (location_t
,
165 enum built_in_function
, enum tree_code
,
167 static tree
fold_inf_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
168 static tree
fold_div_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
169 static bool reorder_operands_p (const_tree
, const_tree
);
170 static tree
fold_negate_const (tree
, tree
);
171 static tree
fold_not_const (const_tree
, tree
);
172 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
173 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
174 static tree
fold_view_convert_expr (tree
, tree
);
175 static bool vec_cst_ctor_to_array (tree
, tree
*);
178 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
179 Otherwise, return LOC. */
/* Return the source location attached to T, falling back to LOC when T
   carries UNKNOWN_LOCATION.  NOTE(review): the return-type line and the
   function's braces were dropped by extraction; the fragment below is
   kept byte-identical.  */
182 expr_location_or (tree t
, location_t loc
)
184 location_t tloc
= EXPR_LOCATION (t
);
185 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
188 /* Similar to protected_set_expr_location, but never modify x in place,
189 if location can and needs to be set, unshare it. */
/* Set LOC on X only when X can carry a location, does not already have
   LOC, and is not a SAVE_EXPR/TARGET_EXPR/BIND_EXPR (whose locations
   must not be clobbered in place).  NOTE(review): per the comment above,
   the original unshares X before modifying it; the copy_node call and
   surrounding braces were dropped by extraction.  Fragment kept
   byte-identical.  */
192 protected_set_expr_location_unshare (tree x
, location_t loc
)
194 if (CAN_HAVE_LOCATION_P (x
)
195 && EXPR_LOCATION (x
) != loc
196 && !(TREE_CODE (x
) == SAVE_EXPR
197 || TREE_CODE (x
) == TARGET_EXPR
198 || TREE_CODE (x
) == BIND_EXPR
))
201 SET_EXPR_LOCATION (x
, loc
);
206 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
207 division and returns the quotient. Otherwise returns
/* If ARG2 divides ARG1 with zero remainder, return the exact quotient
   as a tree of ARG1's type.  NOTE(review): the quotient output argument
   of wi::multiple_of_p, the failure return, and the braces were dropped
   by extraction — QUO is presumably filled in by multiple_of_p; confirm
   against the original.  Fragment kept byte-identical.  */
211 div_if_zero_remainder (const_tree arg1
, const_tree arg2
)
215 if (wi::multiple_of_p (wi::to_widest (arg1
), wi::to_widest (arg2
),
217 return wide_int_to_tree (TREE_TYPE (arg1
), quo
);
222 /* This is nonzero if we should defer warnings about undefined
223 overflow. This facility exists because these warnings are a
224 special case. The code to estimate loop iterations does not want
225 to issue any warnings, since it works with expressions which do not
226 occur in user code. Various bits of cleanup code call fold(), but
227 only use the result if it has certain characteristics (e.g., is a
228 constant); that code only wants to issue a warning if the result is
231 static int fold_deferring_overflow_warnings
;
233 /* If a warning about undefined overflow is deferred, this is the
234 warning. Note that this may cause us to turn two warnings into
235 one, but that is fine since it is sufficient to only give one
236 warning per expression. */
238 static const char* fold_deferred_overflow_warning
;
240 /* If a warning about undefined overflow is deferred, this is the
241 level at which the warning should be emitted. */
243 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
245 /* Start deferring overflow warnings. We could use a stack here to
246 permit nested calls, but at present it is not necessary. */
/* Begin deferring undefined-overflow warnings by bumping the counter;
   the matching fold_undefer_overflow_warnings decrements it.
   NOTE(review): return type and braces were dropped by extraction.  */
249 fold_defer_overflow_warnings (void)
251 ++fold_deferring_overflow_warnings
;
254 /* Stop deferring overflow warnings. If there is a pending warning,
255 and ISSUE is true, then issue the warning if appropriate. STMT is
256 the statement with which the warning should be associated (used for
257 location information); STMT may be NULL. CODE is the level of the
258 warning--a warn_strict_overflow_code value. This function will use
259 the smaller of CODE and the deferred code when deciding whether to
260 issue the warning. CODE may be zero to mean to always use the
/* Stop deferring overflow warnings.  If ISSUE is true and a warning is
   pending, emit it at STMT's location (or input_location when STMT has
   none), using the smaller of CODE and the deferred code to decide
   whether to warn.  NOTE(review): local declarations (warnmsg, locus),
   several early returns, and braces were dropped by extraction; the
   visible statements are kept byte-identical.  */
264 fold_undefer_overflow_warnings (bool issue
, const_gimple stmt
, int code
)
269 gcc_assert (fold_deferring_overflow_warnings
> 0);
270 --fold_deferring_overflow_warnings
;
/* Still nested inside another defer: fold the pending warning's level
   down and keep deferring.  */
271 if (fold_deferring_overflow_warnings
> 0)
273 if (fold_deferred_overflow_warning
!= NULL
275 && code
< (int) fold_deferred_overflow_code
)
276 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
/* Outermost undefer: take ownership of the pending message.  */
280 warnmsg
= fold_deferred_overflow_warning
;
281 fold_deferred_overflow_warning
= NULL
;
283 if (!issue
|| warnmsg
== NULL
)
286 if (gimple_no_warning_p (stmt
))
289 /* Use the smallest code level when deciding to issue the
291 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
292 code
= fold_deferred_overflow_code
;
294 if (!issue_strict_overflow_warning (code
))
298 locus
= input_location
;
300 locus
= gimple_location (stmt
);
/* "%s" keeps WARNMSG from being treated as a format string.  */
301 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
304 /* Stop deferring overflow warnings, ignoring any deferred
/* Stop deferring overflow warnings and discard any pending one
   (issue=false).  NOTE(review): return type and braces were dropped by
   extraction.  */
308 fold_undefer_and_ignore_overflow_warnings (void)
310 fold_undefer_overflow_warnings (false, NULL
, 0);
313 /* Whether we are deferring overflow warnings. */
/* Return whether overflow warnings are currently being deferred
   (defer counter is positive).  NOTE(review): return type and braces
   were dropped by extraction.  */
316 fold_deferring_overflow_warnings_p (void)
318 return fold_deferring_overflow_warnings
> 0;
321 /* This is called when we fold something based on the fact that signed
322 overflow is undefined. */
/* Record or emit a warning that a fold relied on signed overflow being
   undefined.  While deferring, keep only the lowest-level (most
   important) pending warning; otherwise warn immediately when WC's
   level warrants it.  NOTE(review): braces were dropped by extraction;
   statements kept byte-identical.  */
325 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
327 if (fold_deferring_overflow_warnings
> 0)
/* Replace the pending warning only if none exists yet or WC is a
   lower (stronger) level than the one already recorded.  */
329 if (fold_deferred_overflow_warning
== NULL
330 || wc
< fold_deferred_overflow_code
)
332 fold_deferred_overflow_warning
= gmsgid
;
333 fold_deferred_overflow_code
= wc
;
336 else if (issue_strict_overflow_warning (wc
))
337 warning (OPT_Wstrict_overflow
, gmsgid
);
340 /* Return true if the built-in mathematical function specified by CODE
341 is odd, i.e. -f(x) == f(-x). */
344 negate_mathfn_p (enum built_in_function code
)
348 CASE_FLT_FN (BUILT_IN_ASIN
):
349 CASE_FLT_FN (BUILT_IN_ASINH
):
350 CASE_FLT_FN (BUILT_IN_ATAN
):
351 CASE_FLT_FN (BUILT_IN_ATANH
):
352 CASE_FLT_FN (BUILT_IN_CASIN
):
353 CASE_FLT_FN (BUILT_IN_CASINH
):
354 CASE_FLT_FN (BUILT_IN_CATAN
):
355 CASE_FLT_FN (BUILT_IN_CATANH
):
356 CASE_FLT_FN (BUILT_IN_CBRT
):
357 CASE_FLT_FN (BUILT_IN_CPROJ
):
358 CASE_FLT_FN (BUILT_IN_CSIN
):
359 CASE_FLT_FN (BUILT_IN_CSINH
):
360 CASE_FLT_FN (BUILT_IN_CTAN
):
361 CASE_FLT_FN (BUILT_IN_CTANH
):
362 CASE_FLT_FN (BUILT_IN_ERF
):
363 CASE_FLT_FN (BUILT_IN_LLROUND
):
364 CASE_FLT_FN (BUILT_IN_LROUND
):
365 CASE_FLT_FN (BUILT_IN_ROUND
):
366 CASE_FLT_FN (BUILT_IN_SIN
):
367 CASE_FLT_FN (BUILT_IN_SINH
):
368 CASE_FLT_FN (BUILT_IN_TAN
):
369 CASE_FLT_FN (BUILT_IN_TANH
):
370 CASE_FLT_FN (BUILT_IN_TRUNC
):
373 CASE_FLT_FN (BUILT_IN_LLRINT
):
374 CASE_FLT_FN (BUILT_IN_LRINT
):
375 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
376 CASE_FLT_FN (BUILT_IN_RINT
):
377 return !flag_rounding_math
;
385 /* Check whether we may negate an integer constant T without causing
/* Return whether the INTEGER_CST T can be negated without overflow:
   any unsigned constant may (wraparound), and a signed constant may
   unless it is exactly the sign bit (INT_MIN of its precision).
   NOTE(review): the "return true" for the unsigned case and the braces
   were dropped by extraction; statements kept byte-identical.  */
389 may_negate_without_overflow_p (const_tree t
)
393 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
395 type
= TREE_TYPE (t
);
396 if (TYPE_UNSIGNED (type
))
/* Signed: only the minimum value (lone sign bit) overflows.  */
399 return !wi::only_sign_bit_p (t
);
402 /* Determine whether an expression T can be cheaply negated using
403 the function negate_expr without introducing undefined overflow. */
406 negate_expr_p (tree t
)
413 type
= TREE_TYPE (t
);
416 switch (TREE_CODE (t
))
419 if (INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_WRAPS (type
))
422 /* Check that -CST will not overflow type. */
423 return may_negate_without_overflow_p (t
);
425 return (INTEGRAL_TYPE_P (type
)
426 && TYPE_OVERFLOW_WRAPS (type
));
432 return !TYPE_OVERFLOW_SANITIZED (type
);
435 /* We want to canonicalize to positive real constants. Pretend
436 that only negative ones can be easily negated. */
437 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
440 return negate_expr_p (TREE_REALPART (t
))
441 && negate_expr_p (TREE_IMAGPART (t
));
445 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
448 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
450 for (i
= 0; i
< count
; i
++)
451 if (!negate_expr_p (VECTOR_CST_ELT (t
, i
)))
458 return negate_expr_p (TREE_OPERAND (t
, 0))
459 && negate_expr_p (TREE_OPERAND (t
, 1));
462 return negate_expr_p (TREE_OPERAND (t
, 0));
465 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
466 || HONOR_SIGNED_ZEROS (element_mode (type
)))
468 /* -(A + B) -> (-B) - A. */
469 if (negate_expr_p (TREE_OPERAND (t
, 1))
470 && reorder_operands_p (TREE_OPERAND (t
, 0),
471 TREE_OPERAND (t
, 1)))
473 /* -(A + B) -> (-A) - B. */
474 return negate_expr_p (TREE_OPERAND (t
, 0));
477 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
478 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
479 && !HONOR_SIGNED_ZEROS (element_mode (type
))
480 && reorder_operands_p (TREE_OPERAND (t
, 0),
481 TREE_OPERAND (t
, 1));
484 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
490 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t
))))
491 return negate_expr_p (TREE_OPERAND (t
, 1))
492 || negate_expr_p (TREE_OPERAND (t
, 0));
498 /* In general we can't negate A / B, because if A is INT_MIN and
499 B is 1, we may turn this into INT_MIN / -1 which is undefined
500 and actually traps on some architectures. But if overflow is
501 undefined, we can negate, because - (INT_MIN / 1) is an
503 if (INTEGRAL_TYPE_P (TREE_TYPE (t
)))
505 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
507 /* If overflow is undefined then we have to be careful because
508 we ask whether it's ok to associate the negate with the
509 division which is not ok for example for
510 -((a - b) / c) where (-(a - b)) / c may invoke undefined
511 overflow because of negating INT_MIN. So do not use
512 negate_expr_p here but open-code the two important cases. */
513 if (TREE_CODE (TREE_OPERAND (t
, 0)) == NEGATE_EXPR
514 || (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
515 && may_negate_without_overflow_p (TREE_OPERAND (t
, 0))))
518 else if (negate_expr_p (TREE_OPERAND (t
, 0)))
520 return negate_expr_p (TREE_OPERAND (t
, 1));
523 /* Negate -((double)float) as (double)(-float). */
524 if (TREE_CODE (type
) == REAL_TYPE
)
526 tree tem
= strip_float_extensions (t
);
528 return negate_expr_p (tem
);
533 /* Negate -f(x) as f(-x). */
534 if (negate_mathfn_p (builtin_mathfn_code (t
)))
535 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
539 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
540 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
542 tree op1
= TREE_OPERAND (t
, 1);
543 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
554 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
555 simplification is possible.
556 If negate_expr_p would return true for T, NULL_TREE will never be
560 fold_negate_expr (location_t loc
, tree t
)
562 tree type
= TREE_TYPE (t
);
565 switch (TREE_CODE (t
))
567 /* Convert - (~A) to A + 1. */
569 if (INTEGRAL_TYPE_P (type
))
570 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
571 build_one_cst (type
));
575 tem
= fold_negate_const (t
, type
);
576 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
577 || (ANY_INTEGRAL_TYPE_P (type
)
578 && !TYPE_OVERFLOW_TRAPS (type
)
579 && TYPE_OVERFLOW_WRAPS (type
))
580 || (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
585 tem
= fold_negate_const (t
, type
);
589 tem
= fold_negate_const (t
, type
);
594 tree rpart
= fold_negate_expr (loc
, TREE_REALPART (t
));
595 tree ipart
= fold_negate_expr (loc
, TREE_IMAGPART (t
));
597 return build_complex (type
, rpart
, ipart
);
603 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
604 tree
*elts
= XALLOCAVEC (tree
, count
);
606 for (i
= 0; i
< count
; i
++)
608 elts
[i
] = fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
609 if (elts
[i
] == NULL_TREE
)
613 return build_vector (type
, elts
);
617 if (negate_expr_p (t
))
618 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
619 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
620 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
624 if (negate_expr_p (t
))
625 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
626 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
630 if (!TYPE_OVERFLOW_SANITIZED (type
))
631 return TREE_OPERAND (t
, 0);
635 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
636 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
638 /* -(A + B) -> (-B) - A. */
639 if (negate_expr_p (TREE_OPERAND (t
, 1))
640 && reorder_operands_p (TREE_OPERAND (t
, 0),
641 TREE_OPERAND (t
, 1)))
643 tem
= negate_expr (TREE_OPERAND (t
, 1));
644 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
645 tem
, TREE_OPERAND (t
, 0));
648 /* -(A + B) -> (-A) - B. */
649 if (negate_expr_p (TREE_OPERAND (t
, 0)))
651 tem
= negate_expr (TREE_OPERAND (t
, 0));
652 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
653 tem
, TREE_OPERAND (t
, 1));
659 /* - (A - B) -> B - A */
660 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
661 && !HONOR_SIGNED_ZEROS (element_mode (type
))
662 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
663 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
664 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
668 if (TYPE_UNSIGNED (type
))
674 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
)))
676 tem
= TREE_OPERAND (t
, 1);
677 if (negate_expr_p (tem
))
678 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
679 TREE_OPERAND (t
, 0), negate_expr (tem
));
680 tem
= TREE_OPERAND (t
, 0);
681 if (negate_expr_p (tem
))
682 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
683 negate_expr (tem
), TREE_OPERAND (t
, 1));
690 /* In general we can't negate A / B, because if A is INT_MIN and
691 B is 1, we may turn this into INT_MIN / -1 which is undefined
692 and actually traps on some architectures. But if overflow is
693 undefined, we can negate, because - (INT_MIN / 1) is an
695 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
697 const char * const warnmsg
= G_("assuming signed overflow does not "
698 "occur when negating a division");
699 tem
= TREE_OPERAND (t
, 1);
700 if (negate_expr_p (tem
))
702 if (INTEGRAL_TYPE_P (type
)
703 && (TREE_CODE (tem
) != INTEGER_CST
704 || integer_onep (tem
)))
705 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
706 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
707 TREE_OPERAND (t
, 0), negate_expr (tem
));
709 /* If overflow is undefined then we have to be careful because
710 we ask whether it's ok to associate the negate with the
711 division which is not ok for example for
712 -((a - b) / c) where (-(a - b)) / c may invoke undefined
713 overflow because of negating INT_MIN. So do not use
714 negate_expr_p here but open-code the two important cases. */
715 tem
= TREE_OPERAND (t
, 0);
716 if ((INTEGRAL_TYPE_P (type
)
717 && (TREE_CODE (tem
) == NEGATE_EXPR
718 || (TREE_CODE (tem
) == INTEGER_CST
719 && may_negate_without_overflow_p (tem
))))
720 || !INTEGRAL_TYPE_P (type
))
721 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
722 negate_expr (tem
), TREE_OPERAND (t
, 1));
727 /* Convert -((double)float) into (double)(-float). */
728 if (TREE_CODE (type
) == REAL_TYPE
)
730 tem
= strip_float_extensions (t
);
731 if (tem
!= t
&& negate_expr_p (tem
))
732 return fold_convert_loc (loc
, type
, negate_expr (tem
));
737 /* Negate -f(x) as f(-x). */
738 if (negate_mathfn_p (builtin_mathfn_code (t
))
739 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
743 fndecl
= get_callee_fndecl (t
);
744 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
745 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
750 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
751 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
753 tree op1
= TREE_OPERAND (t
, 1);
754 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
756 tree ntype
= TYPE_UNSIGNED (type
)
757 ? signed_type_for (type
)
758 : unsigned_type_for (type
);
759 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
760 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
761 return fold_convert_loc (loc
, type
, temp
);
773 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
774 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
786 loc
= EXPR_LOCATION (t
);
787 type
= TREE_TYPE (t
);
790 tem
= fold_negate_expr (loc
, t
);
792 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
793 return fold_convert_loc (loc
, type
, tem
);
796 /* Split a tree IN into a constant, literal and variable parts that could be
797 combined with CODE to make IN. "constant" means an expression with
798 TREE_CONSTANT but that isn't an actual constant. CODE must be a
799 commutative arithmetic operation. Store the constant part into *CONP,
800 the literal in *LITP and return the variable part. If a part isn't
801 present, set it to null. If the tree does not decompose in this way,
802 return the entire tree as the variable part and the other parts as null.
804 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
805 case, we negate an operand that was subtracted. Except if it is a
806 literal for which we use *MINUS_LITP instead.
808 If NEGATE_P is true, we are negating all of IN, again except a literal
809 for which we use *MINUS_LITP instead.
811 If IN is itself a literal or constant, return it as appropriate.
813 Note that we do not guarantee that any of the three values will be the
814 same type as IN, but they will have the same signedness and mode. */
817 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
818 tree
*minus_litp
, int negate_p
)
826 /* Strip any conversions that don't change the machine mode or signedness. */
827 STRIP_SIGN_NOPS (in
);
829 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
830 || TREE_CODE (in
) == FIXED_CST
)
832 else if (TREE_CODE (in
) == code
833 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
834 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
835 /* We can associate addition and subtraction together (even
836 though the C standard doesn't say so) for integers because
837 the value is not affected. For reals, the value might be
838 affected, so we can't. */
839 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
840 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
842 tree op0
= TREE_OPERAND (in
, 0);
843 tree op1
= TREE_OPERAND (in
, 1);
844 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
845 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
847 /* First see if either of the operands is a literal, then a constant. */
848 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
849 || TREE_CODE (op0
) == FIXED_CST
)
850 *litp
= op0
, op0
= 0;
851 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
852 || TREE_CODE (op1
) == FIXED_CST
)
853 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
855 if (op0
!= 0 && TREE_CONSTANT (op0
))
856 *conp
= op0
, op0
= 0;
857 else if (op1
!= 0 && TREE_CONSTANT (op1
))
858 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
860 /* If we haven't dealt with either operand, this is not a case we can
861 decompose. Otherwise, VAR is either of the ones remaining, if any. */
862 if (op0
!= 0 && op1
!= 0)
867 var
= op1
, neg_var_p
= neg1_p
;
869 /* Now do any needed negations. */
871 *minus_litp
= *litp
, *litp
= 0;
873 *conp
= negate_expr (*conp
);
875 var
= negate_expr (var
);
877 else if (TREE_CODE (in
) == BIT_NOT_EXPR
878 && code
== PLUS_EXPR
)
880 /* -X - 1 is folded to ~X, undo that here. */
881 *minus_litp
= build_one_cst (TREE_TYPE (in
));
882 var
= negate_expr (TREE_OPERAND (in
, 0));
884 else if (TREE_CONSTANT (in
))
892 *minus_litp
= *litp
, *litp
= 0;
893 else if (*minus_litp
)
894 *litp
= *minus_litp
, *minus_litp
= 0;
895 *conp
= negate_expr (*conp
);
896 var
= negate_expr (var
);
902 /* Re-associate trees split by the above function. T1 and T2 are
903 either expressions to associate or null. Return the new
904 expression, if any. LOC is the location of the new expression. If
905 we build an operation, do it in TYPE and with CODE. */
908 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
915 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
916 try to fold this since we will have infinite recursion. But do
917 deal with any NEGATE_EXPRs. */
918 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
919 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
921 if (code
== PLUS_EXPR
)
923 if (TREE_CODE (t1
) == NEGATE_EXPR
)
924 return build2_loc (loc
, MINUS_EXPR
, type
,
925 fold_convert_loc (loc
, type
, t2
),
926 fold_convert_loc (loc
, type
,
927 TREE_OPERAND (t1
, 0)));
928 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
929 return build2_loc (loc
, MINUS_EXPR
, type
,
930 fold_convert_loc (loc
, type
, t1
),
931 fold_convert_loc (loc
, type
,
932 TREE_OPERAND (t2
, 0)));
933 else if (integer_zerop (t2
))
934 return fold_convert_loc (loc
, type
, t1
);
936 else if (code
== MINUS_EXPR
)
938 if (integer_zerop (t2
))
939 return fold_convert_loc (loc
, type
, t1
);
942 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
943 fold_convert_loc (loc
, type
, t2
));
946 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
947 fold_convert_loc (loc
, type
, t2
));
950 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
951 for use in int_const_binop, size_binop and size_diffop. */
/* Return whether TYPE1 and TYPE2 are interchangeable integer/pointer
   types for int_const_binop/size_binop/size_diffop: both integral or
   pointer, with matching signedness, precision and machine mode.
   NOTE(review): the early "return false" lines, CODE-specific checks
   and braces were dropped by extraction; statements kept
   byte-identical.  */
954 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
956 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
958 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
973 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
974 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
975 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
979 /* Combine two integer constants ARG1 and ARG2 under operation CODE
980 to produce a new constant. Return NULL_TREE if we don't know how
981 to evaluate CODE at compile-time. */
984 int_const_binop_1 (enum tree_code code
, const_tree arg1
, const_tree parg2
,
989 tree type
= TREE_TYPE (arg1
);
990 signop sign
= TYPE_SIGN (type
);
991 bool overflow
= false;
993 wide_int arg2
= wide_int::from (parg2
, TYPE_PRECISION (type
),
994 TYPE_SIGN (TREE_TYPE (parg2
)));
999 res
= wi::bit_or (arg1
, arg2
);
1003 res
= wi::bit_xor (arg1
, arg2
);
1007 res
= wi::bit_and (arg1
, arg2
);
1012 if (wi::neg_p (arg2
))
1015 if (code
== RSHIFT_EXPR
)
1021 if (code
== RSHIFT_EXPR
)
1022 /* It's unclear from the C standard whether shifts can overflow.
1023 The following code ignores overflow; perhaps a C standard
1024 interpretation ruling is needed. */
1025 res
= wi::rshift (arg1
, arg2
, sign
);
1027 res
= wi::lshift (arg1
, arg2
);
1032 if (wi::neg_p (arg2
))
1035 if (code
== RROTATE_EXPR
)
1036 code
= LROTATE_EXPR
;
1038 code
= RROTATE_EXPR
;
1041 if (code
== RROTATE_EXPR
)
1042 res
= wi::rrotate (arg1
, arg2
);
1044 res
= wi::lrotate (arg1
, arg2
);
1048 res
= wi::add (arg1
, arg2
, sign
, &overflow
);
1052 res
= wi::sub (arg1
, arg2
, sign
, &overflow
);
1056 res
= wi::mul (arg1
, arg2
, sign
, &overflow
);
1059 case MULT_HIGHPART_EXPR
:
1060 res
= wi::mul_high (arg1
, arg2
, sign
);
1063 case TRUNC_DIV_EXPR
:
1064 case EXACT_DIV_EXPR
:
1067 res
= wi::div_trunc (arg1
, arg2
, sign
, &overflow
);
1070 case FLOOR_DIV_EXPR
:
1073 res
= wi::div_floor (arg1
, arg2
, sign
, &overflow
);
1079 res
= wi::div_ceil (arg1
, arg2
, sign
, &overflow
);
1082 case ROUND_DIV_EXPR
:
1085 res
= wi::div_round (arg1
, arg2
, sign
, &overflow
);
1088 case TRUNC_MOD_EXPR
:
1091 res
= wi::mod_trunc (arg1
, arg2
, sign
, &overflow
);
1094 case FLOOR_MOD_EXPR
:
1097 res
= wi::mod_floor (arg1
, arg2
, sign
, &overflow
);
1103 res
= wi::mod_ceil (arg1
, arg2
, sign
, &overflow
);
1106 case ROUND_MOD_EXPR
:
1109 res
= wi::mod_round (arg1
, arg2
, sign
, &overflow
);
1113 res
= wi::min (arg1
, arg2
, sign
);
1117 res
= wi::max (arg1
, arg2
, sign
);
1124 t
= force_fit_type (type
, res
, overflowable
,
1125 (((sign
== SIGNED
|| overflowable
== -1)
1127 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (parg2
)));
/* Public wrapper around int_const_binop_1 with overflowable == 1
   (overflow is acceptable and sets TREE_OVERFLOW on the result).
   NOTE(review): return type and braces were dropped by extraction.  */
1133 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
1135 return int_const_binop_1 (code
, arg1
, arg2
, 1);
1138 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1139 constant. We assume ARG1 and ARG2 have the same data type, or at least
1140 are the same kind of constant and the same machine mode. Return zero if
1141 combining the constants is not allowed in the current operating mode. */
1144 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1146 /* Sanity check for the recursive cases. */
1153 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg2
) == INTEGER_CST
)
1155 if (code
== POINTER_PLUS_EXPR
)
1156 return int_const_binop (PLUS_EXPR
,
1157 arg1
, fold_convert (TREE_TYPE (arg1
), arg2
));
1159 return int_const_binop (code
, arg1
, arg2
);
1162 if (TREE_CODE (arg1
) == REAL_CST
&& TREE_CODE (arg2
) == REAL_CST
)
1167 REAL_VALUE_TYPE value
;
1168 REAL_VALUE_TYPE result
;
1172 /* The following codes are handled by real_arithmetic. */
1187 d1
= TREE_REAL_CST (arg1
);
1188 d2
= TREE_REAL_CST (arg2
);
1190 type
= TREE_TYPE (arg1
);
1191 mode
= TYPE_MODE (type
);
1193 /* Don't perform operation if we honor signaling NaNs and
1194 either operand is a NaN. */
1195 if (HONOR_SNANS (mode
)
1196 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1199 /* Don't perform operation if it would raise a division
1200 by zero exception. */
1201 if (code
== RDIV_EXPR
1202 && REAL_VALUES_EQUAL (d2
, dconst0
)
1203 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1206 /* If either operand is a NaN, just return it. Otherwise, set up
1207 for floating-point trap; we return an overflow. */
1208 if (REAL_VALUE_ISNAN (d1
))
1210 else if (REAL_VALUE_ISNAN (d2
))
1213 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1214 real_convert (&result
, mode
, &value
);
1216 /* Don't constant fold this floating point operation if
1217 the result has overflowed and flag_trapping_math. */
1218 if (flag_trapping_math
1219 && MODE_HAS_INFINITIES (mode
)
1220 && REAL_VALUE_ISINF (result
)
1221 && !REAL_VALUE_ISINF (d1
)
1222 && !REAL_VALUE_ISINF (d2
))
1225 /* Don't constant fold this floating point operation if the
1226 result may dependent upon the run-time rounding mode and
1227 flag_rounding_math is set, or if GCC's software emulation
1228 is unable to accurately represent the result. */
1229 if ((flag_rounding_math
1230 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1231 && (inexact
|| !real_identical (&result
, &value
)))
1234 t
= build_real (type
, result
);
1236 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1240 if (TREE_CODE (arg1
) == FIXED_CST
)
1242 FIXED_VALUE_TYPE f1
;
1243 FIXED_VALUE_TYPE f2
;
1244 FIXED_VALUE_TYPE result
;
1249 /* The following codes are handled by fixed_arithmetic. */
1255 case TRUNC_DIV_EXPR
:
1256 if (TREE_CODE (arg2
) != FIXED_CST
)
1258 f2
= TREE_FIXED_CST (arg2
);
1264 if (TREE_CODE (arg2
) != INTEGER_CST
)
1267 f2
.data
.high
= w2
.elt (1);
1268 f2
.data
.low
= w2
.elt (0);
1277 f1
= TREE_FIXED_CST (arg1
);
1278 type
= TREE_TYPE (arg1
);
1279 sat_p
= TYPE_SATURATING (type
);
1280 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1281 t
= build_fixed (type
, result
);
1282 /* Propagate overflow flags. */
1283 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1284 TREE_OVERFLOW (t
) = 1;
1288 if (TREE_CODE (arg1
) == COMPLEX_CST
&& TREE_CODE (arg2
) == COMPLEX_CST
)
1290 tree type
= TREE_TYPE (arg1
);
1291 tree r1
= TREE_REALPART (arg1
);
1292 tree i1
= TREE_IMAGPART (arg1
);
1293 tree r2
= TREE_REALPART (arg2
);
1294 tree i2
= TREE_IMAGPART (arg2
);
1301 real
= const_binop (code
, r1
, r2
);
1302 imag
= const_binop (code
, i1
, i2
);
1306 if (COMPLEX_FLOAT_TYPE_P (type
))
1307 return do_mpc_arg2 (arg1
, arg2
, type
,
1308 /* do_nonfinite= */ folding_initializer
,
1311 real
= const_binop (MINUS_EXPR
,
1312 const_binop (MULT_EXPR
, r1
, r2
),
1313 const_binop (MULT_EXPR
, i1
, i2
));
1314 imag
= const_binop (PLUS_EXPR
,
1315 const_binop (MULT_EXPR
, r1
, i2
),
1316 const_binop (MULT_EXPR
, i1
, r2
));
1320 if (COMPLEX_FLOAT_TYPE_P (type
))
1321 return do_mpc_arg2 (arg1
, arg2
, type
,
1322 /* do_nonfinite= */ folding_initializer
,
1325 case TRUNC_DIV_EXPR
:
1327 case FLOOR_DIV_EXPR
:
1328 case ROUND_DIV_EXPR
:
1329 if (flag_complex_method
== 0)
1331 /* Keep this algorithm in sync with
1332 tree-complex.c:expand_complex_div_straight().
1334 Expand complex division to scalars, straightforward algorithm.
1335 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1339 = const_binop (PLUS_EXPR
,
1340 const_binop (MULT_EXPR
, r2
, r2
),
1341 const_binop (MULT_EXPR
, i2
, i2
));
1343 = const_binop (PLUS_EXPR
,
1344 const_binop (MULT_EXPR
, r1
, r2
),
1345 const_binop (MULT_EXPR
, i1
, i2
));
1347 = const_binop (MINUS_EXPR
,
1348 const_binop (MULT_EXPR
, i1
, r2
),
1349 const_binop (MULT_EXPR
, r1
, i2
));
1351 real
= const_binop (code
, t1
, magsquared
);
1352 imag
= const_binop (code
, t2
, magsquared
);
1356 /* Keep this algorithm in sync with
1357 tree-complex.c:expand_complex_div_wide().
1359 Expand complex division to scalars, modified algorithm to minimize
1360 overflow with wide input ranges. */
1361 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1362 fold_abs_const (r2
, TREE_TYPE (type
)),
1363 fold_abs_const (i2
, TREE_TYPE (type
)));
1365 if (integer_nonzerop (compare
))
1367 /* In the TRUE branch, we compute
1369 div = (br * ratio) + bi;
1370 tr = (ar * ratio) + ai;
1371 ti = (ai * ratio) - ar;
1374 tree ratio
= const_binop (code
, r2
, i2
);
1375 tree div
= const_binop (PLUS_EXPR
, i2
,
1376 const_binop (MULT_EXPR
, r2
, ratio
));
1377 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1378 real
= const_binop (PLUS_EXPR
, real
, i1
);
1379 real
= const_binop (code
, real
, div
);
1381 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1382 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1383 imag
= const_binop (code
, imag
, div
);
1387 /* In the FALSE branch, we compute
1389 divisor = (d * ratio) + c;
1390 tr = (b * ratio) + a;
1391 ti = b - (a * ratio);
1394 tree ratio
= const_binop (code
, i2
, r2
);
1395 tree div
= const_binop (PLUS_EXPR
, r2
,
1396 const_binop (MULT_EXPR
, i2
, ratio
));
1398 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1399 real
= const_binop (PLUS_EXPR
, real
, r1
);
1400 real
= const_binop (code
, real
, div
);
1402 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1403 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1404 imag
= const_binop (code
, imag
, div
);
1414 return build_complex (type
, real
, imag
);
1417 if (TREE_CODE (arg1
) == VECTOR_CST
1418 && TREE_CODE (arg2
) == VECTOR_CST
)
1420 tree type
= TREE_TYPE (arg1
);
1421 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1422 tree
*elts
= XALLOCAVEC (tree
, count
);
1424 for (i
= 0; i
< count
; i
++)
1426 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1427 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1429 elts
[i
] = const_binop (code
, elem1
, elem2
);
1431 /* It is possible that const_binop cannot handle the given
1432 code and return NULL_TREE */
1433 if (elts
[i
] == NULL_TREE
)
1437 return build_vector (type
, elts
);
1440 /* Shifts allow a scalar offset for a vector. */
1441 if (TREE_CODE (arg1
) == VECTOR_CST
1442 && TREE_CODE (arg2
) == INTEGER_CST
)
1444 tree type
= TREE_TYPE (arg1
);
1445 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1446 tree
*elts
= XALLOCAVEC (tree
, count
);
1448 for (i
= 0; i
< count
; i
++)
1450 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1452 elts
[i
] = const_binop (code
, elem1
, arg2
);
1454 /* It is possible that const_binop cannot handle the given
1455 code and return NULL_TREE. */
1456 if (elts
[i
] == NULL_TREE
)
1460 return build_vector (type
, elts
);
1465 /* Overload that adds a TYPE parameter to be able to dispatch
1466 to fold_relational_const. */
1469 const_binop (enum tree_code code
, tree type
, tree arg1
, tree arg2
)
1471 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1472 return fold_relational_const (code
, type
, arg1
, arg2
);
1474 /* ??? Until we make the const_binop worker take the type of the
1475 result as argument put those cases that need it here. */
1479 if ((TREE_CODE (arg1
) == REAL_CST
1480 && TREE_CODE (arg2
) == REAL_CST
)
1481 || (TREE_CODE (arg1
) == INTEGER_CST
1482 && TREE_CODE (arg2
) == INTEGER_CST
))
1483 return build_complex (type
, arg1
, arg2
);
1486 case VEC_PACK_TRUNC_EXPR
:
1487 case VEC_PACK_FIX_TRUNC_EXPR
:
1489 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1492 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
/ 2
1493 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
/ 2);
1494 if (TREE_CODE (arg1
) != VECTOR_CST
1495 || TREE_CODE (arg2
) != VECTOR_CST
)
1498 elts
= XALLOCAVEC (tree
, nelts
);
1499 if (!vec_cst_ctor_to_array (arg1
, elts
)
1500 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
/ 2))
1503 for (i
= 0; i
< nelts
; i
++)
1505 elts
[i
] = fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
1506 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
1507 TREE_TYPE (type
), elts
[i
]);
1508 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1512 return build_vector (type
, elts
);
1515 case VEC_WIDEN_MULT_LO_EXPR
:
1516 case VEC_WIDEN_MULT_HI_EXPR
:
1517 case VEC_WIDEN_MULT_EVEN_EXPR
:
1518 case VEC_WIDEN_MULT_ODD_EXPR
:
1520 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
);
1521 unsigned int out
, ofs
, scale
;
1524 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
* 2
1525 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
* 2);
1526 if (TREE_CODE (arg1
) != VECTOR_CST
|| TREE_CODE (arg2
) != VECTOR_CST
)
1529 elts
= XALLOCAVEC (tree
, nelts
* 4);
1530 if (!vec_cst_ctor_to_array (arg1
, elts
)
1531 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
* 2))
1534 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
1535 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? nelts
: 0;
1536 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
1537 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : nelts
;
1538 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
1540 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1543 for (out
= 0; out
< nelts
; out
++)
1545 unsigned int in1
= (out
<< scale
) + ofs
;
1546 unsigned int in2
= in1
+ nelts
* 2;
1549 t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in1
]);
1550 t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in2
]);
1552 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
1554 elts
[out
] = const_binop (MULT_EXPR
, t1
, t2
);
1555 if (elts
[out
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[out
]))
1559 return build_vector (type
, elts
);
1565 if (TREE_CODE_CLASS (code
) != tcc_binary
)
1568 /* Make sure type and arg0 have the same saturating flag. */
1569 gcc_checking_assert (TYPE_SATURATING (type
)
1570 == TYPE_SATURATING (TREE_TYPE (arg1
)));
1572 return const_binop (code
, arg1
, arg2
);
1575 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1576 Return zero if computing the constants is not possible. */
1579 const_unop (enum tree_code code
, tree type
, tree arg0
)
1585 case FIX_TRUNC_EXPR
:
1586 case FIXED_CONVERT_EXPR
:
1587 return fold_convert_const (code
, type
, arg0
);
1589 case ADDR_SPACE_CONVERT_EXPR
:
1590 if (integer_zerop (arg0
))
1591 return fold_convert_const (code
, type
, arg0
);
1594 case VIEW_CONVERT_EXPR
:
1595 return fold_view_convert_expr (type
, arg0
);
1599 /* Can't call fold_negate_const directly here as that doesn't
1600 handle all cases and we might not be able to negate some
1602 tree tem
= fold_negate_expr (UNKNOWN_LOCATION
, arg0
);
1603 if (tem
&& CONSTANT_CLASS_P (tem
))
1609 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
1610 return fold_abs_const (arg0
, type
);
1614 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1616 tree ipart
= fold_negate_const (TREE_IMAGPART (arg0
),
1618 return build_complex (type
, TREE_REALPART (arg0
), ipart
);
1623 if (TREE_CODE (arg0
) == INTEGER_CST
)
1624 return fold_not_const (arg0
, type
);
1625 /* Perform BIT_NOT_EXPR on each element individually. */
1626 else if (TREE_CODE (arg0
) == VECTOR_CST
)
1630 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
1632 elements
= XALLOCAVEC (tree
, count
);
1633 for (i
= 0; i
< count
; i
++)
1635 elem
= VECTOR_CST_ELT (arg0
, i
);
1636 elem
= const_unop (BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
1637 if (elem
== NULL_TREE
)
1642 return build_vector (type
, elements
);
1646 case TRUTH_NOT_EXPR
:
1647 if (TREE_CODE (arg0
) == INTEGER_CST
)
1648 return constant_boolean_node (integer_zerop (arg0
), type
);
1652 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1653 return fold_convert (type
, TREE_REALPART (arg0
));
1657 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1658 return fold_convert (type
, TREE_IMAGPART (arg0
));
1661 case VEC_UNPACK_LO_EXPR
:
1662 case VEC_UNPACK_HI_EXPR
:
1663 case VEC_UNPACK_FLOAT_LO_EXPR
:
1664 case VEC_UNPACK_FLOAT_HI_EXPR
:
1666 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1668 enum tree_code subcode
;
1670 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2);
1671 if (TREE_CODE (arg0
) != VECTOR_CST
)
1674 elts
= XALLOCAVEC (tree
, nelts
* 2);
1675 if (!vec_cst_ctor_to_array (arg0
, elts
))
1678 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
1679 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
1682 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
1685 subcode
= FLOAT_EXPR
;
1687 for (i
= 0; i
< nelts
; i
++)
1689 elts
[i
] = fold_convert_const (subcode
, TREE_TYPE (type
), elts
[i
]);
1690 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1694 return build_vector (type
, elts
);
1697 case REDUC_MIN_EXPR
:
1698 case REDUC_MAX_EXPR
:
1699 case REDUC_PLUS_EXPR
:
1701 unsigned int nelts
, i
;
1703 enum tree_code subcode
;
1705 if (TREE_CODE (arg0
) != VECTOR_CST
)
1707 nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
));
1709 elts
= XALLOCAVEC (tree
, nelts
);
1710 if (!vec_cst_ctor_to_array (arg0
, elts
))
1715 case REDUC_MIN_EXPR
: subcode
= MIN_EXPR
; break;
1716 case REDUC_MAX_EXPR
: subcode
= MAX_EXPR
; break;
1717 case REDUC_PLUS_EXPR
: subcode
= PLUS_EXPR
; break;
1718 default: gcc_unreachable ();
1721 for (i
= 1; i
< nelts
; i
++)
1723 elts
[0] = const_binop (subcode
, elts
[0], elts
[i
]);
1724 if (elts
[0] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[0]))
1738 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1739 indicates which particular sizetype to create. */
1742 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1744 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1747 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1748 is a tree code. The type of the result is taken from the operands.
1749 Both must be equivalent integer types, ala int_binop_types_match_p.
1750 If the operands are constant, so is the result. */
1753 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1755 tree type
= TREE_TYPE (arg0
);
1757 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1758 return error_mark_node
;
1760 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1763 /* Handle the special case of two integer constants faster. */
1764 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1766 /* And some specific cases even faster than that. */
1767 if (code
== PLUS_EXPR
)
1769 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1771 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1774 else if (code
== MINUS_EXPR
)
1776 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1779 else if (code
== MULT_EXPR
)
1781 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1785 /* Handle general case of two integer constants. For sizetype
1786 constant calculations we always want to know about overflow,
1787 even in the unsigned case. */
1788 return int_const_binop_1 (code
, arg0
, arg1
, -1);
1791 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1794 /* Given two values, either both of sizetype or both of bitsizetype,
1795 compute the difference between the two values. Return the value
1796 in signed type corresponding to the type of the operands. */
1799 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1801 tree type
= TREE_TYPE (arg0
);
1804 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1807 /* If the type is already signed, just do the simple thing. */
1808 if (!TYPE_UNSIGNED (type
))
1809 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1811 if (type
== sizetype
)
1813 else if (type
== bitsizetype
)
1814 ctype
= sbitsizetype
;
1816 ctype
= signed_type_for (type
);
1818 /* If either operand is not a constant, do the conversions to the signed
1819 type and subtract. The hardware will do the right thing with any
1820 overflow in the subtraction. */
1821 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1822 return size_binop_loc (loc
, MINUS_EXPR
,
1823 fold_convert_loc (loc
, ctype
, arg0
),
1824 fold_convert_loc (loc
, ctype
, arg1
));
1826 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1827 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1828 overflow) and negate (which can't either). Special-case a result
1829 of zero while we're here. */
1830 if (tree_int_cst_equal (arg0
, arg1
))
1831 return build_int_cst (ctype
, 0);
1832 else if (tree_int_cst_lt (arg1
, arg0
))
1833 return fold_convert_loc (loc
, ctype
,
1834 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1836 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1837 fold_convert_loc (loc
, ctype
,
1838 size_binop_loc (loc
,
1843 /* A subroutine of fold_convert_const handling conversions of an
1844 INTEGER_CST to another integer type. */
1847 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1849 /* Given an integer constant, make new constant with new type,
1850 appropriately sign-extended or truncated. Use widest_int
1851 so that any extension is done according ARG1's type. */
1852 return force_fit_type (type
, wi::to_widest (arg1
),
1853 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1854 TREE_OVERFLOW (arg1
));
1857 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1858 to an integer type. */
1861 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
1863 bool overflow
= false;
1866 /* The following code implements the floating point to integer
1867 conversion rules required by the Java Language Specification,
1868 that IEEE NaNs are mapped to zero and values that overflow
1869 the target precision saturate, i.e. values greater than
1870 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1871 are mapped to INT_MIN. These semantics are allowed by the
1872 C and C++ standards that simply state that the behavior of
1873 FP-to-integer conversion is unspecified upon overflow. */
1877 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1881 case FIX_TRUNC_EXPR
:
1882 real_trunc (&r
, VOIDmode
, &x
);
1889 /* If R is NaN, return zero and show we have an overflow. */
1890 if (REAL_VALUE_ISNAN (r
))
1893 val
= wi::zero (TYPE_PRECISION (type
));
1896 /* See if R is less than the lower bound or greater than the
1901 tree lt
= TYPE_MIN_VALUE (type
);
1902 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1903 if (REAL_VALUES_LESS (r
, l
))
1912 tree ut
= TYPE_MAX_VALUE (type
);
1915 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1916 if (REAL_VALUES_LESS (u
, r
))
1925 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
1927 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1931 /* A subroutine of fold_convert_const handling conversions of a
1932 FIXED_CST to an integer type. */
1935 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1938 double_int temp
, temp_trunc
;
1941 /* Right shift FIXED_CST to temp by fbit. */
1942 temp
= TREE_FIXED_CST (arg1
).data
;
1943 mode
= TREE_FIXED_CST (arg1
).mode
;
1944 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
1946 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
1947 HOST_BITS_PER_DOUBLE_INT
,
1948 SIGNED_FIXED_POINT_MODE_P (mode
));
1950 /* Left shift temp to temp_trunc by fbit. */
1951 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
1952 HOST_BITS_PER_DOUBLE_INT
,
1953 SIGNED_FIXED_POINT_MODE_P (mode
));
1957 temp
= double_int_zero
;
1958 temp_trunc
= double_int_zero
;
1961 /* If FIXED_CST is negative, we need to round the value toward 0.
1962 By checking if the fractional bits are not zero to add 1 to temp. */
1963 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1964 && temp_trunc
.is_negative ()
1965 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
1966 temp
+= double_int_one
;
1968 /* Given a fixed-point constant, make new constant with new type,
1969 appropriately sign-extended or truncated. */
1970 t
= force_fit_type (type
, temp
, -1,
1971 (temp
.is_negative ()
1972 && (TYPE_UNSIGNED (type
)
1973 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1974 | TREE_OVERFLOW (arg1
));
1979 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1980 to another floating point type. */
1983 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1985 REAL_VALUE_TYPE value
;
1988 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1989 t
= build_real (type
, value
);
1991 /* If converting an infinity or NAN to a representation that doesn't
1992 have one, set the overflow bit so that we can produce some kind of
1993 error message at the appropriate point if necessary. It's not the
1994 most user-friendly message, but it's better than nothing. */
1995 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
1996 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
1997 TREE_OVERFLOW (t
) = 1;
1998 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
1999 && !MODE_HAS_NANS (TYPE_MODE (type
)))
2000 TREE_OVERFLOW (t
) = 1;
2001 /* Regular overflow, conversion produced an infinity in a mode that
2002 can't represent them. */
2003 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
2004 && REAL_VALUE_ISINF (value
)
2005 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
2006 TREE_OVERFLOW (t
) = 1;
2008 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2012 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2013 to a floating point type. */
2016 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
2018 REAL_VALUE_TYPE value
;
2021 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
2022 t
= build_real (type
, value
);
2024 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2028 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2029 to another fixed-point type. */
2032 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
2034 FIXED_VALUE_TYPE value
;
2038 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
2039 TYPE_SATURATING (type
));
2040 t
= build_fixed (type
, value
);
2042 /* Propagate overflow flags. */
2043 if (overflow_p
| TREE_OVERFLOW (arg1
))
2044 TREE_OVERFLOW (t
) = 1;
2048 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2049 to a fixed-point type. */
2052 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
2054 FIXED_VALUE_TYPE value
;
2059 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
2061 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
2062 if (TREE_INT_CST_NUNITS (arg1
) == 1)
2063 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? (HOST_WIDE_INT
) -1 : 0;
2065 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
2067 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
), di
,
2068 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
2069 TYPE_SATURATING (type
));
2070 t
= build_fixed (type
, value
);
2072 /* Propagate overflow flags. */
2073 if (overflow_p
| TREE_OVERFLOW (arg1
))
2074 TREE_OVERFLOW (t
) = 1;
2078 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2079 to a fixed-point type. */
2082 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
2084 FIXED_VALUE_TYPE value
;
2088 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
2089 &TREE_REAL_CST (arg1
),
2090 TYPE_SATURATING (type
));
2091 t
= build_fixed (type
, value
);
2093 /* Propagate overflow flags. */
2094 if (overflow_p
| TREE_OVERFLOW (arg1
))
2095 TREE_OVERFLOW (t
) = 1;
2099 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2100 type TYPE. If no simplification can be done return NULL_TREE. */
2103 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
2105 if (TREE_TYPE (arg1
) == type
)
2108 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
2109 || TREE_CODE (type
) == OFFSET_TYPE
)
2111 if (TREE_CODE (arg1
) == INTEGER_CST
)
2112 return fold_convert_const_int_from_int (type
, arg1
);
2113 else if (TREE_CODE (arg1
) == REAL_CST
)
2114 return fold_convert_const_int_from_real (code
, type
, arg1
);
2115 else if (TREE_CODE (arg1
) == FIXED_CST
)
2116 return fold_convert_const_int_from_fixed (type
, arg1
);
2118 else if (TREE_CODE (type
) == REAL_TYPE
)
2120 if (TREE_CODE (arg1
) == INTEGER_CST
)
2121 return build_real_from_int_cst (type
, arg1
);
2122 else if (TREE_CODE (arg1
) == REAL_CST
)
2123 return fold_convert_const_real_from_real (type
, arg1
);
2124 else if (TREE_CODE (arg1
) == FIXED_CST
)
2125 return fold_convert_const_real_from_fixed (type
, arg1
);
2127 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
2129 if (TREE_CODE (arg1
) == FIXED_CST
)
2130 return fold_convert_const_fixed_from_fixed (type
, arg1
);
2131 else if (TREE_CODE (arg1
) == INTEGER_CST
)
2132 return fold_convert_const_fixed_from_int (type
, arg1
);
2133 else if (TREE_CODE (arg1
) == REAL_CST
)
2134 return fold_convert_const_fixed_from_real (type
, arg1
);
2139 /* Construct a vector of zero elements of vector type TYPE. */
2142 build_zero_vector (tree type
)
2146 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2147 return build_vector_from_val (type
, t
);
2150 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2153 fold_convertible_p (const_tree type
, const_tree arg
)
2155 tree orig
= TREE_TYPE (arg
);
2160 if (TREE_CODE (arg
) == ERROR_MARK
2161 || TREE_CODE (type
) == ERROR_MARK
2162 || TREE_CODE (orig
) == ERROR_MARK
)
2165 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2168 switch (TREE_CODE (type
))
2170 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2171 case POINTER_TYPE
: case REFERENCE_TYPE
:
2173 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2174 || TREE_CODE (orig
) == OFFSET_TYPE
)
2176 return (TREE_CODE (orig
) == VECTOR_TYPE
2177 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2180 case FIXED_POINT_TYPE
:
2184 return TREE_CODE (type
) == TREE_CODE (orig
);
2191 /* Convert expression ARG to type TYPE. Used by the middle-end for
2192 simple conversions in preference to calling the front-end's convert. */
2195 fold_convert_loc (location_t loc
, tree type
, tree arg
)
2197 tree orig
= TREE_TYPE (arg
);
2203 if (TREE_CODE (arg
) == ERROR_MARK
2204 || TREE_CODE (type
) == ERROR_MARK
2205 || TREE_CODE (orig
) == ERROR_MARK
)
2206 return error_mark_node
;
2208 switch (TREE_CODE (type
))
2211 case REFERENCE_TYPE
:
2212 /* Handle conversions between pointers to different address spaces. */
2213 if (POINTER_TYPE_P (orig
)
2214 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
2215 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
2216 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
2219 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2221 if (TREE_CODE (arg
) == INTEGER_CST
)
2223 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2224 if (tem
!= NULL_TREE
)
2227 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2228 || TREE_CODE (orig
) == OFFSET_TYPE
)
2229 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2230 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2231 return fold_convert_loc (loc
, type
,
2232 fold_build1_loc (loc
, REALPART_EXPR
,
2233 TREE_TYPE (orig
), arg
));
2234 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2235 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2236 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2239 if (TREE_CODE (arg
) == INTEGER_CST
)
2241 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2242 if (tem
!= NULL_TREE
)
2245 else if (TREE_CODE (arg
) == REAL_CST
)
2247 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2248 if (tem
!= NULL_TREE
)
2251 else if (TREE_CODE (arg
) == FIXED_CST
)
2253 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2254 if (tem
!= NULL_TREE
)
2258 switch (TREE_CODE (orig
))
2261 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2262 case POINTER_TYPE
: case REFERENCE_TYPE
:
2263 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2266 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2268 case FIXED_POINT_TYPE
:
2269 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2272 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2273 return fold_convert_loc (loc
, type
, tem
);
2279 case FIXED_POINT_TYPE
:
2280 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2281 || TREE_CODE (arg
) == REAL_CST
)
2283 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2284 if (tem
!= NULL_TREE
)
2285 goto fold_convert_exit
;
2288 switch (TREE_CODE (orig
))
2290 case FIXED_POINT_TYPE
:
2295 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2298 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2299 return fold_convert_loc (loc
, type
, tem
);
2306 switch (TREE_CODE (orig
))
2309 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2310 case POINTER_TYPE
: case REFERENCE_TYPE
:
2312 case FIXED_POINT_TYPE
:
2313 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2314 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2315 fold_convert_loc (loc
, TREE_TYPE (type
),
2316 integer_zero_node
));
2321 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2323 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2324 TREE_OPERAND (arg
, 0));
2325 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2326 TREE_OPERAND (arg
, 1));
2327 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2330 arg
= save_expr (arg
);
2331 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2332 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2333 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2334 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2335 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2343 if (integer_zerop (arg
))
2344 return build_zero_vector (type
);
2345 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2346 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2347 || TREE_CODE (orig
) == VECTOR_TYPE
);
2348 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2351 tem
= fold_ignored_result (arg
);
2352 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2355 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2356 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2360 protected_set_expr_location_unshare (tem
, loc
);
2364 /* Return false if expr can be assumed not to be an lvalue, true
2368 maybe_lvalue_p (const_tree x
)
2370 /* We only need to wrap lvalue tree codes. */
2371 switch (TREE_CODE (x
))
2384 case ARRAY_RANGE_REF
:
2390 case PREINCREMENT_EXPR
:
2391 case PREDECREMENT_EXPR
:
2393 case TRY_CATCH_EXPR
:
2394 case WITH_CLEANUP_EXPR
:
2403 /* Assume the worst for front-end tree codes. */
2404 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2412 /* Return an expr equal to X but certainly not valid as an lvalue. */
2415 non_lvalue_loc (location_t loc
, tree x
)
2417 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2422 if (! maybe_lvalue_p (x
))
2424 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2427 /* When pedantic, return an expr equal to X but certainly not valid as a
2428 pedantic lvalue. Otherwise, return X. */
2431 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2433 return protected_set_expr_location_unshare (x
, loc
);
2436 /* Given a tree comparison code, return the code that is the logical inverse.
2437 It is generally not safe to do this for floating-point comparisons, except
2438 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2439 ERROR_MARK in this case. */
2442 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2444 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2445 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2455 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2457 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2459 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2461 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2475 return UNORDERED_EXPR
;
2476 case UNORDERED_EXPR
:
2477 return ORDERED_EXPR
;
2483 /* Similar, but return the comparison that results if the operands are
2484 swapped. This is safe for floating-point. */
2487 swap_tree_comparison (enum tree_code code
)
2494 case UNORDERED_EXPR
:
2520 /* Convert a comparison tree code from an enum tree_code representation
2521 into a compcode bit-based encoding. This function is the inverse of
2522 compcode_to_comparison. */
2524 static enum comparison_code
2525 comparison_to_compcode (enum tree_code code
)
2542 return COMPCODE_ORD
;
2543 case UNORDERED_EXPR
:
2544 return COMPCODE_UNORD
;
2546 return COMPCODE_UNLT
;
2548 return COMPCODE_UNEQ
;
2550 return COMPCODE_UNLE
;
2552 return COMPCODE_UNGT
;
2554 return COMPCODE_LTGT
;
2556 return COMPCODE_UNGE
;
2562 /* Convert a compcode bit-based encoding of a comparison operator back
2563 to GCC's enum tree_code representation. This function is the
2564 inverse of comparison_to_compcode. */
2566 static enum tree_code
2567 compcode_to_comparison (enum comparison_code code
)
2584 return ORDERED_EXPR
;
2585 case COMPCODE_UNORD
:
2586 return UNORDERED_EXPR
;
2604 /* Return a tree for the comparison which is the combination of
2605 doing the AND or OR (depending on CODE) of the two operations LCODE
2606 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2607 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2608 if this makes the transformation invalid. */
2611 combine_comparisons (location_t loc
,
2612 enum tree_code code
, enum tree_code lcode
,
2613 enum tree_code rcode
, tree truth_type
,
2614 tree ll_arg
, tree lr_arg
)
2616 bool honor_nans
= HONOR_NANS (ll_arg
);
2617 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2618 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2623 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2624 compcode
= lcompcode
& rcompcode
;
2627 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2628 compcode
= lcompcode
| rcompcode
;
2637 /* Eliminate unordered comparisons, as well as LTGT and ORD
2638 which are not used unless the mode has NaNs. */
2639 compcode
&= ~COMPCODE_UNORD
;
2640 if (compcode
== COMPCODE_LTGT
)
2641 compcode
= COMPCODE_NE
;
2642 else if (compcode
== COMPCODE_ORD
)
2643 compcode
= COMPCODE_TRUE
;
2645 else if (flag_trapping_math
)
2647 /* Check that the original operation and the optimized ones will trap
2648 under the same condition. */
2649 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2650 && (lcompcode
!= COMPCODE_EQ
)
2651 && (lcompcode
!= COMPCODE_ORD
);
2652 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2653 && (rcompcode
!= COMPCODE_EQ
)
2654 && (rcompcode
!= COMPCODE_ORD
);
2655 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2656 && (compcode
!= COMPCODE_EQ
)
2657 && (compcode
!= COMPCODE_ORD
);
2659 /* In a short-circuited boolean expression the LHS might be
2660 such that the RHS, if evaluated, will never trap. For
2661 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2662 if neither x nor y is NaN. (This is a mixed blessing: for
2663 example, the expression above will never trap, hence
2664 optimizing it to x < y would be invalid). */
2665 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2666 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2669 /* If the comparison was short-circuited, and only the RHS
2670 trapped, we may now generate a spurious trap. */
2672 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2675 /* If we changed the conditions that cause a trap, we lose. */
2676 if ((ltrap
|| rtrap
) != trap
)
2680 if (compcode
== COMPCODE_TRUE
)
2681 return constant_boolean_node (true, truth_type
);
2682 else if (compcode
== COMPCODE_FALSE
)
2683 return constant_boolean_node (false, truth_type
);
2686 enum tree_code tcode
;
2688 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2689 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2693 /* Return nonzero if two operands (typically of the same tree node)
2694 are necessarily equal. If either argument has side-effects this
2695 function returns zero. FLAGS modifies behavior as follows:
2697 If OEP_ONLY_CONST is set, only return nonzero for constants.
2698 This function tests whether the operands are indistinguishable;
2699 it does not test whether they are equal using C's == operation.
2700 The distinction is important for IEEE floating point, because
2701 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2702 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2704 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2705 even though it may hold multiple values during a function.
2706 This is because a GCC tree node guarantees that nothing else is
2707 executed between the evaluation of its "operands" (which may often
2708 be evaluated in arbitrary order). Hence if the operands themselves
2709 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2710 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2711 unset means assuming isochronic (or instantaneous) tree equivalence.
2712 Unless comparing arbitrary expression trees, such as from different
2713 statements, this flag can usually be left unset.
2715 If OEP_PURE_SAME is set, then pure functions with identical arguments
2716 are considered the same. It is used when the caller has other ways
2717 to ensure that global memory is unchanged in between. */
2720 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2722 /* If either is ERROR_MARK, they aren't equal. */
2723 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2724 || TREE_TYPE (arg0
) == error_mark_node
2725 || TREE_TYPE (arg1
) == error_mark_node
)
2728 /* Similar, if either does not have a type (like a released SSA name),
2729 they aren't equal. */
2730 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2733 /* Check equality of integer constants before bailing out due to
2734 precision differences. */
2735 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2736 return tree_int_cst_equal (arg0
, arg1
);
2738 /* If both types don't have the same signedness, then we can't consider
2739 them equal. We must check this before the STRIP_NOPS calls
2740 because they may change the signedness of the arguments. As pointers
2741 strictly don't have a signedness, require either two pointers or
2742 two non-pointers as well. */
2743 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2744 || POINTER_TYPE_P (TREE_TYPE (arg0
)) != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2747 /* We cannot consider pointers to different address space equal. */
2748 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && POINTER_TYPE_P (TREE_TYPE (arg1
))
2749 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2750 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2753 /* If both types don't have the same precision, then it is not safe
2755 if (element_precision (TREE_TYPE (arg0
))
2756 != element_precision (TREE_TYPE (arg1
)))
2762 /* In case both args are comparisons but with different comparison
2763 code, try to swap the comparison operands of one arg to produce
2764 a match and compare that variant. */
2765 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2766 && COMPARISON_CLASS_P (arg0
)
2767 && COMPARISON_CLASS_P (arg1
))
2769 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2771 if (TREE_CODE (arg0
) == swap_code
)
2772 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2773 TREE_OPERAND (arg1
, 1), flags
)
2774 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2775 TREE_OPERAND (arg1
, 0), flags
);
2778 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2779 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2780 && !(CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
)))
2783 /* This is needed for conversions and for COMPONENT_REF.
2784 Might as well play it safe and always test this. */
2785 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2786 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2787 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2790 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2791 We don't care about side effects in that case because the SAVE_EXPR
2792 takes care of that for us. In all other cases, two expressions are
2793 equal if they have no side effects. If we have two identical
2794 expressions with side effects that should be treated the same due
2795 to the only side effects being identical SAVE_EXPR's, that will
2796 be detected in the recursive calls below.
2797 If we are taking an invariant address of two identical objects
2798 they are necessarily equal as well. */
2799 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2800 && (TREE_CODE (arg0
) == SAVE_EXPR
2801 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2802 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2805 /* Next handle constant cases, those for which we can return 1 even
2806 if ONLY_CONST is set. */
2807 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2808 switch (TREE_CODE (arg0
))
2811 return tree_int_cst_equal (arg0
, arg1
);
2814 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2815 TREE_FIXED_CST (arg1
));
2818 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2819 TREE_REAL_CST (arg1
)))
2823 if (!HONOR_SIGNED_ZEROS (arg0
))
2825 /* If we do not distinguish between signed and unsigned zero,
2826 consider them equal. */
2827 if (real_zerop (arg0
) && real_zerop (arg1
))
2836 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2839 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2841 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2842 VECTOR_CST_ELT (arg1
, i
), flags
))
2849 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2851 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2855 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2856 && ! memcmp (TREE_STRING_POINTER (arg0
),
2857 TREE_STRING_POINTER (arg1
),
2858 TREE_STRING_LENGTH (arg0
)));
2861 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2862 TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
)
2863 ? OEP_CONSTANT_ADDRESS_OF
| OEP_ADDRESS_OF
: 0);
2868 if (flags
& OEP_ONLY_CONST
)
2871 /* Define macros to test an operand from arg0 and arg1 for equality and a
2872 variant that allows null and views null as being different from any
2873 non-null value. In the latter case, if either is null, the both
2874 must be; otherwise, do the normal comparison. */
2875 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2876 TREE_OPERAND (arg1, N), flags)
2878 #define OP_SAME_WITH_NULL(N) \
2879 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2880 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2882 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2885 /* Two conversions are equal only if signedness and modes match. */
2886 switch (TREE_CODE (arg0
))
2889 case FIX_TRUNC_EXPR
:
2890 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2891 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2901 case tcc_comparison
:
2903 if (OP_SAME (0) && OP_SAME (1))
2906 /* For commutative ops, allow the other order. */
2907 return (commutative_tree_code (TREE_CODE (arg0
))
2908 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2909 TREE_OPERAND (arg1
, 1), flags
)
2910 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2911 TREE_OPERAND (arg1
, 0), flags
));
2914 /* If either of the pointer (or reference) expressions we are
2915 dereferencing contain a side effect, these cannot be equal,
2916 but their addresses can be. */
2917 if ((flags
& OEP_CONSTANT_ADDRESS_OF
) == 0
2918 && (TREE_SIDE_EFFECTS (arg0
)
2919 || TREE_SIDE_EFFECTS (arg1
)))
2922 switch (TREE_CODE (arg0
))
2925 if (!(flags
& OEP_ADDRESS_OF
)
2926 && (TYPE_ALIGN (TREE_TYPE (arg0
))
2927 != TYPE_ALIGN (TREE_TYPE (arg1
))))
2929 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2936 case TARGET_MEM_REF
:
2937 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2938 /* Require equal extra operands and then fall through to MEM_REF
2939 handling of the two common operands. */
2940 if (!OP_SAME_WITH_NULL (2)
2941 || !OP_SAME_WITH_NULL (3)
2942 || !OP_SAME_WITH_NULL (4))
2946 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2947 /* Require equal access sizes, and similar pointer types.
2948 We can have incomplete types for array references of
2949 variable-sized arrays from the Fortran frontend
2950 though. Also verify the types are compatible. */
2951 return ((TYPE_SIZE (TREE_TYPE (arg0
)) == TYPE_SIZE (TREE_TYPE (arg1
))
2952 || (TYPE_SIZE (TREE_TYPE (arg0
))
2953 && TYPE_SIZE (TREE_TYPE (arg1
))
2954 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2955 TYPE_SIZE (TREE_TYPE (arg1
)), flags
)))
2956 && types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
))
2957 && ((flags
& OEP_ADDRESS_OF
)
2958 || (alias_ptr_types_compatible_p
2959 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
2960 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
2961 && (MR_DEPENDENCE_CLIQUE (arg0
)
2962 == MR_DEPENDENCE_CLIQUE (arg1
))
2963 && (MR_DEPENDENCE_BASE (arg0
)
2964 == MR_DEPENDENCE_BASE (arg1
))
2965 && (TYPE_ALIGN (TREE_TYPE (arg0
))
2966 == TYPE_ALIGN (TREE_TYPE (arg1
)))))
2967 && OP_SAME (0) && OP_SAME (1));
2970 case ARRAY_RANGE_REF
:
2971 /* Operands 2 and 3 may be null.
2972 Compare the array index by value if it is constant first as we
2973 may have different types but same value here. */
2976 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2977 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
2978 TREE_OPERAND (arg1
, 1))
2980 && OP_SAME_WITH_NULL (2)
2981 && OP_SAME_WITH_NULL (3));
2984 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2985 may be NULL when we're called to compare MEM_EXPRs. */
2986 if (!OP_SAME_WITH_NULL (0)
2989 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2990 return OP_SAME_WITH_NULL (2);
2995 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2996 return OP_SAME (1) && OP_SAME (2);
3002 case tcc_expression
:
3003 switch (TREE_CODE (arg0
))
3006 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3007 TREE_OPERAND (arg1
, 0),
3008 flags
| OEP_ADDRESS_OF
);
3010 case TRUTH_NOT_EXPR
:
3013 case TRUTH_ANDIF_EXPR
:
3014 case TRUTH_ORIF_EXPR
:
3015 return OP_SAME (0) && OP_SAME (1);
3018 case WIDEN_MULT_PLUS_EXPR
:
3019 case WIDEN_MULT_MINUS_EXPR
:
3022 /* The multiplcation operands are commutative. */
3025 case TRUTH_AND_EXPR
:
3027 case TRUTH_XOR_EXPR
:
3028 if (OP_SAME (0) && OP_SAME (1))
3031 /* Otherwise take into account this is a commutative operation. */
3032 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3033 TREE_OPERAND (arg1
, 1), flags
)
3034 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3035 TREE_OPERAND (arg1
, 0), flags
));
3040 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3047 switch (TREE_CODE (arg0
))
3050 /* If the CALL_EXPRs call different functions, then they
3051 clearly can not be equal. */
3052 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3057 unsigned int cef
= call_expr_flags (arg0
);
3058 if (flags
& OEP_PURE_SAME
)
3059 cef
&= ECF_CONST
| ECF_PURE
;
3066 /* Now see if all the arguments are the same. */
3068 const_call_expr_arg_iterator iter0
, iter1
;
3070 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3071 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3073 a0
= next_const_call_expr_arg (&iter0
),
3074 a1
= next_const_call_expr_arg (&iter1
))
3075 if (! operand_equal_p (a0
, a1
, flags
))
3078 /* If we get here and both argument lists are exhausted
3079 then the CALL_EXPRs are equal. */
3080 return ! (a0
|| a1
);
3086 case tcc_declaration
:
3087 /* Consider __builtin_sqrt equal to sqrt. */
3088 return (TREE_CODE (arg0
) == FUNCTION_DECL
3089 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
3090 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3091 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
3098 #undef OP_SAME_WITH_NULL
3101 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3102 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3104 When in doubt, return 0. */
3107 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
3109 int unsignedp1
, unsignedpo
;
3110 tree primarg0
, primarg1
, primother
;
3111 unsigned int correct_width
;
3113 if (operand_equal_p (arg0
, arg1
, 0))
3116 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
3117 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
3120 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3121 and see if the inner values are the same. This removes any
3122 signedness comparison, which doesn't matter here. */
3123 primarg0
= arg0
, primarg1
= arg1
;
3124 STRIP_NOPS (primarg0
);
3125 STRIP_NOPS (primarg1
);
3126 if (operand_equal_p (primarg0
, primarg1
, 0))
3129 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3130 actual comparison operand, ARG0.
3132 First throw away any conversions to wider types
3133 already present in the operands. */
3135 primarg1
= get_narrower (arg1
, &unsignedp1
);
3136 primother
= get_narrower (other
, &unsignedpo
);
3138 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
3139 if (unsignedp1
== unsignedpo
3140 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
3141 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
3143 tree type
= TREE_TYPE (arg0
);
3145 /* Make sure shorter operand is extended the right way
3146 to match the longer operand. */
3147 primarg1
= fold_convert (signed_or_unsigned_type_for
3148 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
3150 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
3157 /* See if ARG is an expression that is either a comparison or is performing
3158 arithmetic on comparisons. The comparisons must only be comparing
3159 two different values, which will be stored in *CVAL1 and *CVAL2; if
3160 they are nonzero it means that some operands have already been found.
3161 No variables may be used anywhere else in the expression except in the
3162 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3163 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3165 If this is true, return 1. Otherwise, return zero. */
3168 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
3170 enum tree_code code
= TREE_CODE (arg
);
3171 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3173 /* We can handle some of the tcc_expression cases here. */
3174 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3176 else if (tclass
== tcc_expression
3177 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
3178 || code
== COMPOUND_EXPR
))
3179 tclass
= tcc_binary
;
3181 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
3182 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
3184 /* If we've already found a CVAL1 or CVAL2, this expression is
3185 two complex to handle. */
3186 if (*cval1
|| *cval2
)
3196 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
3199 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
3200 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3201 cval1
, cval2
, save_p
));
3206 case tcc_expression
:
3207 if (code
== COND_EXPR
)
3208 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
3209 cval1
, cval2
, save_p
)
3210 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3211 cval1
, cval2
, save_p
)
3212 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
3213 cval1
, cval2
, save_p
));
3216 case tcc_comparison
:
3217 /* First see if we can handle the first operand, then the second. For
3218 the second operand, we know *CVAL1 can't be zero. It must be that
3219 one side of the comparison is each of the values; test for the
3220 case where this isn't true by failing if the two operands
3223 if (operand_equal_p (TREE_OPERAND (arg
, 0),
3224 TREE_OPERAND (arg
, 1), 0))
3228 *cval1
= TREE_OPERAND (arg
, 0);
3229 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
3231 else if (*cval2
== 0)
3232 *cval2
= TREE_OPERAND (arg
, 0);
3233 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
3238 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
3240 else if (*cval2
== 0)
3241 *cval2
= TREE_OPERAND (arg
, 1);
3242 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
3254 /* ARG is a tree that is known to contain just arithmetic operations and
3255 comparisons. Evaluate the operations in the tree substituting NEW0 for
3256 any occurrence of OLD0 as an operand of a comparison and likewise for
3260 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
3261 tree old1
, tree new1
)
3263 tree type
= TREE_TYPE (arg
);
3264 enum tree_code code
= TREE_CODE (arg
);
3265 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3267 /* We can handle some of the tcc_expression cases here. */
3268 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3270 else if (tclass
== tcc_expression
3271 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
3272 tclass
= tcc_binary
;
3277 return fold_build1_loc (loc
, code
, type
,
3278 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3279 old0
, new0
, old1
, new1
));
3282 return fold_build2_loc (loc
, code
, type
,
3283 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3284 old0
, new0
, old1
, new1
),
3285 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3286 old0
, new0
, old1
, new1
));
3288 case tcc_expression
:
3292 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
3296 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
3300 return fold_build3_loc (loc
, code
, type
,
3301 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3302 old0
, new0
, old1
, new1
),
3303 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3304 old0
, new0
, old1
, new1
),
3305 eval_subst (loc
, TREE_OPERAND (arg
, 2),
3306 old0
, new0
, old1
, new1
));
3310 /* Fall through - ??? */
3312 case tcc_comparison
:
3314 tree arg0
= TREE_OPERAND (arg
, 0);
3315 tree arg1
= TREE_OPERAND (arg
, 1);
3317 /* We need to check both for exact equality and tree equality. The
3318 former will be true if the operand has a side-effect. In that
3319 case, we know the operand occurred exactly once. */
3321 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3323 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3326 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3328 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
3331 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
3339 /* Return a tree for the case when the result of an expression is RESULT
3340 converted to TYPE and OMITTED was previously an operand of the expression
3341 but is now not needed (e.g., we folded OMITTED * 0).
3343 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3344 the conversion of RESULT to TYPE. */
3347 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
3349 tree t
= fold_convert_loc (loc
, type
, result
);
3351 /* If the resulting operand is an empty statement, just return the omitted
3352 statement casted to void. */
3353 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
3354 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3355 fold_ignored_result (omitted
));
3357 if (TREE_SIDE_EFFECTS (omitted
))
3358 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3359 fold_ignored_result (omitted
), t
);
3361 return non_lvalue_loc (loc
, t
);
3364 /* Return a tree for the case when the result of an expression is RESULT
3365 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3366 of the expression but are now not needed.
3368 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3369 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3370 evaluated before OMITTED2. Otherwise, if neither has side effects,
3371 just do the conversion of RESULT to TYPE. */
3374 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3375 tree omitted1
, tree omitted2
)
3377 tree t
= fold_convert_loc (loc
, type
, result
);
3379 if (TREE_SIDE_EFFECTS (omitted2
))
3380 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3381 if (TREE_SIDE_EFFECTS (omitted1
))
3382 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3384 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3388 /* Return a simplified tree node for the truth-negation of ARG. This
3389 never alters ARG itself. We assume that ARG is an operation that
3390 returns a truth value (0 or 1).
3392 FIXME: one would think we would fold the result, but it causes
3393 problems with the dominator optimizer. */
3396 fold_truth_not_expr (location_t loc
, tree arg
)
3398 tree type
= TREE_TYPE (arg
);
3399 enum tree_code code
= TREE_CODE (arg
);
3400 location_t loc1
, loc2
;
3402 /* If this is a comparison, we can simply invert it, except for
3403 floating-point non-equality comparisons, in which case we just
3404 enclose a TRUTH_NOT_EXPR around what we have. */
3406 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3408 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3409 if (FLOAT_TYPE_P (op_type
)
3410 && flag_trapping_math
3411 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3412 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3415 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
3416 if (code
== ERROR_MARK
)
3419 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3420 TREE_OPERAND (arg
, 1));
3426 return constant_boolean_node (integer_zerop (arg
), type
);
3428 case TRUTH_AND_EXPR
:
3429 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3430 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3431 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3432 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3433 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3436 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3437 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3438 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3439 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3440 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3442 case TRUTH_XOR_EXPR
:
3443 /* Here we can invert either operand. We invert the first operand
3444 unless the second operand is a TRUTH_NOT_EXPR in which case our
3445 result is the XOR of the first operand with the inside of the
3446 negation of the second operand. */
3448 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3449 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3450 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3452 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3453 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3454 TREE_OPERAND (arg
, 1));
3456 case TRUTH_ANDIF_EXPR
:
3457 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3458 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3459 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3460 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3461 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3463 case TRUTH_ORIF_EXPR
:
3464 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3465 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3466 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3467 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3468 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3470 case TRUTH_NOT_EXPR
:
3471 return TREE_OPERAND (arg
, 0);
3475 tree arg1
= TREE_OPERAND (arg
, 1);
3476 tree arg2
= TREE_OPERAND (arg
, 2);
3478 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3479 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3481 /* A COND_EXPR may have a throw as one operand, which
3482 then has void type. Just leave void operands
3484 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3485 VOID_TYPE_P (TREE_TYPE (arg1
))
3486 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3487 VOID_TYPE_P (TREE_TYPE (arg2
))
3488 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3492 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3493 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3494 TREE_OPERAND (arg
, 0),
3495 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3497 case NON_LVALUE_EXPR
:
3498 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3499 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3502 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3503 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3505 /* ... fall through ... */
3508 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3509 return build1_loc (loc
, TREE_CODE (arg
), type
,
3510 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3513 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3515 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3518 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3520 case CLEANUP_POINT_EXPR
:
3521 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3522 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3523 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3530 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3531 assume that ARG is an operation that returns a truth value (0 or 1
3532 for scalars, 0 or -1 for vectors). Return the folded expression if
3533 folding is successful. Otherwise, return NULL_TREE. */
3536 fold_invert_truthvalue (location_t loc
, tree arg
)
3538 tree type
= TREE_TYPE (arg
);
3539 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
3545 /* Return a simplified tree node for the truth-negation of ARG. This
3546 never alters ARG itself. We assume that ARG is an operation that
3547 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3550 invert_truthvalue_loc (location_t loc
, tree arg
)
3552 if (TREE_CODE (arg
) == ERROR_MARK
)
3555 tree type
= TREE_TYPE (arg
);
3556 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
3562 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3563 operands are another bit-wise operation with a common input. If so,
3564 distribute the bit operations to save an operation and possibly two if
3565 constants are involved. For example, convert
3566 (A | B) & (A | C) into A | (B & C)
3567 Further simplification will occur if B and C are constants.
3569 If this optimization cannot be done, 0 will be returned. */
3572 distribute_bit_expr (location_t loc
, enum tree_code code
, tree type
,
3573 tree arg0
, tree arg1
)
3578 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3579 || TREE_CODE (arg0
) == code
3580 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3581 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
3584 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3586 common
= TREE_OPERAND (arg0
, 0);
3587 left
= TREE_OPERAND (arg0
, 1);
3588 right
= TREE_OPERAND (arg1
, 1);
3590 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3592 common
= TREE_OPERAND (arg0
, 0);
3593 left
= TREE_OPERAND (arg0
, 1);
3594 right
= TREE_OPERAND (arg1
, 0);
3596 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3598 common
= TREE_OPERAND (arg0
, 1);
3599 left
= TREE_OPERAND (arg0
, 0);
3600 right
= TREE_OPERAND (arg1
, 1);
3602 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3604 common
= TREE_OPERAND (arg0
, 1);
3605 left
= TREE_OPERAND (arg0
, 0);
3606 right
= TREE_OPERAND (arg1
, 0);
3611 common
= fold_convert_loc (loc
, type
, common
);
3612 left
= fold_convert_loc (loc
, type
, left
);
3613 right
= fold_convert_loc (loc
, type
, right
);
3614 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, common
,
3615 fold_build2_loc (loc
, code
, type
, left
, right
));
3618 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3619 with code CODE. This optimization is unsafe. */
3621 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3622 tree arg0
, tree arg1
)
3624 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3625 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3627 /* (A / C) +- (B / C) -> (A +- B) / C. */
3629 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3630 TREE_OPERAND (arg1
, 1), 0))
3631 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3632 fold_build2_loc (loc
, code
, type
,
3633 TREE_OPERAND (arg0
, 0),
3634 TREE_OPERAND (arg1
, 0)),
3635 TREE_OPERAND (arg0
, 1));
3637 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3638 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3639 TREE_OPERAND (arg1
, 0), 0)
3640 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3641 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3643 REAL_VALUE_TYPE r0
, r1
;
3644 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3645 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3647 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3649 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3650 real_arithmetic (&r0
, code
, &r0
, &r1
);
3651 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3652 TREE_OPERAND (arg0
, 0),
3653 build_real (type
, r0
));
3659 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3660 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3663 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3664 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
, int unsignedp
)
3666 tree result
, bftype
;
3670 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3671 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3672 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3673 && tree_fits_shwi_p (size
)
3674 && tree_to_shwi (size
) == bitsize
)
3675 return fold_convert_loc (loc
, type
, inner
);
3679 if (TYPE_PRECISION (bftype
) != bitsize
3680 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3681 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3683 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3684 size_int (bitsize
), bitsize_int (bitpos
));
3687 result
= fold_convert_loc (loc
, type
, result
);
3692 /* Optimize a bit-field compare.
3694 There are two cases: First is a compare against a constant and the
3695 second is a comparison of two items where the fields are at the same
3696 bit position relative to the start of a chunk (byte, halfword, word)
3697 large enough to contain it. In these cases we can avoid the shift
3698 implicit in bitfield extractions.
3700 For constants, we emit a compare of the shifted constant with the
3701 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3702 compared. For two fields at the same position, we do the ANDs with the
3703 similar mask and compare the result of the ANDs.
3705 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3706 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3707 are the left and right operands of the comparison, respectively.
3709 If the optimization described above can be done, we return the resulting
3710 tree. Otherwise we return zero. */
3713 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3714 tree compare_type
, tree lhs
, tree rhs
)
3716 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3717 tree type
= TREE_TYPE (lhs
);
3719 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3720 machine_mode lmode
, rmode
, nmode
;
3721 int lunsignedp
, runsignedp
;
3722 int lvolatilep
= 0, rvolatilep
= 0;
3723 tree linner
, rinner
= NULL_TREE
;
3727 /* Get all the information about the extractions being done. If the bit size
3728 if the same as the size of the underlying object, we aren't doing an
3729 extraction at all and so can do nothing. We also don't want to
3730 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3731 then will no longer be able to replace it. */
3732 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3733 &lunsignedp
, &lvolatilep
, false);
3734 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3735 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
|| lvolatilep
)
3740 /* If this is not a constant, we can only do something if bit positions,
3741 sizes, and signedness are the same. */
3742 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3743 &runsignedp
, &rvolatilep
, false);
3745 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3746 || lunsignedp
!= runsignedp
|| offset
!= 0
3747 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
|| rvolatilep
)
3751 /* See if we can find a mode to refer to this field. We should be able to,
3752 but fail if we can't. */
3753 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3754 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3755 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3756 TYPE_ALIGN (TREE_TYPE (rinner
))),
3758 if (nmode
== VOIDmode
)
3761 /* Set signed and unsigned types of the precision of this mode for the
3763 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3765 /* Compute the bit position and size for the new reference and our offset
3766 within it. If the new reference is the same size as the original, we
3767 won't optimize anything, so return zero. */
3768 nbitsize
= GET_MODE_BITSIZE (nmode
);
3769 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3771 if (nbitsize
== lbitsize
)
3774 if (BYTES_BIG_ENDIAN
)
3775 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3777 /* Make the mask to be used against the extracted field. */
3778 mask
= build_int_cst_type (unsigned_type
, -1);
3779 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3780 mask
= const_binop (RSHIFT_EXPR
, mask
,
3781 size_int (nbitsize
- lbitsize
- lbitpos
));
3784 /* If not comparing with constant, just rework the comparison
3786 return fold_build2_loc (loc
, code
, compare_type
,
3787 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3788 make_bit_field_ref (loc
, linner
,
3793 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3794 make_bit_field_ref (loc
, rinner
,
3800 /* Otherwise, we are handling the constant case. See if the constant is too
3801 big for the field. Warn and return a tree of for 0 (false) if so. We do
3802 this not only for its own sake, but to avoid having to test for this
3803 error case below. If we didn't, we might generate wrong code.
3805 For unsigned fields, the constant shifted right by the field length should
3806 be all zero. For signed fields, the high-order bits should agree with
3811 if (wi::lrshift (rhs
, lbitsize
) != 0)
3813 warning (0, "comparison is always %d due to width of bit-field",
3815 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3820 wide_int tem
= wi::arshift (rhs
, lbitsize
- 1);
3821 if (tem
!= 0 && tem
!= -1)
3823 warning (0, "comparison is always %d due to width of bit-field",
3825 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3829 /* Single-bit compares should always be against zero. */
3830 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3832 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3833 rhs
= build_int_cst (type
, 0);
3836 /* Make a new bitfield reference, shift the constant over the
3837 appropriate number of bits and mask it with the computed mask
3838 (in case this was a signed field). If we changed it, make a new one. */
3839 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3841 rhs
= const_binop (BIT_AND_EXPR
,
3842 const_binop (LSHIFT_EXPR
,
3843 fold_convert_loc (loc
, unsigned_type
, rhs
),
3844 size_int (lbitpos
)),
3847 lhs
= build2_loc (loc
, code
, compare_type
,
3848 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3852 /* Subroutine for fold_truth_andor_1: decode a field reference.
3854 If EXP is a comparison reference, we return the innermost reference.
3856 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3857 set to the starting bit number.
3859 If the innermost field can be completely contained in a mode-sized
3860 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3862 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3863 otherwise it is not changed.
3865 *PUNSIGNEDP is set to the signedness of the field.
3867 *PMASK is set to the mask used. This is either contained in a
3868 BIT_AND_EXPR or derived from the width of the field.
3870 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3872 Return 0 if this is not a component reference or is one that we can't
3873 do anything with. */
3876 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3877 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
3878 int *punsignedp
, int *pvolatilep
,
3879 tree
*pmask
, tree
*pand_mask
)
3881 tree outer_type
= 0;
3883 tree mask
, inner
, offset
;
3885 unsigned int precision
;
3887 /* All the optimizations using this function assume integer fields.
3888 There are problems with FP fields since the type_for_size call
3889 below can fail for, e.g., XFmode. */
3890 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3893 /* We are interested in the bare arrangement of bits, so strip everything
3894 that doesn't affect the machine mode. However, record the type of the
3895 outermost expression if it may matter below. */
3896 if (CONVERT_EXPR_P (exp
)
3897 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3898 outer_type
= TREE_TYPE (exp
);
3901 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3903 and_mask
= TREE_OPERAND (exp
, 1);
3904 exp
= TREE_OPERAND (exp
, 0);
3905 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3906 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3910 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3911 punsignedp
, pvolatilep
, false);
3912 if ((inner
== exp
&& and_mask
== 0)
3913 || *pbitsize
< 0 || offset
!= 0
3914 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3917 /* If the number of bits in the reference is the same as the bitsize of
3918 the outer type, then the outer type gives the signedness. Otherwise
3919 (in case of a small bitfield) the signedness is unchanged. */
3920 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3921 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3923 /* Compute the mask to access the bitfield. */
3924 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3925 precision
= TYPE_PRECISION (unsigned_type
);
3927 mask
= build_int_cst_type (unsigned_type
, -1);
3929 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3930 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3932 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3934 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3935 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3938 *pand_mask
= and_mask
;
3942 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3943 bit positions and MASK is SIGNED. */
3946 all_ones_mask_p (const_tree mask
, unsigned int size
)
3948 tree type
= TREE_TYPE (mask
);
3949 unsigned int precision
= TYPE_PRECISION (type
);
3951 /* If this function returns true when the type of the mask is
3952 UNSIGNED, then there will be errors. In particular see
3953 gcc.c-torture/execute/990326-1.c. There does not appear to be
3954 any documentation paper trail as to why this is so. But the pre
3955 wide-int worked with that restriction and it has been preserved
3957 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
3960 return wi::mask (size
, false, precision
) == mask
;
3963 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3964 represents the sign bit of EXP's type. If EXP represents a sign
3965 or zero extension, also test VAL against the unextended type.
3966 The return value is the (sub)expression whose sign bit is VAL,
3967 or NULL_TREE otherwise. */
3970 sign_bit_p (tree exp
, const_tree val
)
3975 /* Tree EXP must have an integral type. */
3976 t
= TREE_TYPE (exp
);
3977 if (! INTEGRAL_TYPE_P (t
))
3980 /* Tree VAL must be an integer constant. */
3981 if (TREE_CODE (val
) != INTEGER_CST
3982 || TREE_OVERFLOW (val
))
3985 width
= TYPE_PRECISION (t
);
3986 if (wi::only_sign_bit_p (val
, width
))
3989 /* Handle extension from a narrower type. */
3990 if (TREE_CODE (exp
) == NOP_EXPR
3991 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3992 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3997 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3998 to be evaluated unconditionally. */
4001 simple_operand_p (const_tree exp
)
4003 /* Strip any conversions that don't change the machine mode. */
4006 return (CONSTANT_CLASS_P (exp
)
4007 || TREE_CODE (exp
) == SSA_NAME
4009 && ! TREE_ADDRESSABLE (exp
)
4010 && ! TREE_THIS_VOLATILE (exp
)
4011 && ! DECL_NONLOCAL (exp
)
4012 /* Don't regard global variables as simple. They may be
4013 allocated in ways unknown to the compiler (shared memory,
4014 #pragma weak, etc). */
4015 && ! TREE_PUBLIC (exp
)
4016 && ! DECL_EXTERNAL (exp
)
4017 /* Weakrefs are not safe to be read, since they can be NULL.
4018 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4019 have DECL_WEAK flag set. */
4020 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
4021 /* Loading a static variable is unduly expensive, but global
4022 registers aren't expensive. */
4023 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
4026 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4027 to be evaluated unconditionally.
4028 I addition to simple_operand_p, we assume that comparisons, conversions,
4029 and logic-not operations are simple, if their operands are simple, too. */
4032 simple_operand_p_2 (tree exp
)
4034 enum tree_code code
;
4036 if (TREE_SIDE_EFFECTS (exp
)
4037 || tree_could_trap_p (exp
))
4040 while (CONVERT_EXPR_P (exp
))
4041 exp
= TREE_OPERAND (exp
, 0);
4043 code
= TREE_CODE (exp
);
4045 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4046 return (simple_operand_p (TREE_OPERAND (exp
, 0))
4047 && simple_operand_p (TREE_OPERAND (exp
, 1)));
4049 if (code
== TRUTH_NOT_EXPR
)
4050 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
4052 return simple_operand_p (exp
);
4056 /* The following functions are subroutines to fold_range_test and allow it to
4057 try to change a logical combination of comparisons into a range test.
4060 X == 2 || X == 3 || X == 4 || X == 5
4064 (unsigned) (X - 2) <= 3
4066 We describe each set of comparisons as being either inside or outside
4067 a range, using a variable named like IN_P, and then describe the
4068 range with a lower and upper bound. If one of the bounds is omitted,
4069 it represents either the highest or lowest value of the type.
4071 In the comments below, we represent a range by two numbers in brackets
4072 preceded by a "+" to designate being inside that range, or a "-" to
4073 designate being outside that range, so the condition can be inverted by
4074 flipping the prefix. An omitted bound is represented by a "-". For
4075 example, "- [-, 10]" means being outside the range starting at the lowest
4076 possible value and ending at 10, in other words, being greater than 10.
4077 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4080 We set up things so that the missing bounds are handled in a consistent
4081 manner so neither a missing bound nor "true" and "false" need to be
4082 handled using a special case. */
4084 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4085 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4086 and UPPER1_P are nonzero if the respective argument is an upper bound
4087 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4088 must be specified for a comparison. ARG1 will be converted to ARG0's
4089 type if both are specified. */
4092 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
4093 tree arg1
, int upper1_p
)
4099 /* If neither arg represents infinity, do the normal operation.
4100 Else, if not a comparison, return infinity. Else handle the special
4101 comparison rules. Note that most of the cases below won't occur, but
4102 are handled for consistency. */
4104 if (arg0
!= 0 && arg1
!= 0)
4106 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
4107 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
4109 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
4112 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4115 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4116 for neither. In real maths, we cannot assume open ended ranges are
4117 the same. But, this is computer arithmetic, where numbers are finite.
4118 We can therefore make the transformation of any unbounded range with
4119 the value Z, Z being greater than any representable number. This permits
4120 us to treat unbounded ranges as equal. */
4121 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
4122 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
4126 result
= sgn0
== sgn1
;
4129 result
= sgn0
!= sgn1
;
4132 result
= sgn0
< sgn1
;
4135 result
= sgn0
<= sgn1
;
4138 result
= sgn0
> sgn1
;
4141 result
= sgn0
>= sgn1
;
4147 return constant_boolean_node (result
, type
);
4150 /* Helper routine for make_range. Perform one step for it, return
4151 new expression if the loop should continue or NULL_TREE if it should
4155 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
4156 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
4157 bool *strict_overflow_p
)
4159 tree arg0_type
= TREE_TYPE (arg0
);
4160 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
4161 int in_p
= *p_in_p
, n_in_p
;
4165 case TRUTH_NOT_EXPR
:
4166 /* We can only do something if the range is testing for zero. */
4167 if (low
== NULL_TREE
|| high
== NULL_TREE
4168 || ! integer_zerop (low
) || ! integer_zerop (high
))
4173 case EQ_EXPR
: case NE_EXPR
:
4174 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4175 /* We can only do something if the range is testing for zero
4176 and if the second operand is an integer constant. Note that
4177 saying something is "in" the range we make is done by
4178 complementing IN_P since it will set in the initial case of
4179 being not equal to zero; "out" is leaving it alone. */
4180 if (low
== NULL_TREE
|| high
== NULL_TREE
4181 || ! integer_zerop (low
) || ! integer_zerop (high
)
4182 || TREE_CODE (arg1
) != INTEGER_CST
)
4187 case NE_EXPR
: /* - [c, c] */
4190 case EQ_EXPR
: /* + [c, c] */
4191 in_p
= ! in_p
, low
= high
= arg1
;
4193 case GT_EXPR
: /* - [-, c] */
4194 low
= 0, high
= arg1
;
4196 case GE_EXPR
: /* + [c, -] */
4197 in_p
= ! in_p
, low
= arg1
, high
= 0;
4199 case LT_EXPR
: /* - [c, -] */
4200 low
= arg1
, high
= 0;
4202 case LE_EXPR
: /* + [-, c] */
4203 in_p
= ! in_p
, low
= 0, high
= arg1
;
4209 /* If this is an unsigned comparison, we also know that EXP is
4210 greater than or equal to zero. We base the range tests we make
4211 on that fact, so we record it here so we can parse existing
4212 range tests. We test arg0_type since often the return type
4213 of, e.g. EQ_EXPR, is boolean. */
4214 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4216 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4218 build_int_cst (arg0_type
, 0),
4222 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
4224 /* If the high bound is missing, but we have a nonzero low
4225 bound, reverse the range so it goes from zero to the low bound
4227 if (high
== 0 && low
&& ! integer_zerop (low
))
4230 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
4231 build_int_cst (TREE_TYPE (low
), 1), 0);
4232 low
= build_int_cst (arg0_type
, 0);
4242 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4243 low and high are non-NULL, then normalize will DTRT. */
4244 if (!TYPE_UNSIGNED (arg0_type
)
4245 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4247 if (low
== NULL_TREE
)
4248 low
= TYPE_MIN_VALUE (arg0_type
);
4249 if (high
== NULL_TREE
)
4250 high
= TYPE_MAX_VALUE (arg0_type
);
4253 /* (-x) IN [a,b] -> x in [-b, -a] */
4254 n_low
= range_binop (MINUS_EXPR
, exp_type
,
4255 build_int_cst (exp_type
, 0),
4257 n_high
= range_binop (MINUS_EXPR
, exp_type
,
4258 build_int_cst (exp_type
, 0),
4260 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
4266 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
4267 build_int_cst (exp_type
, 1));
4271 if (TREE_CODE (arg1
) != INTEGER_CST
)
4274 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4275 move a constant to the other side. */
4276 if (!TYPE_UNSIGNED (arg0_type
)
4277 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4280 /* If EXP is signed, any overflow in the computation is undefined,
4281 so we don't worry about it so long as our computations on
4282 the bounds don't overflow. For unsigned, overflow is defined
4283 and this is exactly the right thing. */
4284 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4285 arg0_type
, low
, 0, arg1
, 0);
4286 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4287 arg0_type
, high
, 1, arg1
, 0);
4288 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4289 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4292 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4293 *strict_overflow_p
= true;
4296 /* Check for an unsigned range which has wrapped around the maximum
4297 value thus making n_high < n_low, and normalize it. */
4298 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4300 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4301 build_int_cst (TREE_TYPE (n_high
), 1), 0);
4302 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4303 build_int_cst (TREE_TYPE (n_low
), 1), 0);
4305 /* If the range is of the form +/- [ x+1, x ], we won't
4306 be able to normalize it. But then, it represents the
4307 whole range or the empty set, so make it
4309 if (tree_int_cst_equal (n_low
, low
)
4310 && tree_int_cst_equal (n_high
, high
))
4316 low
= n_low
, high
= n_high
;
4324 case NON_LVALUE_EXPR
:
4325 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4328 if (! INTEGRAL_TYPE_P (arg0_type
)
4329 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4330 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4333 n_low
= low
, n_high
= high
;
4336 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
4339 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
4341 /* If we're converting arg0 from an unsigned type, to exp,
4342 a signed type, we will be doing the comparison as unsigned.
4343 The tests above have already verified that LOW and HIGH
4346 So we have to ensure that we will handle large unsigned
4347 values the same way that the current signed bounds treat
4350 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4354 /* For fixed-point modes, we need to pass the saturating flag
4355 as the 2nd parameter. */
4356 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
4358 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
4359 TYPE_SATURATING (arg0_type
));
4362 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4364 /* A range without an upper bound is, naturally, unbounded.
4365 Since convert would have cropped a very large value, use
4366 the max value for the destination type. */
4368 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4369 : TYPE_MAX_VALUE (arg0_type
);
4371 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4372 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4373 fold_convert_loc (loc
, arg0_type
,
4375 build_int_cst (arg0_type
, 1));
4377 /* If the low bound is specified, "and" the range with the
4378 range for which the original unsigned value will be
4382 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4383 1, fold_convert_loc (loc
, arg0_type
,
4388 in_p
= (n_in_p
== in_p
);
4392 /* Otherwise, "or" the range with the range of the input
4393 that will be interpreted as negative. */
4394 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4395 1, fold_convert_loc (loc
, arg0_type
,
4400 in_p
= (in_p
!= n_in_p
);
4414 /* Given EXP, a logical expression, set the range it is testing into
4415 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4416 actually being tested. *PLOW and *PHIGH will be made of the same
4417 type as the returned expression. If EXP is not a comparison, we
4418 will most likely not be returning a useful value and range. Set
4419 *STRICT_OVERFLOW_P to true if the return value is only valid
4420 because signed overflow is undefined; otherwise, do not change
4421 *STRICT_OVERFLOW_P. */
4424 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4425 bool *strict_overflow_p
)
4427 enum tree_code code
;
4428 tree arg0
, arg1
= NULL_TREE
;
4429 tree exp_type
, nexp
;
4432 location_t loc
= EXPR_LOCATION (exp
);
4434 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4435 and see if we can refine the range. Some of the cases below may not
4436 happen, but it doesn't seem worth worrying about this. We "continue"
4437 the outer loop when we've changed something; otherwise we "break"
4438 the switch, which will "break" the while. */
4441 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4445 code
= TREE_CODE (exp
);
4446 exp_type
= TREE_TYPE (exp
);
4449 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4451 if (TREE_OPERAND_LENGTH (exp
) > 0)
4452 arg0
= TREE_OPERAND (exp
, 0);
4453 if (TREE_CODE_CLASS (code
) == tcc_binary
4454 || TREE_CODE_CLASS (code
) == tcc_comparison
4455 || (TREE_CODE_CLASS (code
) == tcc_expression
4456 && TREE_OPERAND_LENGTH (exp
) > 1))
4457 arg1
= TREE_OPERAND (exp
, 1);
4459 if (arg0
== NULL_TREE
)
4462 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4463 &high
, &in_p
, strict_overflow_p
);
4464 if (nexp
== NULL_TREE
)
4469 /* If EXP is a constant, we can evaluate whether this is true or false. */
4470 if (TREE_CODE (exp
) == INTEGER_CST
)
4472 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4474 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4480 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4484 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4485 type, TYPE, return an expression to test if EXP is in (or out of, depending
4486 on IN_P) the range. Return 0 if the test couldn't be created. */
4489 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4490 tree low
, tree high
)
4492 tree etype
= TREE_TYPE (exp
), value
;
4494 #ifdef HAVE_canonicalize_funcptr_for_compare
4495 /* Disable this optimization for function pointer expressions
4496 on targets that require function pointer canonicalization. */
4497 if (HAVE_canonicalize_funcptr_for_compare
4498 && TREE_CODE (etype
) == POINTER_TYPE
4499 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4505 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4507 return invert_truthvalue_loc (loc
, value
);
4512 if (low
== 0 && high
== 0)
4513 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
4516 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4517 fold_convert_loc (loc
, etype
, high
));
4520 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4521 fold_convert_loc (loc
, etype
, low
));
4523 if (operand_equal_p (low
, high
, 0))
4524 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4525 fold_convert_loc (loc
, etype
, low
));
4527 if (integer_zerop (low
))
4529 if (! TYPE_UNSIGNED (etype
))
4531 etype
= unsigned_type_for (etype
);
4532 high
= fold_convert_loc (loc
, etype
, high
);
4533 exp
= fold_convert_loc (loc
, etype
, exp
);
4535 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4538 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4539 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4541 int prec
= TYPE_PRECISION (etype
);
4543 if (wi::mask (prec
- 1, false, prec
) == high
)
4545 if (TYPE_UNSIGNED (etype
))
4547 tree signed_etype
= signed_type_for (etype
);
4548 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4550 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4552 etype
= signed_etype
;
4553 exp
= fold_convert_loc (loc
, etype
, exp
);
4555 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4556 build_int_cst (etype
, 0));
4560 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4561 This requires wrap-around arithmetics for the type of the expression.
4562 First make sure that arithmetics in this type is valid, then make sure
4563 that it wraps around. */
4564 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4565 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4566 TYPE_UNSIGNED (etype
));
4568 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4570 tree utype
, minv
, maxv
;
4572 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4573 for the type in question, as we rely on this here. */
4574 utype
= unsigned_type_for (etype
);
4575 maxv
= fold_convert_loc (loc
, utype
, TYPE_MAX_VALUE (etype
));
4576 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4577 build_int_cst (TREE_TYPE (maxv
), 1), 1);
4578 minv
= fold_convert_loc (loc
, utype
, TYPE_MIN_VALUE (etype
));
4580 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4587 high
= fold_convert_loc (loc
, etype
, high
);
4588 low
= fold_convert_loc (loc
, etype
, low
);
4589 exp
= fold_convert_loc (loc
, etype
, exp
);
4591 value
= const_binop (MINUS_EXPR
, high
, low
);
4594 if (POINTER_TYPE_P (etype
))
4596 if (value
!= 0 && !TREE_OVERFLOW (value
))
4598 low
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (low
), low
);
4599 return build_range_check (loc
, type
,
4600 fold_build_pointer_plus_loc (loc
, exp
, low
),
4601 1, build_int_cst (etype
, 0), value
);
4606 if (value
!= 0 && !TREE_OVERFLOW (value
))
4607 return build_range_check (loc
, type
,
4608 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4609 1, build_int_cst (etype
, 0), value
);
4614 /* Return the predecessor of VAL in its type, handling the infinite case. */
4617 range_predecessor (tree val
)
4619 tree type
= TREE_TYPE (val
);
4621 if (INTEGRAL_TYPE_P (type
)
4622 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4625 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
4626 build_int_cst (TREE_TYPE (val
), 1), 0);
4629 /* Return the successor of VAL in its type, handling the infinite case. */
4632 range_successor (tree val
)
4634 tree type
= TREE_TYPE (val
);
4636 if (INTEGRAL_TYPE_P (type
)
4637 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4640 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
4641 build_int_cst (TREE_TYPE (val
), 1), 0);
4644 /* Given two ranges, see if we can merge them into one. Return 1 if we
4645 can, 0 if we can't. Set the output range into the specified parameters. */
4648 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4649 tree high0
, int in1_p
, tree low1
, tree high1
)
4657 int lowequal
= ((low0
== 0 && low1
== 0)
4658 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4659 low0
, 0, low1
, 0)));
4660 int highequal
= ((high0
== 0 && high1
== 0)
4661 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4662 high0
, 1, high1
, 1)));
4664 /* Make range 0 be the range that starts first, or ends last if they
4665 start at the same value. Swap them if it isn't. */
4666 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4669 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4670 high1
, 1, high0
, 1))))
4672 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4673 tem
= low0
, low0
= low1
, low1
= tem
;
4674 tem
= high0
, high0
= high1
, high1
= tem
;
4677 /* Now flag two cases, whether the ranges are disjoint or whether the
4678 second range is totally subsumed in the first. Note that the tests
4679 below are simplified by the ones above. */
4680 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4681 high0
, 1, low1
, 0));
4682 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4683 high1
, 1, high0
, 1));
4685 /* We now have four cases, depending on whether we are including or
4686 excluding the two ranges. */
4689 /* If they don't overlap, the result is false. If the second range
4690 is a subset it is the result. Otherwise, the range is from the start
4691 of the second to the end of the first. */
4693 in_p
= 0, low
= high
= 0;
4695 in_p
= 1, low
= low1
, high
= high1
;
4697 in_p
= 1, low
= low1
, high
= high0
;
4700 else if (in0_p
&& ! in1_p
)
4702 /* If they don't overlap, the result is the first range. If they are
4703 equal, the result is false. If the second range is a subset of the
4704 first, and the ranges begin at the same place, we go from just after
4705 the end of the second range to the end of the first. If the second
4706 range is not a subset of the first, or if it is a subset and both
4707 ranges end at the same place, the range starts at the start of the
4708 first range and ends just before the second range.
4709 Otherwise, we can't describe this as a single range. */
4711 in_p
= 1, low
= low0
, high
= high0
;
4712 else if (lowequal
&& highequal
)
4713 in_p
= 0, low
= high
= 0;
4714 else if (subset
&& lowequal
)
4716 low
= range_successor (high1
);
4721 /* We are in the weird situation where high0 > high1 but
4722 high1 has no successor. Punt. */
4726 else if (! subset
|| highequal
)
4729 high
= range_predecessor (low1
);
4733 /* low0 < low1 but low1 has no predecessor. Punt. */
4741 else if (! in0_p
&& in1_p
)
4743 /* If they don't overlap, the result is the second range. If the second
4744 is a subset of the first, the result is false. Otherwise,
4745 the range starts just after the first range and ends at the
4746 end of the second. */
4748 in_p
= 1, low
= low1
, high
= high1
;
4749 else if (subset
|| highequal
)
4750 in_p
= 0, low
= high
= 0;
4753 low
= range_successor (high0
);
4758 /* high1 > high0 but high0 has no successor. Punt. */
4766 /* The case where we are excluding both ranges. Here the complex case
4767 is if they don't overlap. In that case, the only time we have a
4768 range is if they are adjacent. If the second is a subset of the
4769 first, the result is the first. Otherwise, the range to exclude
4770 starts at the beginning of the first range and ends at the end of the
4774 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4775 range_successor (high0
),
4777 in_p
= 0, low
= low0
, high
= high1
;
4780 /* Canonicalize - [min, x] into - [-, x]. */
4781 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4782 switch (TREE_CODE (TREE_TYPE (low0
)))
4785 if (TYPE_PRECISION (TREE_TYPE (low0
))
4786 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4790 if (tree_int_cst_equal (low0
,
4791 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4795 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4796 && integer_zerop (low0
))
4803 /* Canonicalize - [x, max] into - [x, -]. */
4804 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4805 switch (TREE_CODE (TREE_TYPE (high1
)))
4808 if (TYPE_PRECISION (TREE_TYPE (high1
))
4809 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4813 if (tree_int_cst_equal (high1
,
4814 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4818 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4819 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4821 build_int_cst (TREE_TYPE (high1
), 1),
4829 /* The ranges might be also adjacent between the maximum and
4830 minimum values of the given type. For
4831 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4832 return + [x + 1, y - 1]. */
4833 if (low0
== 0 && high1
== 0)
4835 low
= range_successor (high0
);
4836 high
= range_predecessor (low1
);
4837 if (low
== 0 || high
== 0)
4847 in_p
= 0, low
= low0
, high
= high0
;
4849 in_p
= 0, low
= low0
, high
= high1
;
4852 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4857 /* Subroutine of fold, looking inside expressions of the form
4858 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4859 of the COND_EXPR. This function is being used also to optimize
4860 A op B ? C : A, by reversing the comparison first.
4862 Return a folded expression whose code is not a COND_EXPR
4863 anymore, or NULL_TREE if no folding opportunity is found. */
4866 fold_cond_expr_with_comparison (location_t loc
, tree type
,
4867 tree arg0
, tree arg1
, tree arg2
)
4869 enum tree_code comp_code
= TREE_CODE (arg0
);
4870 tree arg00
= TREE_OPERAND (arg0
, 0);
4871 tree arg01
= TREE_OPERAND (arg0
, 1);
4872 tree arg1_type
= TREE_TYPE (arg1
);
4878 /* If we have A op 0 ? A : -A, consider applying the following
4881 A == 0? A : -A same as -A
4882 A != 0? A : -A same as A
4883 A >= 0? A : -A same as abs (A)
4884 A > 0? A : -A same as abs (A)
4885 A <= 0? A : -A same as -abs (A)
4886 A < 0? A : -A same as -abs (A)
4888 None of these transformations work for modes with signed
4889 zeros. If A is +/-0, the first two transformations will
4890 change the sign of the result (from +0 to -0, or vice
4891 versa). The last four will fix the sign of the result,
4892 even though the original expressions could be positive or
4893 negative, depending on the sign of A.
4895 Note that all these transformations are correct if A is
4896 NaN, since the two alternatives (A and -A) are also NaNs. */
4897 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4898 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4899 ? real_zerop (arg01
)
4900 : integer_zerop (arg01
))
4901 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4902 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4903 /* In the case that A is of the form X-Y, '-A' (arg2) may
4904 have already been folded to Y-X, check for that. */
4905 || (TREE_CODE (arg1
) == MINUS_EXPR
4906 && TREE_CODE (arg2
) == MINUS_EXPR
4907 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4908 TREE_OPERAND (arg2
, 1), 0)
4909 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4910 TREE_OPERAND (arg2
, 0), 0))))
4915 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
4916 return pedantic_non_lvalue_loc (loc
,
4917 fold_convert_loc (loc
, type
,
4918 negate_expr (tem
)));
4921 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4924 if (flag_trapping_math
)
4929 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4930 arg1
= fold_convert_loc (loc
, signed_type_for
4931 (TREE_TYPE (arg1
)), arg1
);
4932 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4933 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4936 if (flag_trapping_math
)
4940 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4941 arg1
= fold_convert_loc (loc
, signed_type_for
4942 (TREE_TYPE (arg1
)), arg1
);
4943 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4944 return negate_expr (fold_convert_loc (loc
, type
, tem
));
4946 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4950 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4951 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4952 both transformations are correct when A is NaN: A != 0
4953 is then true, and A == 0 is false. */
4955 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4956 && integer_zerop (arg01
) && integer_zerop (arg2
))
4958 if (comp_code
== NE_EXPR
)
4959 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4960 else if (comp_code
== EQ_EXPR
)
4961 return build_zero_cst (type
);
4964 /* Try some transformations of A op B ? A : B.
4966 A == B? A : B same as B
4967 A != B? A : B same as A
4968 A >= B? A : B same as max (A, B)
4969 A > B? A : B same as max (B, A)
4970 A <= B? A : B same as min (A, B)
4971 A < B? A : B same as min (B, A)
4973 As above, these transformations don't work in the presence
4974 of signed zeros. For example, if A and B are zeros of
4975 opposite sign, the first two transformations will change
4976 the sign of the result. In the last four, the original
4977 expressions give different results for (A=+0, B=-0) and
4978 (A=-0, B=+0), but the transformed expressions do not.
4980 The first two transformations are correct if either A or B
4981 is a NaN. In the first transformation, the condition will
4982 be false, and B will indeed be chosen. In the case of the
4983 second transformation, the condition A != B will be true,
4984 and A will be chosen.
4986 The conversions to max() and min() are not correct if B is
4987 a number and A is not. The conditions in the original
4988 expressions will be false, so all four give B. The min()
4989 and max() versions would give a NaN instead. */
4990 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4991 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4992 /* Avoid these transformations if the COND_EXPR may be used
4993 as an lvalue in the C++ front-end. PR c++/19199. */
4995 || VECTOR_TYPE_P (type
)
4996 || (! lang_GNU_CXX ()
4997 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4998 || ! maybe_lvalue_p (arg1
)
4999 || ! maybe_lvalue_p (arg2
)))
5001 tree comp_op0
= arg00
;
5002 tree comp_op1
= arg01
;
5003 tree comp_type
= TREE_TYPE (comp_op0
);
5005 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5006 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
5016 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
5018 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
5023 /* In C++ a ?: expression can be an lvalue, so put the
5024 operand which will be used if they are equal first
5025 so that we can convert this back to the
5026 corresponding COND_EXPR. */
5027 if (!HONOR_NANS (arg1
))
5029 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5030 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5031 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
5032 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
5033 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
5034 comp_op1
, comp_op0
);
5035 return pedantic_non_lvalue_loc (loc
,
5036 fold_convert_loc (loc
, type
, tem
));
5043 if (!HONOR_NANS (arg1
))
5045 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5046 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5047 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
5048 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
5049 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
5050 comp_op1
, comp_op0
);
5051 return pedantic_non_lvalue_loc (loc
,
5052 fold_convert_loc (loc
, type
, tem
));
5056 if (!HONOR_NANS (arg1
))
5057 return pedantic_non_lvalue_loc (loc
,
5058 fold_convert_loc (loc
, type
, arg2
));
5061 if (!HONOR_NANS (arg1
))
5062 return pedantic_non_lvalue_loc (loc
,
5063 fold_convert_loc (loc
, type
, arg1
));
5066 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5071 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5072 we might still be able to simplify this. For example,
5073 if C1 is one less or one more than C2, this might have started
5074 out as a MIN or MAX and been transformed by this function.
5075 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5077 if (INTEGRAL_TYPE_P (type
)
5078 && TREE_CODE (arg01
) == INTEGER_CST
5079 && TREE_CODE (arg2
) == INTEGER_CST
)
5083 if (TREE_CODE (arg1
) == INTEGER_CST
)
5085 /* We can replace A with C1 in this case. */
5086 arg1
= fold_convert_loc (loc
, type
, arg01
);
5087 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
5090 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5091 MIN_EXPR, to preserve the signedness of the comparison. */
5092 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5094 && operand_equal_p (arg01
,
5095 const_binop (PLUS_EXPR
, arg2
,
5096 build_int_cst (type
, 1)),
5099 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
5100 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5102 return pedantic_non_lvalue_loc (loc
,
5103 fold_convert_loc (loc
, type
, tem
));
5108 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5110 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5112 && operand_equal_p (arg01
,
5113 const_binop (MINUS_EXPR
, arg2
,
5114 build_int_cst (type
, 1)),
5117 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
5118 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5120 return pedantic_non_lvalue_loc (loc
,
5121 fold_convert_loc (loc
, type
, tem
));
5126 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5127 MAX_EXPR, to preserve the signedness of the comparison. */
5128 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5130 && operand_equal_p (arg01
,
5131 const_binop (MINUS_EXPR
, arg2
,
5132 build_int_cst (type
, 1)),
5135 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5136 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5138 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5143 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5144 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5146 && operand_equal_p (arg01
,
5147 const_binop (PLUS_EXPR
, arg2
,
5148 build_int_cst (type
, 1)),
5151 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5152 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5154 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
/* Nonzero when, on branch-cost grounds, a short-circuit logical op should
   be turned into its non-short-circuit (bitwise) form.  Targets may
   pre-define this macro to override the branch-cost heuristic.
   NOTE(review): the macro's continuation lines and #endif were lost in
   extraction and have been restored — verify against upstream.  */
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
5174 /* EXP is some logical combination of boolean tests. See if we can
5175 merge it into some range test. Return the new tree if so. */
5178 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
5181 int or_op
= (code
== TRUTH_ORIF_EXPR
5182 || code
== TRUTH_OR_EXPR
);
5183 int in0_p
, in1_p
, in_p
;
5184 tree low0
, low1
, low
, high0
, high1
, high
;
5185 bool strict_overflow_p
= false;
5187 const char * const warnmsg
= G_("assuming signed overflow does not occur "
5188 "when simplifying range test");
5190 if (!INTEGRAL_TYPE_P (type
))
5193 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
5194 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
5196 /* If this is an OR operation, invert both sides; we will invert
5197 again at the end. */
5199 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
5201 /* If both expressions are the same, if we can merge the ranges, and we
5202 can build the range test, return it or it inverted. If one of the
5203 ranges is always true or always false, consider it to be the same
5204 expression as the other. */
5205 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
5206 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
5208 && 0 != (tem
= (build_range_check (loc
, type
,
5210 : rhs
!= 0 ? rhs
: integer_zero_node
,
5213 if (strict_overflow_p
)
5214 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
5215 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
5218 /* On machines where the branch cost is expensive, if this is a
5219 short-circuited branch and the underlying object on both sides
5220 is the same, make a non-short-circuit operation. */
5221 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5222 && lhs
!= 0 && rhs
!= 0
5223 && (code
== TRUTH_ANDIF_EXPR
5224 || code
== TRUTH_ORIF_EXPR
)
5225 && operand_equal_p (lhs
, rhs
, 0))
5227 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5228 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5229 which cases we can't do this. */
5230 if (simple_operand_p (lhs
))
5231 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5232 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5235 else if (!lang_hooks
.decls
.global_bindings_p ()
5236 && !CONTAINS_PLACEHOLDER_P (lhs
))
5238 tree common
= save_expr (lhs
);
5240 if (0 != (lhs
= build_range_check (loc
, type
, common
,
5241 or_op
? ! in0_p
: in0_p
,
5243 && (0 != (rhs
= build_range_check (loc
, type
, common
,
5244 or_op
? ! in1_p
: in1_p
,
5247 if (strict_overflow_p
)
5248 fold_overflow_warning (warnmsg
,
5249 WARN_STRICT_OVERFLOW_COMPARISON
);
5250 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5251 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5260 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5261 bit value. Arrange things so the extra bits will be set to zero if and
5262 only if C is signed-extended to its full width. If MASK is nonzero,
5263 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5266 unextend (tree c
, int p
, int unsignedp
, tree mask
)
5268 tree type
= TREE_TYPE (c
);
5269 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
5272 if (p
== modesize
|| unsignedp
)
5275 /* We work by getting just the sign bit into the low-order bit, then
5276 into the high-order bit, then sign-extend. We then XOR that value
5278 temp
= build_int_cst (TREE_TYPE (c
), wi::extract_uhwi (c
, p
- 1, 1));
5280 /* We must use a signed type in order to get an arithmetic right shift.
5281 However, we must also avoid introducing accidental overflows, so that
5282 a subsequent call to integer_zerop will work. Hence we must
5283 do the type conversion here. At this point, the constant is either
5284 zero or one, and the conversion to a signed type can never overflow.
5285 We could get an overflow if this conversion is done anywhere else. */
5286 if (TYPE_UNSIGNED (type
))
5287 temp
= fold_convert (signed_type_for (type
), temp
);
5289 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
5290 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
5292 temp
= const_binop (BIT_AND_EXPR
, temp
,
5293 fold_convert (TREE_TYPE (c
), mask
));
5294 /* If necessary, convert the type back to match the type of C. */
5295 if (TYPE_UNSIGNED (type
))
5296 temp
= fold_convert (type
, temp
);
5298 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
5301 /* For an expression that has the form
5305 we can drop one of the inner expressions and simplify to
5309 LOC is the location of the resulting expression. OP is the inner
5310 logical operation; the left-hand side in the examples above, while CMPOP
5311 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5312 removing a condition that guards another, as in
5313 (A != NULL && A->...) || A == NULL
5314 which we must not transform. If RHS_ONLY is true, only eliminate the
5315 right-most operand of the inner logical operation. */
5318 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
5321 tree type
= TREE_TYPE (cmpop
);
5322 enum tree_code code
= TREE_CODE (cmpop
);
5323 enum tree_code truthop_code
= TREE_CODE (op
);
5324 tree lhs
= TREE_OPERAND (op
, 0);
5325 tree rhs
= TREE_OPERAND (op
, 1);
5326 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
5327 enum tree_code rhs_code
= TREE_CODE (rhs
);
5328 enum tree_code lhs_code
= TREE_CODE (lhs
);
5329 enum tree_code inv_code
;
5331 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
5334 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
5337 if (rhs_code
== truthop_code
)
5339 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
5340 if (newrhs
!= NULL_TREE
)
5343 rhs_code
= TREE_CODE (rhs
);
5346 if (lhs_code
== truthop_code
&& !rhs_only
)
5348 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
5349 if (newlhs
!= NULL_TREE
)
5352 lhs_code
= TREE_CODE (lhs
);
5356 inv_code
= invert_tree_comparison (code
, HONOR_NANS (type
));
5357 if (inv_code
== rhs_code
5358 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5359 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5361 if (!rhs_only
&& inv_code
== lhs_code
5362 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5363 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5365 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
5366 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5371 /* Find ways of folding logical expressions of LHS and RHS:
5372 Try to merge two comparisons to the same innermost item.
5373 Look for range tests like "ch >= '0' && ch <= '9'".
5374 Look for combinations of simple terms on machines with expensive branches
5375 and evaluate the RHS unconditionally.
5377 For example, if we have p->a == 2 && p->b == 4 and we can make an
5378 object large enough to span both A and B, we can do this with a comparison
5379 against the object ANDed with the a mask.
5381 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5382 operations to do this with one comparison.
5384 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5385 function and the one above.
5387 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5388 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5390 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5393 We return the simplified tree or 0 if no optimization is possible. */
5396 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
5399 /* If this is the "or" of two comparisons, we can do something if
5400 the comparisons are NE_EXPR. If this is the "and", we can do something
5401 if the comparisons are EQ_EXPR. I.e.,
5402 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5404 WANTED_CODE is this operation code. For single bit fields, we can
5405 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5406 comparison for one-bit fields. */
5408 enum tree_code wanted_code
;
5409 enum tree_code lcode
, rcode
;
5410 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5411 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5412 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5413 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5414 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5415 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5416 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5417 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5418 machine_mode lnmode
, rnmode
;
5419 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5420 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5421 tree l_const
, r_const
;
5422 tree lntype
, rntype
, result
;
5423 HOST_WIDE_INT first_bit
, end_bit
;
5426 /* Start by getting the comparison codes. Fail if anything is volatile.
5427 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5428 it were surrounded with a NE_EXPR. */
5430 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5433 lcode
= TREE_CODE (lhs
);
5434 rcode
= TREE_CODE (rhs
);
5436 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5438 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5439 build_int_cst (TREE_TYPE (lhs
), 0));
5443 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5445 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5446 build_int_cst (TREE_TYPE (rhs
), 0));
5450 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5451 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5454 ll_arg
= TREE_OPERAND (lhs
, 0);
5455 lr_arg
= TREE_OPERAND (lhs
, 1);
5456 rl_arg
= TREE_OPERAND (rhs
, 0);
5457 rr_arg
= TREE_OPERAND (rhs
, 1);
5459 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5460 if (simple_operand_p (ll_arg
)
5461 && simple_operand_p (lr_arg
))
5463 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5464 && operand_equal_p (lr_arg
, rr_arg
, 0))
5466 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5467 truth_type
, ll_arg
, lr_arg
);
5471 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5472 && operand_equal_p (lr_arg
, rl_arg
, 0))
5474 result
= combine_comparisons (loc
, code
, lcode
,
5475 swap_tree_comparison (rcode
),
5476 truth_type
, ll_arg
, lr_arg
);
5482 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5483 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5485 /* If the RHS can be evaluated unconditionally and its operands are
5486 simple, it wins to evaluate the RHS unconditionally on machines
5487 with expensive branches. In this case, this isn't a comparison
5488 that can be merged. */
5490 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5492 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5493 && simple_operand_p (rl_arg
)
5494 && simple_operand_p (rr_arg
))
5496 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5497 if (code
== TRUTH_OR_EXPR
5498 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5499 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5500 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5501 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5502 return build2_loc (loc
, NE_EXPR
, truth_type
,
5503 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5505 build_int_cst (TREE_TYPE (ll_arg
), 0));
5507 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5508 if (code
== TRUTH_AND_EXPR
5509 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5510 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5511 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5512 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5513 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5514 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5516 build_int_cst (TREE_TYPE (ll_arg
), 0));
5519 /* See if the comparisons can be merged. Then get all the parameters for
5522 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5523 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5527 ll_inner
= decode_field_reference (loc
, ll_arg
,
5528 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5529 &ll_unsignedp
, &volatilep
, &ll_mask
,
5531 lr_inner
= decode_field_reference (loc
, lr_arg
,
5532 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5533 &lr_unsignedp
, &volatilep
, &lr_mask
,
5535 rl_inner
= decode_field_reference (loc
, rl_arg
,
5536 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5537 &rl_unsignedp
, &volatilep
, &rl_mask
,
5539 rr_inner
= decode_field_reference (loc
, rr_arg
,
5540 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5541 &rr_unsignedp
, &volatilep
, &rr_mask
,
5544 /* It must be true that the inner operation on the lhs of each
5545 comparison must be the same if we are to be able to do anything.
5546 Then see if we have constants. If not, the same must be true for
5548 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5549 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5552 if (TREE_CODE (lr_arg
) == INTEGER_CST
5553 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5554 l_const
= lr_arg
, r_const
= rr_arg
;
5555 else if (lr_inner
== 0 || rr_inner
== 0
5556 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5559 l_const
= r_const
= 0;
5561 /* If either comparison code is not correct for our logical operation,
5562 fail. However, we can convert a one-bit comparison against zero into
5563 the opposite comparison against that bit being set in the field. */
5565 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5566 if (lcode
!= wanted_code
)
5568 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5570 /* Make the left operand unsigned, since we are only interested
5571 in the value of one bit. Otherwise we are doing the wrong
5580 /* This is analogous to the code for l_const above. */
5581 if (rcode
!= wanted_code
)
5583 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5592 /* See if we can find a mode that contains both fields being compared on
5593 the left. If we can't, fail. Otherwise, update all constants and masks
5594 to be relative to a field of that size. */
5595 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5596 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5597 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5598 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5600 if (lnmode
== VOIDmode
)
5603 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5604 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5605 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5606 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5608 if (BYTES_BIG_ENDIAN
)
5610 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5611 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5614 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5615 size_int (xll_bitpos
));
5616 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5617 size_int (xrl_bitpos
));
5621 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5622 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5623 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5624 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5625 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5628 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5630 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5635 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5636 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5637 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5638 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5639 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5642 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5644 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5648 /* If the right sides are not constant, do the same for it. Also,
5649 disallow this optimization if a size or signedness mismatch occurs
5650 between the left and right sides. */
5653 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5654 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5655 /* Make sure the two fields on the right
5656 correspond to the left without being swapped. */
5657 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5660 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5661 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5662 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5663 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5665 if (rnmode
== VOIDmode
)
5668 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5669 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5670 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5671 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5673 if (BYTES_BIG_ENDIAN
)
5675 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5676 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5679 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5681 size_int (xlr_bitpos
));
5682 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5684 size_int (xrr_bitpos
));
5686 /* Make a mask that corresponds to both fields being compared.
5687 Do this for both items being compared. If the operands are the
5688 same size and the bits being compared are in the same position
5689 then we can do this by masking both and comparing the masked
5691 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5692 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5693 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5695 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5696 ll_unsignedp
|| rl_unsignedp
);
5697 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5698 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5700 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5701 lr_unsignedp
|| rr_unsignedp
);
5702 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5703 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5705 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5708 /* There is still another way we can do something: If both pairs of
5709 fields being compared are adjacent, we may be able to make a wider
5710 field containing them both.
5712 Note that we still must mask the lhs/rhs expressions. Furthermore,
5713 the mask must be shifted to account for the shift done by
5714 make_bit_field_ref. */
5715 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5716 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5717 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5718 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5722 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5723 ll_bitsize
+ rl_bitsize
,
5724 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5725 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5726 lr_bitsize
+ rr_bitsize
,
5727 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5729 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5730 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5731 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5732 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5734 /* Convert to the smaller type before masking out unwanted bits. */
5736 if (lntype
!= rntype
)
5738 if (lnbitsize
> rnbitsize
)
5740 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5741 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5744 else if (lnbitsize
< rnbitsize
)
5746 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5747 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5752 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5753 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5755 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5756 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5758 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5764 /* Handle the case of comparisons with constants. If there is something in
5765 common between the masks, those bits of the constants must be the same.
5766 If not, the condition is always false. Test for this to avoid generating
5767 incorrect code below. */
5768 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5769 if (! integer_zerop (result
)
5770 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5771 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5773 if (wanted_code
== NE_EXPR
)
5775 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5776 return constant_boolean_node (true, truth_type
);
5780 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5781 return constant_boolean_node (false, truth_type
);
5785 /* Construct the expression we will return. First get the component
5786 reference we will make. Unless the mask is all ones the width of
5787 that field, perform the mask operation. Then compare with the
5789 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5790 ll_unsignedp
|| rl_unsignedp
);
5792 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5793 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5794 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5796 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5797 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
5800 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5804 optimize_minmax_comparison (location_t loc
, enum tree_code code
, tree type
,
5808 enum tree_code op_code
;
5811 int consts_equal
, consts_lt
;
5814 STRIP_SIGN_NOPS (arg0
);
5816 op_code
= TREE_CODE (arg0
);
5817 minmax_const
= TREE_OPERAND (arg0
, 1);
5818 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5819 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5820 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5821 inner
= TREE_OPERAND (arg0
, 0);
5823 /* If something does not permit us to optimize, return the original tree. */
5824 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5825 || TREE_CODE (comp_const
) != INTEGER_CST
5826 || TREE_OVERFLOW (comp_const
)
5827 || TREE_CODE (minmax_const
) != INTEGER_CST
5828 || TREE_OVERFLOW (minmax_const
))
5831 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5832 and GT_EXPR, doing the rest with recursive calls using logical
5836 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5839 = optimize_minmax_comparison (loc
,
5840 invert_tree_comparison (code
, false),
5843 return invert_truthvalue_loc (loc
, tem
);
5849 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5850 optimize_minmax_comparison
5851 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5852 optimize_minmax_comparison
5853 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
5856 if (op_code
== MAX_EXPR
&& consts_equal
)
5857 /* MAX (X, 0) == 0 -> X <= 0 */
5858 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5860 else if (op_code
== MAX_EXPR
&& consts_lt
)
5861 /* MAX (X, 0) == 5 -> X == 5 */
5862 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5864 else if (op_code
== MAX_EXPR
)
5865 /* MAX (X, 0) == -1 -> false */
5866 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5868 else if (consts_equal
)
5869 /* MIN (X, 0) == 0 -> X >= 0 */
5870 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5873 /* MIN (X, 0) == 5 -> false */
5874 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5877 /* MIN (X, 0) == -1 -> X == -1 */
5878 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5881 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5882 /* MAX (X, 0) > 0 -> X > 0
5883 MAX (X, 0) > 5 -> X > 5 */
5884 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5886 else if (op_code
== MAX_EXPR
)
5887 /* MAX (X, 0) > -1 -> true */
5888 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5890 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5891 /* MIN (X, 0) > 0 -> false
5892 MIN (X, 0) > 5 -> false */
5893 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5896 /* MIN (X, 0) > -1 -> X > -1 */
5897 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5904 /* T is an integer expression that is being multiplied, divided, or taken a
5905 modulus (CODE says which and what kind of divide or modulus) by a
5906 constant C. See if we can eliminate that operation by folding it with
5907 other operations already in T. WIDE_TYPE, if non-null, is a type that
5908 should be used for the computation if wider than our type.
5910 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5911 (X * 2) + (Y * 4). We must, however, be assured that either the original
5912 expression would not overflow or that overflow is undefined for the type
5913 in the language in question.
5915 If we return a non-null expression, it is an equivalent form of the
5916 original computation, but need not be in the original type.
5918 We set *STRICT_OVERFLOW_P to true if the return values depends on
5919 signed overflow being undefined. Otherwise we do not change
5920 *STRICT_OVERFLOW_P. */
5923 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5924 bool *strict_overflow_p
)
5926 /* To avoid exponential search depth, refuse to allow recursion past
5927 three levels. Beyond that (1) it's highly unlikely that we'll find
5928 something interesting and (2) we've probably processed it before
5929 when we built the inner expression. */
5938 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5945 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5946 bool *strict_overflow_p
)
5948 tree type
= TREE_TYPE (t
);
5949 enum tree_code tcode
= TREE_CODE (t
);
5950 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5951 > GET_MODE_SIZE (TYPE_MODE (type
)))
5952 ? wide_type
: type
);
5954 int same_p
= tcode
== code
;
5955 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5956 bool sub_strict_overflow_p
;
5958 /* Don't deal with constants of zero here; they confuse the code below. */
5959 if (integer_zerop (c
))
5962 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5963 op0
= TREE_OPERAND (t
, 0);
5965 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5966 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5968 /* Note that we need not handle conditional operations here since fold
5969 already handles those cases. So just do arithmetic here. */
5973 /* For a constant, we can always simplify if we are a multiply
5974 or (for divide and modulus) if it is a multiple of our constant. */
5975 if (code
== MULT_EXPR
5976 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
5977 return const_binop (code
, fold_convert (ctype
, t
),
5978 fold_convert (ctype
, c
));
5981 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5982 /* If op0 is an expression ... */
5983 if ((COMPARISON_CLASS_P (op0
)
5984 || UNARY_CLASS_P (op0
)
5985 || BINARY_CLASS_P (op0
)
5986 || VL_EXP_CLASS_P (op0
)
5987 || EXPRESSION_CLASS_P (op0
))
5988 /* ... and has wrapping overflow, and its type is smaller
5989 than ctype, then we cannot pass through as widening. */
5990 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
5991 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
)))
5992 && (TYPE_PRECISION (ctype
)
5993 > TYPE_PRECISION (TREE_TYPE (op0
))))
5994 /* ... or this is a truncation (t is narrower than op0),
5995 then we cannot pass through this narrowing. */
5996 || (TYPE_PRECISION (type
)
5997 < TYPE_PRECISION (TREE_TYPE (op0
)))
5998 /* ... or signedness changes for division or modulus,
5999 then we cannot pass through this conversion. */
6000 || (code
!= MULT_EXPR
6001 && (TYPE_UNSIGNED (ctype
)
6002 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
6003 /* ... or has undefined overflow while the converted to
6004 type has not, we cannot do the operation in the inner type
6005 as that would introduce undefined overflow. */
6006 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6007 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
)))
6008 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6011 /* Pass the constant down and see if we can make a simplification. If
6012 we can, replace this expression with the inner simplification for
6013 possible later conversion to our or some other type. */
6014 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6015 && TREE_CODE (t2
) == INTEGER_CST
6016 && !TREE_OVERFLOW (t2
)
6017 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
6019 ? ctype
: NULL_TREE
,
6020 strict_overflow_p
))))
6025 /* If widening the type changes it from signed to unsigned, then we
6026 must avoid building ABS_EXPR itself as unsigned. */
6027 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6029 tree cstype
= (*signed_type_for
) (ctype
);
6030 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6033 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6034 return fold_convert (ctype
, t1
);
6038 /* If the constant is negative, we cannot simplify this. */
6039 if (tree_int_cst_sgn (c
) == -1)
6043 /* For division and modulus, type can't be unsigned, as e.g.
6044 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6045 For signed types, even with wrapping overflow, this is fine. */
6046 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6048 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6050 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6053 case MIN_EXPR
: case MAX_EXPR
:
6054 /* If widening the type changes the signedness, then we can't perform
6055 this optimization as that changes the result. */
6056 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6059 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6060 sub_strict_overflow_p
= false;
6061 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6062 &sub_strict_overflow_p
)) != 0
6063 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6064 &sub_strict_overflow_p
)) != 0)
6066 if (tree_int_cst_sgn (c
) < 0)
6067 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6068 if (sub_strict_overflow_p
)
6069 *strict_overflow_p
= true;
6070 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6071 fold_convert (ctype
, t2
));
6075 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6076 /* If the second operand is constant, this is a multiplication
6077 or floor division, by a power of two, so we can treat it that
6078 way unless the multiplier or divisor overflows. Signed
6079 left-shift overflow is implementation-defined rather than
6080 undefined in C90, so do not convert signed left shift into
6082 if (TREE_CODE (op1
) == INTEGER_CST
6083 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6084 /* const_binop may not detect overflow correctly,
6085 so check for it explicitly here. */
6086 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
6087 && 0 != (t1
= fold_convert (ctype
,
6088 const_binop (LSHIFT_EXPR
,
6091 && !TREE_OVERFLOW (t1
))
6092 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6093 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6095 fold_convert (ctype
, op0
),
6097 c
, code
, wide_type
, strict_overflow_p
);
6100 case PLUS_EXPR
: case MINUS_EXPR
:
6101 /* See if we can eliminate the operation on both sides. If we can, we
6102 can return a new PLUS or MINUS. If we can't, the only remaining
6103 cases where we can do anything are if the second operand is a
6105 sub_strict_overflow_p
= false;
6106 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6107 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6108 if (t1
!= 0 && t2
!= 0
6109 && (code
== MULT_EXPR
6110 /* If not multiplication, we can only do this if both operands
6111 are divisible by c. */
6112 || (multiple_of_p (ctype
, op0
, c
)
6113 && multiple_of_p (ctype
, op1
, c
))))
6115 if (sub_strict_overflow_p
)
6116 *strict_overflow_p
= true;
6117 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6118 fold_convert (ctype
, t2
));
6121 /* If this was a subtraction, negate OP1 and set it to be an addition.
6122 This simplifies the logic below. */
6123 if (tcode
== MINUS_EXPR
)
6125 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
6126 /* If OP1 was not easily negatable, the constant may be OP0. */
6127 if (TREE_CODE (op0
) == INTEGER_CST
)
6138 if (TREE_CODE (op1
) != INTEGER_CST
)
6141 /* If either OP1 or C are negative, this optimization is not safe for
6142 some of the division and remainder types while for others we need
6143 to change the code. */
6144 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
6146 if (code
== CEIL_DIV_EXPR
)
6147 code
= FLOOR_DIV_EXPR
;
6148 else if (code
== FLOOR_DIV_EXPR
)
6149 code
= CEIL_DIV_EXPR
;
6150 else if (code
!= MULT_EXPR
6151 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
6155 /* If it's a multiply or a division/modulus operation of a multiple
6156 of our constant, do the operation and verify it doesn't overflow. */
6157 if (code
== MULT_EXPR
6158 || wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6160 op1
= const_binop (code
, fold_convert (ctype
, op1
),
6161 fold_convert (ctype
, c
));
6162 /* We allow the constant to overflow with wrapping semantics. */
6164 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
6170 /* If we have an unsigned type, we cannot widen the operation since it
6171 will change the result if the original computation overflowed. */
6172 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
6175 /* If we were able to eliminate our operation from the first side,
6176 apply our operation to the second side and reform the PLUS. */
6177 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
6178 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
6180 /* The last case is if we are a multiply. In that case, we can
6181 apply the distributive law to commute the multiply and addition
6182 if the multiplication of the constants doesn't overflow
6183 and overflow is defined. With undefined overflow
6184 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6185 if (code
== MULT_EXPR
&& TYPE_OVERFLOW_WRAPS (ctype
))
6186 return fold_build2 (tcode
, ctype
,
6187 fold_build2 (code
, ctype
,
6188 fold_convert (ctype
, op0
),
6189 fold_convert (ctype
, c
)),
6195 /* We have a special case here if we are doing something like
6196 (C * 8) % 4 since we know that's zero. */
6197 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
6198 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
6199 /* If the multiplication can overflow we cannot optimize this. */
6200 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
6201 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
6202 && wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6204 *strict_overflow_p
= true;
6205 return omit_one_operand (type
, integer_zero_node
, op0
);
6208 /* ... fall through ... */
6210 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6211 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
6212 /* If we can extract our operation from the LHS, do so and return a
6213 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6214 do something only if the second operand is a constant. */
6216 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6217 strict_overflow_p
)) != 0)
6218 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6219 fold_convert (ctype
, op1
));
6220 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
6221 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
6222 strict_overflow_p
)) != 0)
6223 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6224 fold_convert (ctype
, t1
));
6225 else if (TREE_CODE (op1
) != INTEGER_CST
)
6228 /* If these are the same operation types, we can associate them
6229 assuming no overflow. */
6232 bool overflow_p
= false;
6233 bool overflow_mul_p
;
6234 signop sign
= TYPE_SIGN (ctype
);
6235 wide_int mul
= wi::mul (op1
, c
, sign
, &overflow_mul_p
);
6236 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
6238 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
6241 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6242 wide_int_to_tree (ctype
, mul
));
6245 /* If these operations "cancel" each other, we have the main
6246 optimizations of this pass, which occur when either constant is a
6247 multiple of the other, in which case we replace this with either an
6248 operation or CODE or TCODE.
6250 If we have an unsigned type, we cannot do this since it will change
6251 the result if the original computation overflowed. */
6252 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
6253 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
6254 || (tcode
== MULT_EXPR
6255 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
6256 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6257 && code
!= MULT_EXPR
)))
6259 if (wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6261 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6262 *strict_overflow_p
= true;
6263 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6264 fold_convert (ctype
,
6265 const_binop (TRUNC_DIV_EXPR
,
6268 else if (wi::multiple_of_p (c
, op1
, TYPE_SIGN (type
)))
6270 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6271 *strict_overflow_p
= true;
6272 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6273 fold_convert (ctype
,
6274 const_binop (TRUNC_DIV_EXPR
,
6287 /* Return a node which has the indicated constant VALUE (either 0 or
6288 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6289 and is of the indicated TYPE. */
6292 constant_boolean_node (bool value
, tree type
)
6294 if (type
== integer_type_node
)
6295 return value
? integer_one_node
: integer_zero_node
;
6296 else if (type
== boolean_type_node
)
6297 return value
? boolean_true_node
: boolean_false_node
;
6298 else if (TREE_CODE (type
) == VECTOR_TYPE
)
6299 return build_vector_from_val (type
,
6300 build_int_cst (TREE_TYPE (type
),
6303 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
6307 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6308 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6309 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6310 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6311 COND is the first argument to CODE; otherwise (as in the example
6312 given here), it is the second argument. TYPE is the type of the
6313 original expression. Return NULL_TREE if no simplification is
6317 fold_binary_op_with_conditional_arg (location_t loc
,
6318 enum tree_code code
,
6319 tree type
, tree op0
, tree op1
,
6320 tree cond
, tree arg
, int cond_first_p
)
6322 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
6323 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
6324 tree test
, true_value
, false_value
;
6325 tree lhs
= NULL_TREE
;
6326 tree rhs
= NULL_TREE
;
6327 enum tree_code cond_code
= COND_EXPR
;
6329 if (TREE_CODE (cond
) == COND_EXPR
6330 || TREE_CODE (cond
) == VEC_COND_EXPR
)
6332 test
= TREE_OPERAND (cond
, 0);
6333 true_value
= TREE_OPERAND (cond
, 1);
6334 false_value
= TREE_OPERAND (cond
, 2);
6335 /* If this operand throws an expression, then it does not make
6336 sense to try to perform a logical or arithmetic operation
6338 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6340 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6345 tree testtype
= TREE_TYPE (cond
);
6347 true_value
= constant_boolean_node (true, testtype
);
6348 false_value
= constant_boolean_node (false, testtype
);
6351 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
6352 cond_code
= VEC_COND_EXPR
;
6354 /* This transformation is only worthwhile if we don't have to wrap ARG
6355 in a SAVE_EXPR and the operation can be simplified without recursing
6356 on at least one of the branches once its pushed inside the COND_EXPR. */
6357 if (!TREE_CONSTANT (arg
)
6358 && (TREE_SIDE_EFFECTS (arg
)
6359 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
6360 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6363 arg
= fold_convert_loc (loc
, arg_type
, arg
);
6366 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6368 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6370 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6374 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6376 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6378 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6381 /* Check that we have simplified at least one of the branches. */
6382 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6385 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
6389 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6391 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6392 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6393 ADDEND is the same as X.
6395 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6396 and finite. The problematic cases are when X is zero, and its mode
6397 has signed zeros. In the case of rounding towards -infinity,
6398 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6399 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6402 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6404 if (!real_zerop (addend
))
6407 /* Don't allow the fold with -fsignaling-nans. */
6408 if (HONOR_SNANS (element_mode (type
)))
6411 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6412 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
6415 /* In a vector or complex, we would need to check the sign of all zeros. */
6416 if (TREE_CODE (addend
) != REAL_CST
)
6419 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6420 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6423 /* The mode has signed zeros, and we have to honor their sign.
6424 In this situation, there is only one case we can return true for.
6425 X - 0 is the same as X unless rounding towards -infinity is
6427 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
));
6430 /* Subroutine of fold() that checks comparisons of built-in math
6431 functions against real constants.
6433 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6434 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6435 is the type of the result and ARG0 and ARG1 are the operands of the
6436 comparison. ARG1 must be a TREE_REAL_CST.
6438 The function returns the constant folded tree if a simplification
6439 can be made, and NULL_TREE otherwise. */
6442 fold_mathfn_compare (location_t loc
,
6443 enum built_in_function fcode
, enum tree_code code
,
6444 tree type
, tree arg0
, tree arg1
)
6448 if (BUILTIN_SQRT_P (fcode
))
6450 tree arg
= CALL_EXPR_ARG (arg0
, 0);
6451 machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
6453 c
= TREE_REAL_CST (arg1
);
6454 if (REAL_VALUE_NEGATIVE (c
))
6456 /* sqrt(x) < y is always false, if y is negative. */
6457 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
6458 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6460 /* sqrt(x) > y is always true, if y is negative and we
6461 don't care about NaNs, i.e. negative values of x. */
6462 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
6463 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6465 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6466 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6467 build_real (TREE_TYPE (arg
), dconst0
));
6469 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
6473 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6474 real_convert (&c2
, mode
, &c2
);
6476 if (REAL_VALUE_ISINF (c2
))
6478 /* sqrt(x) > y is x == +Inf, when y is very large. */
6479 if (HONOR_INFINITIES (mode
))
6480 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg
,
6481 build_real (TREE_TYPE (arg
), c2
));
6483 /* sqrt(x) > y is always false, when y is very large
6484 and we don't care about infinities. */
6485 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6488 /* sqrt(x) > c is the same as x > c*c. */
6489 return fold_build2_loc (loc
, code
, type
, arg
,
6490 build_real (TREE_TYPE (arg
), c2
));
6492 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
6496 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6497 real_convert (&c2
, mode
, &c2
);
6499 if (REAL_VALUE_ISINF (c2
))
6501 /* sqrt(x) < y is always true, when y is a very large
6502 value and we don't care about NaNs or Infinities. */
6503 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
6504 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6506 /* sqrt(x) < y is x != +Inf when y is very large and we
6507 don't care about NaNs. */
6508 if (! HONOR_NANS (mode
))
6509 return fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6510 build_real (TREE_TYPE (arg
), c2
));
6512 /* sqrt(x) < y is x >= 0 when y is very large and we
6513 don't care about Infinities. */
6514 if (! HONOR_INFINITIES (mode
))
6515 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6516 build_real (TREE_TYPE (arg
), dconst0
));
6518 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6519 arg
= save_expr (arg
);
6520 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6521 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6522 build_real (TREE_TYPE (arg
),
6524 fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6525 build_real (TREE_TYPE (arg
),
6529 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6530 if (! HONOR_NANS (mode
))
6531 return fold_build2_loc (loc
, code
, type
, arg
,
6532 build_real (TREE_TYPE (arg
), c2
));
6534 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6535 arg
= save_expr (arg
);
6536 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6537 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6538 build_real (TREE_TYPE (arg
),
6540 fold_build2_loc (loc
, code
, type
, arg
,
6541 build_real (TREE_TYPE (arg
),
6549 /* Subroutine of fold() that optimizes comparisons against Infinities,
6550 either +Inf or -Inf.
6552 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6553 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6554 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6556 The function returns the constant folded tree if a simplification
6557 can be made, and NULL_TREE otherwise. */
6560 fold_inf_compare (location_t loc
, enum tree_code code
, tree type
,
6561 tree arg0
, tree arg1
)
6564 REAL_VALUE_TYPE max
;
6568 mode
= TYPE_MODE (TREE_TYPE (arg0
));
6570 /* For negative infinity swap the sense of the comparison. */
6571 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6573 code
= swap_tree_comparison (code
);
6578 /* x > +Inf is always false, if with ignore sNANs. */
6579 if (HONOR_SNANS (mode
))
6581 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6584 /* x <= +Inf is always true, if we don't case about NaNs. */
6585 if (! HONOR_NANS (mode
))
6586 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6588 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6589 arg0
= save_expr (arg0
);
6590 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg0
);
6594 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6595 real_maxval (&max
, neg
, mode
);
6596 return fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6597 arg0
, build_real (TREE_TYPE (arg0
), max
));
6600 /* x < +Inf is always equal to x <= DBL_MAX. */
6601 real_maxval (&max
, neg
, mode
);
6602 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6603 arg0
, build_real (TREE_TYPE (arg0
), max
));
6606 /* x != +Inf is always equal to !(x > DBL_MAX). */
6607 real_maxval (&max
, neg
, mode
);
6608 if (! HONOR_NANS (mode
))
6609 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6610 arg0
, build_real (TREE_TYPE (arg0
), max
));
6612 temp
= fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6613 arg0
, build_real (TREE_TYPE (arg0
), max
));
6614 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, temp
);
6623 /* Subroutine of fold() that optimizes comparisons of a division by
6624 a nonzero integer constant against an integer constant, i.e.
6627 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6628 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6629 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6631 The function returns the constant folded tree if a simplification
6632 can be made, and NULL_TREE otherwise. */
6635 fold_div_compare (location_t loc
,
6636 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6638 tree prod
, tmp
, hi
, lo
;
6639 tree arg00
= TREE_OPERAND (arg0
, 0);
6640 tree arg01
= TREE_OPERAND (arg0
, 1);
6641 signop sign
= TYPE_SIGN (TREE_TYPE (arg0
));
6642 bool neg_overflow
= false;
6645 /* We have to do this the hard way to detect unsigned overflow.
6646 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6647 wide_int val
= wi::mul (arg01
, arg1
, sign
, &overflow
);
6648 prod
= force_fit_type (TREE_TYPE (arg00
), val
, -1, overflow
);
6649 neg_overflow
= false;
6651 if (sign
== UNSIGNED
)
6653 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6654 build_int_cst (TREE_TYPE (arg01
), 1));
6657 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6658 val
= wi::add (prod
, tmp
, sign
, &overflow
);
6659 hi
= force_fit_type (TREE_TYPE (arg00
), val
,
6660 -1, overflow
| TREE_OVERFLOW (prod
));
6662 else if (tree_int_cst_sgn (arg01
) >= 0)
6664 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6665 build_int_cst (TREE_TYPE (arg01
), 1));
6666 switch (tree_int_cst_sgn (arg1
))
6669 neg_overflow
= true;
6670 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6675 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6680 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6690 /* A negative divisor reverses the relational operators. */
6691 code
= swap_tree_comparison (code
);
6693 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6694 build_int_cst (TREE_TYPE (arg01
), 1));
6695 switch (tree_int_cst_sgn (arg1
))
6698 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6703 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6708 neg_overflow
= true;
6709 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6721 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6722 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6723 if (TREE_OVERFLOW (hi
))
6724 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6725 if (TREE_OVERFLOW (lo
))
6726 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6727 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6730 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6731 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6732 if (TREE_OVERFLOW (hi
))
6733 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6734 if (TREE_OVERFLOW (lo
))
6735 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6736 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6739 if (TREE_OVERFLOW (lo
))
6741 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6742 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6744 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6747 if (TREE_OVERFLOW (hi
))
6749 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6750 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6752 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6755 if (TREE_OVERFLOW (hi
))
6757 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6758 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6760 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6763 if (TREE_OVERFLOW (lo
))
6765 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6766 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6768 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6778 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6779 equality/inequality test, then return a simplified form of the test
6780 using a sign testing. Otherwise return NULL. TYPE is the desired
6784 fold_single_bit_test_into_sign_test (location_t loc
,
6785 enum tree_code code
, tree arg0
, tree arg1
,
6788 /* If this is testing a single bit, we can optimize the test. */
6789 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6790 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6791 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6793 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6794 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6795 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6797 if (arg00
!= NULL_TREE
6798 /* This is only a win if casting to a signed type is cheap,
6799 i.e. when arg00's type is not a partial mode. */
6800 && TYPE_PRECISION (TREE_TYPE (arg00
))
6801 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00
))))
6803 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6804 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6806 fold_convert_loc (loc
, stype
, arg00
),
6807 build_int_cst (stype
, 0));
6814 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6815 equality/inequality test, then return a simplified form of
6816 the test using shifts and logical operations. Otherwise return
6817 NULL. TYPE is the desired result type. */
6820 fold_single_bit_test (location_t loc
, enum tree_code code
,
6821 tree arg0
, tree arg1
, tree result_type
)
6823 /* If this is testing a single bit, we can optimize the test. */
6824 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6825 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6826 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6828 tree inner
= TREE_OPERAND (arg0
, 0);
6829 tree type
= TREE_TYPE (arg0
);
6830 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6831 machine_mode operand_mode
= TYPE_MODE (type
);
6833 tree signed_type
, unsigned_type
, intermediate_type
;
6836 /* First, see if we can fold the single bit test into a sign-bit
6838 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6843 /* Otherwise we have (A & C) != 0 where C is a single bit,
6844 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6845 Similarly for (A & C) == 0. */
6847 /* If INNER is a right shift of a constant and it plus BITNUM does
6848 not overflow, adjust BITNUM and INNER. */
6849 if (TREE_CODE (inner
) == RSHIFT_EXPR
6850 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6851 && bitnum
< TYPE_PRECISION (type
)
6852 && wi::ltu_p (TREE_OPERAND (inner
, 1),
6853 TYPE_PRECISION (type
) - bitnum
))
6855 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
6856 inner
= TREE_OPERAND (inner
, 0);
6859 /* If we are going to be able to omit the AND below, we must do our
6860 operations as unsigned. If we must use the AND, we have a choice.
6861 Normally unsigned is faster, but for some machines signed is. */
6862 #ifdef LOAD_EXTEND_OP
6863 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6864 && !flag_syntax_only
) ? 0 : 1;
6869 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6870 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6871 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6872 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
6875 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6876 inner
, size_int (bitnum
));
6878 one
= build_int_cst (intermediate_type
, 1);
6880 if (code
== EQ_EXPR
)
6881 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6883 /* Put the AND last so it can combine with more things. */
6884 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6886 /* Make sure to return the proper type. */
6887 inner
= fold_convert_loc (loc
, result_type
, inner
);
6894 /* Check whether we are allowed to reorder operands arg0 and arg1,
6895 such that the evaluation of arg1 occurs before arg0. */
6898 reorder_operands_p (const_tree arg0
, const_tree arg1
)
6900 if (! flag_evaluation_order
)
6902 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6904 return ! TREE_SIDE_EFFECTS (arg0
)
6905 && ! TREE_SIDE_EFFECTS (arg1
);
6908 /* Test whether it is preferable two swap two operands, ARG0 and
6909 ARG1, for example because ARG0 is an integer constant and ARG1
6910 isn't. If REORDER is true, only recommend swapping if we can
6911 evaluate the operands in reverse order. */
6914 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
6916 if (CONSTANT_CLASS_P (arg1
))
6918 if (CONSTANT_CLASS_P (arg0
))
6924 if (TREE_CONSTANT (arg1
))
6926 if (TREE_CONSTANT (arg0
))
6929 if (reorder
&& flag_evaluation_order
6930 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6933 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6934 for commutative and comparison operators. Ensuring a canonical
6935 form allows the optimizers to find additional redundancies without
6936 having to explicitly check for both orderings. */
6937 if (TREE_CODE (arg0
) == SSA_NAME
6938 && TREE_CODE (arg1
) == SSA_NAME
6939 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6942 /* Put SSA_NAMEs last. */
6943 if (TREE_CODE (arg1
) == SSA_NAME
)
6945 if (TREE_CODE (arg0
) == SSA_NAME
)
6948 /* Put variables last. */
6957 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6958 ARG0 is extended to a wider type. */
6961 fold_widened_comparison (location_t loc
, enum tree_code code
,
6962 tree type
, tree arg0
, tree arg1
)
6964 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6966 tree shorter_type
, outer_type
;
6970 if (arg0_unw
== arg0
)
6972 shorter_type
= TREE_TYPE (arg0_unw
);
6974 #ifdef HAVE_canonicalize_funcptr_for_compare
6975 /* Disable this optimization if we're casting a function pointer
6976 type on targets that require function pointer canonicalization. */
6977 if (HAVE_canonicalize_funcptr_for_compare
6978 && TREE_CODE (shorter_type
) == POINTER_TYPE
6979 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6983 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6986 arg1_unw
= get_unwidened (arg1
, NULL_TREE
);
6988 /* If possible, express the comparison in the shorter mode. */
6989 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6990 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6991 && (TREE_TYPE (arg1_unw
) == shorter_type
6992 || ((TYPE_PRECISION (shorter_type
)
6993 >= TYPE_PRECISION (TREE_TYPE (arg1_unw
)))
6994 && (TYPE_UNSIGNED (shorter_type
)
6995 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw
))))
6996 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6997 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6998 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6999 && int_fits_type_p (arg1_unw
, shorter_type
))))
7000 return fold_build2_loc (loc
, code
, type
, arg0_unw
,
7001 fold_convert_loc (loc
, shorter_type
, arg1_unw
));
7003 if (TREE_CODE (arg1_unw
) != INTEGER_CST
7004 || TREE_CODE (shorter_type
) != INTEGER_TYPE
7005 || !int_fits_type_p (arg1_unw
, shorter_type
))
7008 /* If we are comparing with the integer that does not fit into the range
7009 of the shorter type, the result is known. */
7010 outer_type
= TREE_TYPE (arg1_unw
);
7011 min
= lower_bound_in_type (outer_type
, shorter_type
);
7012 max
= upper_bound_in_type (outer_type
, shorter_type
);
7014 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
7016 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
7023 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
7028 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
7034 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
7036 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
7041 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
7043 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
7052 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7053 ARG0 just the signedness is changed. */
7056 fold_sign_changed_comparison (location_t loc
, enum tree_code code
, tree type
,
7057 tree arg0
, tree arg1
)
7060 tree inner_type
, outer_type
;
7062 if (!CONVERT_EXPR_P (arg0
))
7065 outer_type
= TREE_TYPE (arg0
);
7066 arg0_inner
= TREE_OPERAND (arg0
, 0);
7067 inner_type
= TREE_TYPE (arg0_inner
);
7069 #ifdef HAVE_canonicalize_funcptr_for_compare
7070 /* Disable this optimization if we're casting a function pointer
7071 type on targets that require function pointer canonicalization. */
7072 if (HAVE_canonicalize_funcptr_for_compare
7073 && TREE_CODE (inner_type
) == POINTER_TYPE
7074 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
7078 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
7081 if (TREE_CODE (arg1
) != INTEGER_CST
7082 && !(CONVERT_EXPR_P (arg1
)
7083 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
7086 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
7091 if (POINTER_TYPE_P (inner_type
) != POINTER_TYPE_P (outer_type
))
7094 if (TREE_CODE (arg1
) == INTEGER_CST
)
7095 arg1
= force_fit_type (inner_type
, wi::to_widest (arg1
), 0,
7096 TREE_OVERFLOW (arg1
));
7098 arg1
= fold_convert_loc (loc
, inner_type
, arg1
);
7100 return fold_build2_loc (loc
, code
, type
, arg0_inner
, arg1
);
7104 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7105 means A >= Y && A != MAX, but in this case we know that
7106 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7109 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
7111 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
7113 if (TREE_CODE (bound
) == LT_EXPR
)
7114 a
= TREE_OPERAND (bound
, 0);
7115 else if (TREE_CODE (bound
) == GT_EXPR
)
7116 a
= TREE_OPERAND (bound
, 1);
7120 typea
= TREE_TYPE (a
);
7121 if (!INTEGRAL_TYPE_P (typea
)
7122 && !POINTER_TYPE_P (typea
))
7125 if (TREE_CODE (ineq
) == LT_EXPR
)
7127 a1
= TREE_OPERAND (ineq
, 1);
7128 y
= TREE_OPERAND (ineq
, 0);
7130 else if (TREE_CODE (ineq
) == GT_EXPR
)
7132 a1
= TREE_OPERAND (ineq
, 0);
7133 y
= TREE_OPERAND (ineq
, 1);
7138 if (TREE_TYPE (a1
) != typea
)
7141 if (POINTER_TYPE_P (typea
))
7143 /* Convert the pointer types into integer before taking the difference. */
7144 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
7145 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
7146 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
7149 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
7151 if (!diff
|| !integer_onep (diff
))
7154 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
7157 /* Fold a sum or difference of at least one multiplication.
7158 Returns the folded tree or NULL if no simplification could be made. */
7161 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
7162 tree arg0
, tree arg1
)
7164 tree arg00
, arg01
, arg10
, arg11
;
7165 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7167 /* (A * C) +- (B * C) -> (A+-B) * C.
7168 (A * C) +- A -> A * (C+-1).
7169 We are most concerned about the case where C is a constant,
7170 but other combinations show up during loop reduction. Since
7171 it is not difficult, try all four possibilities. */
7173 if (TREE_CODE (arg0
) == MULT_EXPR
)
7175 arg00
= TREE_OPERAND (arg0
, 0);
7176 arg01
= TREE_OPERAND (arg0
, 1);
7178 else if (TREE_CODE (arg0
) == INTEGER_CST
)
7180 arg00
= build_one_cst (type
);
7185 /* We cannot generate constant 1 for fract. */
7186 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7189 arg01
= build_one_cst (type
);
7191 if (TREE_CODE (arg1
) == MULT_EXPR
)
7193 arg10
= TREE_OPERAND (arg1
, 0);
7194 arg11
= TREE_OPERAND (arg1
, 1);
7196 else if (TREE_CODE (arg1
) == INTEGER_CST
)
7198 arg10
= build_one_cst (type
);
7199 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7200 the purpose of this canonicalization. */
7201 if (wi::neg_p (arg1
, TYPE_SIGN (TREE_TYPE (arg1
)))
7202 && negate_expr_p (arg1
)
7203 && code
== PLUS_EXPR
)
7205 arg11
= negate_expr (arg1
);
7213 /* We cannot generate constant 1 for fract. */
7214 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7217 arg11
= build_one_cst (type
);
7221 if (operand_equal_p (arg01
, arg11
, 0))
7222 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7223 else if (operand_equal_p (arg00
, arg10
, 0))
7224 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7225 else if (operand_equal_p (arg00
, arg11
, 0))
7226 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7227 else if (operand_equal_p (arg01
, arg10
, 0))
7228 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7230 /* No identical multiplicands; see if we can find a common
7231 power-of-two factor in non-power-of-two multiplies. This
7232 can help in multi-dimensional array access. */
7233 else if (tree_fits_shwi_p (arg01
)
7234 && tree_fits_shwi_p (arg11
))
7236 HOST_WIDE_INT int01
, int11
, tmp
;
7239 int01
= tree_to_shwi (arg01
);
7240 int11
= tree_to_shwi (arg11
);
7242 /* Move min of absolute values to int11. */
7243 if (absu_hwi (int01
) < absu_hwi (int11
))
7245 tmp
= int01
, int01
= int11
, int11
= tmp
;
7246 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7253 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
7254 /* The remainder should not be a constant, otherwise we
7255 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7256 increased the number of multiplications necessary. */
7257 && TREE_CODE (arg10
) != INTEGER_CST
)
7259 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7260 build_int_cst (TREE_TYPE (arg00
),
7265 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7270 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7271 fold_build2_loc (loc
, code
, type
,
7272 fold_convert_loc (loc
, type
, alt0
),
7273 fold_convert_loc (loc
, type
, alt1
)),
7274 fold_convert_loc (loc
, type
, same
));
7279 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7280 specified by EXPR into the buffer PTR of length LEN bytes.
7281 Return the number of bytes placed in the buffer, or zero
7285 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7287 tree type
= TREE_TYPE (expr
);
7288 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7289 int byte
, offset
, word
, words
;
7290 unsigned char value
;
7292 if ((off
== -1 && total_bytes
> len
)
7293 || off
>= total_bytes
)
7297 words
= total_bytes
/ UNITS_PER_WORD
;
7299 for (byte
= 0; byte
< total_bytes
; byte
++)
7301 int bitpos
= byte
* BITS_PER_UNIT
;
7302 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7304 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7306 if (total_bytes
> UNITS_PER_WORD
)
7308 word
= byte
/ UNITS_PER_WORD
;
7309 if (WORDS_BIG_ENDIAN
)
7310 word
= (words
- 1) - word
;
7311 offset
= word
* UNITS_PER_WORD
;
7312 if (BYTES_BIG_ENDIAN
)
7313 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7315 offset
+= byte
% UNITS_PER_WORD
;
7318 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7320 && offset
- off
< len
)
7321 ptr
[offset
- off
] = value
;
7323 return MIN (len
, total_bytes
- off
);
7327 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7328 specified by EXPR into the buffer PTR of length LEN bytes.
7329 Return the number of bytes placed in the buffer, or zero
7333 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7335 tree type
= TREE_TYPE (expr
);
7336 machine_mode mode
= TYPE_MODE (type
);
7337 int total_bytes
= GET_MODE_SIZE (mode
);
7338 FIXED_VALUE_TYPE value
;
7339 tree i_value
, i_type
;
7341 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7344 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7346 if (NULL_TREE
== i_type
7347 || TYPE_PRECISION (i_type
) != total_bytes
)
7350 value
= TREE_FIXED_CST (expr
);
7351 i_value
= double_int_to_tree (i_type
, value
.data
);
7353 return native_encode_int (i_value
, ptr
, len
, off
);
7357 /* Subroutine of native_encode_expr. Encode the REAL_CST
7358 specified by EXPR into the buffer PTR of length LEN bytes.
7359 Return the number of bytes placed in the buffer, or zero
7363 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7365 tree type
= TREE_TYPE (expr
);
7366 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7367 int byte
, offset
, word
, words
, bitpos
;
7368 unsigned char value
;
7370 /* There are always 32 bits in each long, no matter the size of
7371 the hosts long. We handle floating point representations with
7375 if ((off
== -1 && total_bytes
> len
)
7376 || off
>= total_bytes
)
7380 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7382 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7384 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7385 bitpos
+= BITS_PER_UNIT
)
7387 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7388 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7390 if (UNITS_PER_WORD
< 4)
7392 word
= byte
/ UNITS_PER_WORD
;
7393 if (WORDS_BIG_ENDIAN
)
7394 word
= (words
- 1) - word
;
7395 offset
= word
* UNITS_PER_WORD
;
7396 if (BYTES_BIG_ENDIAN
)
7397 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7399 offset
+= byte
% UNITS_PER_WORD
;
7402 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7403 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7405 && offset
- off
< len
)
7406 ptr
[offset
- off
] = value
;
7408 return MIN (len
, total_bytes
- off
);
7411 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7412 specified by EXPR into the buffer PTR of length LEN bytes.
7413 Return the number of bytes placed in the buffer, or zero
7417 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7422 part
= TREE_REALPART (expr
);
7423 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7427 part
= TREE_IMAGPART (expr
);
7429 off
= MAX (0, off
- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part
))));
7430 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
, off
);
7434 return rsize
+ isize
;
7438 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7439 specified by EXPR into the buffer PTR of length LEN bytes.
7440 Return the number of bytes placed in the buffer, or zero
7444 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7451 count
= VECTOR_CST_NELTS (expr
);
7452 itype
= TREE_TYPE (TREE_TYPE (expr
));
7453 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7454 for (i
= 0; i
< count
; i
++)
7461 elem
= VECTOR_CST_ELT (expr
, i
);
7462 int res
= native_encode_expr (elem
, ptr
+offset
, len
-offset
, off
);
7463 if ((off
== -1 && res
!= size
)
7476 /* Subroutine of native_encode_expr. Encode the STRING_CST
7477 specified by EXPR into the buffer PTR of length LEN bytes.
7478 Return the number of bytes placed in the buffer, or zero
7482 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7484 tree type
= TREE_TYPE (expr
);
7485 HOST_WIDE_INT total_bytes
;
7487 if (TREE_CODE (type
) != ARRAY_TYPE
7488 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7489 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7490 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7492 total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (type
));
7493 if ((off
== -1 && total_bytes
> len
)
7494 || off
>= total_bytes
)
7498 if (TREE_STRING_LENGTH (expr
) - off
< MIN (total_bytes
, len
))
7501 if (off
< TREE_STRING_LENGTH (expr
))
7503 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7504 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7506 memset (ptr
+ written
, 0,
7507 MIN (total_bytes
- written
, len
- written
));
7510 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, MIN (total_bytes
, len
));
7511 return MIN (total_bytes
- off
, len
);
7515 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7516 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7517 buffer PTR of length LEN bytes. If OFF is not -1 then start
7518 the encoding at byte offset OFF and encode at most LEN bytes.
7519 Return the number of bytes placed in the buffer, or zero upon failure. */
7522 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7524 switch (TREE_CODE (expr
))
7527 return native_encode_int (expr
, ptr
, len
, off
);
7530 return native_encode_real (expr
, ptr
, len
, off
);
7533 return native_encode_fixed (expr
, ptr
, len
, off
);
7536 return native_encode_complex (expr
, ptr
, len
, off
);
7539 return native_encode_vector (expr
, ptr
, len
, off
);
7542 return native_encode_string (expr
, ptr
, len
, off
);
7550 /* Subroutine of native_interpret_expr. Interpret the contents of
7551 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7552 If the buffer cannot be interpreted, return NULL_TREE. */
7555 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7557 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7559 if (total_bytes
> len
7560 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7563 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
7565 return wide_int_to_tree (type
, result
);
7569 /* Subroutine of native_interpret_expr. Interpret the contents of
7570 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7571 If the buffer cannot be interpreted, return NULL_TREE. */
7574 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
7576 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7578 FIXED_VALUE_TYPE fixed_value
;
7580 if (total_bytes
> len
7581 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7584 result
= double_int::from_buffer (ptr
, total_bytes
);
7585 fixed_value
= fixed_from_double_int (result
, TYPE_MODE (type
));
7587 return build_fixed (type
, fixed_value
);
7591 /* Subroutine of native_interpret_expr. Interpret the contents of
7592 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7593 If the buffer cannot be interpreted, return NULL_TREE. */
7596 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7598 machine_mode mode
= TYPE_MODE (type
);
7599 int total_bytes
= GET_MODE_SIZE (mode
);
7600 int byte
, offset
, word
, words
, bitpos
;
7601 unsigned char value
;
7602 /* There are always 32 bits in each long, no matter the size of
7603 the hosts long. We handle floating point representations with
7608 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7609 if (total_bytes
> len
|| total_bytes
> 24)
7611 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7613 memset (tmp
, 0, sizeof (tmp
));
7614 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7615 bitpos
+= BITS_PER_UNIT
)
7617 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7618 if (UNITS_PER_WORD
< 4)
7620 word
= byte
/ UNITS_PER_WORD
;
7621 if (WORDS_BIG_ENDIAN
)
7622 word
= (words
- 1) - word
;
7623 offset
= word
* UNITS_PER_WORD
;
7624 if (BYTES_BIG_ENDIAN
)
7625 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7627 offset
+= byte
% UNITS_PER_WORD
;
7630 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7631 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7633 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7636 real_from_target (&r
, tmp
, mode
);
7637 return build_real (type
, r
);
7641 /* Subroutine of native_interpret_expr. Interpret the contents of
7642 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7643 If the buffer cannot be interpreted, return NULL_TREE. */
7646 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7648 tree etype
, rpart
, ipart
;
7651 etype
= TREE_TYPE (type
);
7652 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7655 rpart
= native_interpret_expr (etype
, ptr
, size
);
7658 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7661 return build_complex (type
, rpart
, ipart
);
7665 /* Subroutine of native_interpret_expr. Interpret the contents of
7666 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7667 If the buffer cannot be interpreted, return NULL_TREE. */
7670 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7676 etype
= TREE_TYPE (type
);
7677 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7678 count
= TYPE_VECTOR_SUBPARTS (type
);
7679 if (size
* count
> len
)
7682 elements
= XALLOCAVEC (tree
, count
);
7683 for (i
= count
- 1; i
>= 0; i
--)
7685 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7690 return build_vector (type
, elements
);
7694 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7695 the buffer PTR of length LEN as a constant of type TYPE. For
7696 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7697 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7698 return NULL_TREE. */
7701 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7703 switch (TREE_CODE (type
))
7709 case REFERENCE_TYPE
:
7710 return native_interpret_int (type
, ptr
, len
);
7713 return native_interpret_real (type
, ptr
, len
);
7715 case FIXED_POINT_TYPE
:
7716 return native_interpret_fixed (type
, ptr
, len
);
7719 return native_interpret_complex (type
, ptr
, len
);
7722 return native_interpret_vector (type
, ptr
, len
);
7729 /* Returns true if we can interpret the contents of a native encoding
7733 can_native_interpret_type_p (tree type
)
7735 switch (TREE_CODE (type
))
7741 case REFERENCE_TYPE
:
7742 case FIXED_POINT_TYPE
:
7752 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7753 TYPE at compile-time. If we're unable to perform the conversion
7754 return NULL_TREE. */
7757 fold_view_convert_expr (tree type
, tree expr
)
7759 /* We support up to 512-bit values (for V8DFmode). */
7760 unsigned char buffer
[64];
7763 /* Check that the host and target are sane. */
7764 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7767 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7771 return native_interpret_expr (type
, buffer
, len
);
7774 /* Build an expression for the address of T. Folds away INDIRECT_REF
7775 to avoid confusing the gimplify process. */
7778 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7780 /* The size of the object is not relevant when talking about its address. */
7781 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7782 t
= TREE_OPERAND (t
, 0);
7784 if (TREE_CODE (t
) == INDIRECT_REF
)
7786 t
= TREE_OPERAND (t
, 0);
7788 if (TREE_TYPE (t
) != ptrtype
)
7789 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7791 else if (TREE_CODE (t
) == MEM_REF
7792 && integer_zerop (TREE_OPERAND (t
, 1)))
7793 return TREE_OPERAND (t
, 0);
7794 else if (TREE_CODE (t
) == MEM_REF
7795 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7796 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7797 TREE_OPERAND (t
, 0),
7798 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7799 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7801 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7803 if (TREE_TYPE (t
) != ptrtype
)
7804 t
= fold_convert_loc (loc
, ptrtype
, t
);
7807 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7812 /* Build an expression for the address of T. */
7815 build_fold_addr_expr_loc (location_t loc
, tree t
)
7817 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7819 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7822 /* Fold a unary expression of code CODE and type TYPE with operand
7823 OP0. Return the folded expression if folding is successful.
7824 Otherwise, return NULL_TREE. */
7827 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7831 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7833 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7834 && TREE_CODE_LENGTH (code
) == 1);
7839 if (CONVERT_EXPR_CODE_P (code
)
7840 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7842 /* Don't use STRIP_NOPS, because signedness of argument type
7844 STRIP_SIGN_NOPS (arg0
);
7848 /* Strip any conversions that don't change the mode. This
7849 is safe for every expression, except for a comparison
7850 expression because its signedness is derived from its
7853 Note that this is done as an internal manipulation within
7854 the constant folder, in order to find the simplest
7855 representation of the arguments so that their form can be
7856 studied. In any cases, the appropriate type conversions
7857 should be put back in the tree that will get out of the
7862 if (CONSTANT_CLASS_P (arg0
))
7864 tree tem
= const_unop (code
, type
, arg0
);
7867 if (TREE_TYPE (tem
) != type
)
7868 tem
= fold_convert_loc (loc
, type
, tem
);
7874 tem
= generic_simplify (loc
, code
, type
, op0
);
7878 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7880 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7881 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7882 fold_build1_loc (loc
, code
, type
,
7883 fold_convert_loc (loc
, TREE_TYPE (op0
),
7884 TREE_OPERAND (arg0
, 1))));
7885 else if (TREE_CODE (arg0
) == COND_EXPR
)
7887 tree arg01
= TREE_OPERAND (arg0
, 1);
7888 tree arg02
= TREE_OPERAND (arg0
, 2);
7889 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7890 arg01
= fold_build1_loc (loc
, code
, type
,
7891 fold_convert_loc (loc
,
7892 TREE_TYPE (op0
), arg01
));
7893 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7894 arg02
= fold_build1_loc (loc
, code
, type
,
7895 fold_convert_loc (loc
,
7896 TREE_TYPE (op0
), arg02
));
7897 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7900 /* If this was a conversion, and all we did was to move into
7901 inside the COND_EXPR, bring it back out. But leave it if
7902 it is a conversion from integer to integer and the
7903 result precision is no wider than a word since such a
7904 conversion is cheap and may be optimized away by combine,
7905 while it couldn't if it were outside the COND_EXPR. Then return
7906 so we don't get into an infinite recursion loop taking the
7907 conversion out and then back in. */
7909 if ((CONVERT_EXPR_CODE_P (code
)
7910 || code
== NON_LVALUE_EXPR
)
7911 && TREE_CODE (tem
) == COND_EXPR
7912 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7913 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7914 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7915 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7916 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7917 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7918 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7920 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7921 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7922 || flag_syntax_only
))
7923 tem
= build1_loc (loc
, code
, type
,
7925 TREE_TYPE (TREE_OPERAND
7926 (TREE_OPERAND (tem
, 1), 0)),
7927 TREE_OPERAND (tem
, 0),
7928 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7929 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7937 case NON_LVALUE_EXPR
:
7938 if (!maybe_lvalue_p (op0
))
7939 return fold_convert_loc (loc
, type
, op0
);
7944 case FIX_TRUNC_EXPR
:
7945 if (COMPARISON_CLASS_P (op0
))
7947 /* If we have (type) (a CMP b) and type is an integral type, return
7948 new expression involving the new type. Canonicalize
7949 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7951 Do not fold the result as that would not simplify further, also
7952 folding again results in recursions. */
7953 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7954 return build2_loc (loc
, TREE_CODE (op0
), type
,
7955 TREE_OPERAND (op0
, 0),
7956 TREE_OPERAND (op0
, 1));
7957 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7958 && TREE_CODE (type
) != VECTOR_TYPE
)
7959 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7960 constant_boolean_node (true, type
),
7961 constant_boolean_node (false, type
));
7964 /* Handle (T *)&A.B.C for A being of type T and B and C
7965 living at offset zero. This occurs frequently in
7966 C++ upcasting and then accessing the base. */
7967 if (TREE_CODE (op0
) == ADDR_EXPR
7968 && POINTER_TYPE_P (type
)
7969 && handled_component_p (TREE_OPERAND (op0
, 0)))
7971 HOST_WIDE_INT bitsize
, bitpos
;
7974 int unsignedp
, volatilep
;
7975 tree base
= TREE_OPERAND (op0
, 0);
7976 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7977 &mode
, &unsignedp
, &volatilep
, false);
7978 /* If the reference was to a (constant) zero offset, we can use
7979 the address of the base if it has the same base type
7980 as the result type and the pointer type is unqualified. */
7981 if (! offset
&& bitpos
== 0
7982 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7983 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7984 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7985 return fold_convert_loc (loc
, type
,
7986 build_fold_addr_expr_loc (loc
, base
));
7989 if (TREE_CODE (op0
) == MODIFY_EXPR
7990 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7991 /* Detect assigning a bitfield. */
7992 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7994 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7996 /* Don't leave an assignment inside a conversion
7997 unless assigning a bitfield. */
7998 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7999 /* First do the assignment, then return converted constant. */
8000 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
8001 TREE_NO_WARNING (tem
) = 1;
8002 TREE_USED (tem
) = 1;
8006 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8007 constants (if x has signed type, the sign bit cannot be set
8008 in c). This folds extension into the BIT_AND_EXPR.
8009 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8010 very likely don't have maximal range for their precision and this
8011 transformation effectively doesn't preserve non-maximal ranges. */
8012 if (TREE_CODE (type
) == INTEGER_TYPE
8013 && TREE_CODE (op0
) == BIT_AND_EXPR
8014 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
8016 tree and_expr
= op0
;
8017 tree and0
= TREE_OPERAND (and_expr
, 0);
8018 tree and1
= TREE_OPERAND (and_expr
, 1);
8021 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
8022 || (TYPE_PRECISION (type
)
8023 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
8025 else if (TYPE_PRECISION (TREE_TYPE (and1
))
8026 <= HOST_BITS_PER_WIDE_INT
8027 && tree_fits_uhwi_p (and1
))
8029 unsigned HOST_WIDE_INT cst
;
8031 cst
= tree_to_uhwi (and1
);
8032 cst
&= HOST_WIDE_INT_M1U
8033 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
8034 change
= (cst
== 0);
8035 #ifdef LOAD_EXTEND_OP
8037 && !flag_syntax_only
8038 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
8041 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
8042 and0
= fold_convert_loc (loc
, uns
, and0
);
8043 and1
= fold_convert_loc (loc
, uns
, and1
);
8049 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
8050 TREE_OVERFLOW (and1
));
8051 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
8052 fold_convert_loc (loc
, type
, and0
), tem
);
8056 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8057 when one of the new casts will fold away. Conservatively we assume
8058 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8059 if (POINTER_TYPE_P (type
)
8060 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8061 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
8062 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8063 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
8064 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
8066 tree arg00
= TREE_OPERAND (arg0
, 0);
8067 tree arg01
= TREE_OPERAND (arg0
, 1);
8069 return fold_build_pointer_plus_loc
8070 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
8073 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8074 of the same precision, and X is an integer type not narrower than
8075 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8076 if (INTEGRAL_TYPE_P (type
)
8077 && TREE_CODE (op0
) == BIT_NOT_EXPR
8078 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8079 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
8080 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
8082 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
8083 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
8084 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
8085 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
8086 fold_convert_loc (loc
, type
, tem
));
8089 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8090 type of X and Y (integer types only). */
8091 if (INTEGRAL_TYPE_P (type
)
8092 && TREE_CODE (op0
) == MULT_EXPR
8093 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8094 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
8096 /* Be careful not to introduce new overflows. */
8098 if (TYPE_OVERFLOW_WRAPS (type
))
8101 mult_type
= unsigned_type_for (type
);
8103 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
8105 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
8106 fold_convert_loc (loc
, mult_type
,
8107 TREE_OPERAND (op0
, 0)),
8108 fold_convert_loc (loc
, mult_type
,
8109 TREE_OPERAND (op0
, 1)));
8110 return fold_convert_loc (loc
, type
, tem
);
8116 case VIEW_CONVERT_EXPR
:
8117 if (TREE_CODE (op0
) == MEM_REF
)
8118 return fold_build2_loc (loc
, MEM_REF
, type
,
8119 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
8124 tem
= fold_negate_expr (loc
, arg0
);
8126 return fold_convert_loc (loc
, type
, tem
);
8130 /* Convert fabs((double)float) into (double)fabsf(float). */
8131 if (TREE_CODE (arg0
) == NOP_EXPR
8132 && TREE_CODE (type
) == REAL_TYPE
)
8134 tree targ0
= strip_float_extensions (arg0
);
8136 return fold_convert_loc (loc
, type
,
8137 fold_build1_loc (loc
, ABS_EXPR
,
8141 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8142 else if (TREE_CODE (arg0
) == ABS_EXPR
)
8145 /* Strip sign ops from argument. */
8146 if (TREE_CODE (type
) == REAL_TYPE
)
8148 tem
= fold_strip_sign_ops (arg0
);
8150 return fold_build1_loc (loc
, ABS_EXPR
, type
,
8151 fold_convert_loc (loc
, type
, tem
));
8156 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8157 return fold_convert_loc (loc
, type
, arg0
);
8158 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8160 tree itype
= TREE_TYPE (type
);
8161 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
8162 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
8163 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
8164 negate_expr (ipart
));
8166 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8167 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8171 /* Convert ~ (-A) to A - 1. */
8172 if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
8173 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
8174 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0)),
8175 build_int_cst (type
, 1));
8176 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8177 else if (INTEGRAL_TYPE_P (type
)
8178 && ((TREE_CODE (arg0
) == MINUS_EXPR
8179 && integer_onep (TREE_OPERAND (arg0
, 1)))
8180 || (TREE_CODE (arg0
) == PLUS_EXPR
8181 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
8183 /* Perform the negation in ARG0's type and only then convert
8184 to TYPE as to avoid introducing undefined behavior. */
8185 tree t
= fold_build1_loc (loc
, NEGATE_EXPR
,
8186 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
8187 TREE_OPERAND (arg0
, 0));
8188 return fold_convert_loc (loc
, type
, t
);
8190 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8191 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8192 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8193 fold_convert_loc (loc
, type
,
8194 TREE_OPERAND (arg0
, 0)))))
8195 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
8196 fold_convert_loc (loc
, type
,
8197 TREE_OPERAND (arg0
, 1)));
8198 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8199 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8200 fold_convert_loc (loc
, type
,
8201 TREE_OPERAND (arg0
, 1)))))
8202 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
8203 fold_convert_loc (loc
, type
,
8204 TREE_OPERAND (arg0
, 0)), tem
);
8208 case TRUTH_NOT_EXPR
:
8209 /* Note that the operand of this must be an int
8210 and its values must be 0 or 1.
8211 ("true" is a fixed value perhaps depending on the language,
8212 but we don't handle values other than 1 correctly yet.) */
8213 tem
= fold_truth_not_expr (loc
, arg0
);
8216 return fold_convert_loc (loc
, type
, tem
);
8219 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8220 return fold_convert_loc (loc
, type
, arg0
);
8221 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8223 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8224 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8225 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8226 TREE_OPERAND (arg0
, 0)),
8227 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8228 TREE_OPERAND (arg0
, 1)));
8229 return fold_convert_loc (loc
, type
, tem
);
8231 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8233 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8234 tem
= fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8235 TREE_OPERAND (arg0
, 0));
8236 return fold_convert_loc (loc
, type
, tem
);
8238 if (TREE_CODE (arg0
) == CALL_EXPR
)
8240 tree fn
= get_callee_fndecl (arg0
);
8241 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8242 switch (DECL_FUNCTION_CODE (fn
))
8244 CASE_FLT_FN (BUILT_IN_CEXPI
):
8245 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
8247 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8257 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8258 return build_zero_cst (type
);
8259 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8261 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8262 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8263 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8264 TREE_OPERAND (arg0
, 0)),
8265 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8266 TREE_OPERAND (arg0
, 1)));
8267 return fold_convert_loc (loc
, type
, tem
);
8269 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8271 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8272 tem
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
8273 return fold_convert_loc (loc
, type
, negate_expr (tem
));
8275 if (TREE_CODE (arg0
) == CALL_EXPR
)
8277 tree fn
= get_callee_fndecl (arg0
);
8278 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8279 switch (DECL_FUNCTION_CODE (fn
))
8281 CASE_FLT_FN (BUILT_IN_CEXPI
):
8282 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
8284 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8294 /* Fold *&X to X if X is an lvalue. */
8295 if (TREE_CODE (op0
) == ADDR_EXPR
)
8297 tree op00
= TREE_OPERAND (op0
, 0);
8298 if ((TREE_CODE (op00
) == VAR_DECL
8299 || TREE_CODE (op00
) == PARM_DECL
8300 || TREE_CODE (op00
) == RESULT_DECL
)
8301 && !TREE_READONLY (op00
))
8308 } /* switch (code) */
8312 /* If the operation was a conversion do _not_ mark a resulting constant
8313 with TREE_OVERFLOW if the original constant was not. These conversions
8314 have implementation defined behavior and retaining the TREE_OVERFLOW
8315 flag here would confuse later passes such as VRP. */
8317 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
8318 tree type
, tree op0
)
8320 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
8322 && TREE_CODE (res
) == INTEGER_CST
8323 && TREE_CODE (op0
) == INTEGER_CST
8324 && CONVERT_EXPR_CODE_P (code
))
8325 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
8330 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8331 operands OP0 and OP1. LOC is the location of the resulting expression.
8332 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8333 Return the folded expression if folding is successful. Otherwise,
8334 return NULL_TREE. */
/* NOTE(review): this extraction has lost physical lines (braces, the
   `optimize' guard referred to at 8341, several returns).  Only comments
   were changed below; compare against upstream fold-const.c before
   touching the code itself.  */
8336 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
8337 tree arg0
, tree arg1
, tree op0
, tree op1
)
8341 /* We only do these simplifications if we are optimizing. */
8345 /* Check for things like (A || B) && (A || C). We can convert this
8346 to A || (B && C). Note that either operator can be any of the four
8347 truth and/or operations and the transformation will still be
8348 valid. Also note that we only care about order for the
8349 ANDIF and ORIF operators. If B contains side effects, this
8350 might change the truth-value of A. */
8351 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8352 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8353 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8354 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8355 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8356 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8358 tree a00
= TREE_OPERAND (arg0
, 0);
8359 tree a01
= TREE_OPERAND (arg0
, 1);
8360 tree a10
= TREE_OPERAND (arg1
, 0);
8361 tree a11
= TREE_OPERAND (arg1
, 1);
/* Commutativity lets us match operands in either position; it holds
   only for the non-short-circuit TRUTH_AND/TRUTH_OR forms.  */
8362 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8363 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8364 && (code
== TRUTH_AND_EXPR
8365 || code
== TRUTH_OR_EXPR
));
8367 if (operand_equal_p (a00
, a10
, 0))
8368 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8369 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8370 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8371 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8372 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8373 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8374 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8375 fold_build2_loc (loc
, code
, type
, a00
, a11
));
8377 /* This case is tricky because we must either have commutative
8378 operators or else A10 must not have side-effects. */
8380 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8381 && operand_equal_p (a01
, a11
, 0))
8382 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8383 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8387 /* See if we can build a range comparison. */
8388 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
/* Merge a comparison into the opposite arm of an ANDIF/ORIF whose
   top-level code is the dual of CODE.  */
8391 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8392 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8394 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8396 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8399 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8400 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8402 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8404 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
8407 /* Check for the possibility of merging component references. If our
8408 lhs is another similar operation, try to merge its rhs with our
8409 rhs. Then try to merge our lhs and rhs. */
8410 if (TREE_CODE (arg0
) == code
8411 && 0 != (tem
= fold_truth_andor_1 (loc
, code
, type
,
8412 TREE_OPERAND (arg0
, 1), arg1
)))
8413 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8415 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
/* On targets where LOGICAL_OP_NON_SHORT_CIRCUIT is set, prefer the
   non-short-circuit TRUTH_AND/OR forms when operands are simple and
   trap/side-effect free (rationale in the comments at 8430 below).  */
8418 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8419 && (code
== TRUTH_AND_EXPR
8420 || code
== TRUTH_ANDIF_EXPR
8421 || code
== TRUTH_OR_EXPR
8422 || code
== TRUTH_ORIF_EXPR
))
8424 enum tree_code ncode
, icode
;
8426 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
8427 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
8428 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
8430 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8431 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8432 We don't want to pack more than two leafs to a non-IF AND/OR
8434 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8435 equal to IF-CODE, then we don't want to add right-hand operand.
8436 If the inner right-hand side of left-hand operand has
8437 side-effects, or isn't simple, then we can't add to it,
8438 as otherwise we might destroy if-sequence. */
8439 if (TREE_CODE (arg0
) == icode
8440 && simple_operand_p_2 (arg1
)
8441 /* Needed for sequence points to handle trappings, and
8443 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
8445 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
8447 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
8450 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8451 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8452 else if (TREE_CODE (arg1
) == icode
8453 && simple_operand_p_2 (arg0
)
8454 /* Needed for sequence points to handle trappings, and
8456 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
8458 tem
= fold_build2_loc (loc
, ncode
, type
,
8459 arg0
, TREE_OPERAND (arg1
, 0));
8460 return fold_build2_loc (loc
, icode
, type
, tem
,
8461 TREE_OPERAND (arg1
, 1));
8463 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8465 For sequence point consistency, we need to check for trapping,
8466 and side-effects. */
8467 else if (code
== icode
&& simple_operand_p_2 (arg0
)
8468 && simple_operand_p_2 (arg1
))
8469 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
8475 /* Fold a binary expression of code CODE and type TYPE with operands
8476 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8477 Return the folded expression if folding is successful. Otherwise,
8478 return NULL_TREE. */
8481 fold_minmax (location_t loc
, enum tree_code code
, tree type
, tree op0
, tree op1
)
8483 enum tree_code compl_code
;
8485 if (code
== MIN_EXPR
)
8486 compl_code
= MAX_EXPR
;
8487 else if (code
== MAX_EXPR
)
8488 compl_code
= MIN_EXPR
;
8492 /* MIN (MAX (a, b), b) == b. */
8493 if (TREE_CODE (op0
) == compl_code
8494 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
8495 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 0));
8497 /* MIN (MAX (b, a), b) == b. */
8498 if (TREE_CODE (op0
) == compl_code
8499 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
8500 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
8501 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 1));
8503 /* MIN (a, MAX (a, b)) == a. */
8504 if (TREE_CODE (op1
) == compl_code
8505 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
8506 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
8507 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 1));
8509 /* MIN (a, MAX (b, a)) == a. */
8510 if (TREE_CODE (op1
) == compl_code
8511 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
8512 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
8513 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 0));
8518 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8519 by changing CODE to reduce the magnitude of constants involved in
8520 ARG0 of the comparison.
8521 Returns a canonicalized comparison tree if a simplification was
8522 possible, otherwise returns NULL_TREE.
8523 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8524 valid if signed overflow is undefined. */
/* NOTE(review): this extraction has dropped physical lines (the
   declarations of the sign/swap locals and the `code = ...' assignments
   inside the branches below are not visible).  Only comments were
   changed; consult upstream fold-const.c before modifying the code.  */
8527 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8528 tree arg0
, tree arg1
,
8529 bool *strict_overflow_p
)
8531 enum tree_code code0
= TREE_CODE (arg0
);
8532 tree t
, cst0
= NULL_TREE
;
8536 /* Match A +- CST code arg1 and CST code arg1. We can change the
8537 first form only if overflow is undefined. */
8538 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8539 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
8540 /* In principle pointers also have undefined overflow behavior,
8541 but that causes problems elsewhere. */
8542 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8543 && (code0
== MINUS_EXPR
8544 || code0
== PLUS_EXPR
)
8545 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8546 || code0
== INTEGER_CST
))
8549 /* Identify the constant in arg0 and its sign. */
8550 if (code0
== INTEGER_CST
)
8553 cst0
= TREE_OPERAND (arg0
, 1);
8554 sgn0
= tree_int_cst_sgn (cst0
);
8556 /* Overflowed constants and zero will cause problems. */
8557 if (integer_zerop (cst0
)
8558 || TREE_OVERFLOW (cst0
))
8561 /* See if we can reduce the magnitude of the constant in
8562 arg0 by changing the comparison code. */
8563 if (code0
== INTEGER_CST
)
8565 /* CST <= arg1 -> CST-1 < arg1. */
8566 if (code
== LE_EXPR
&& sgn0
== 1)
8568 /* -CST < arg1 -> -CST-1 <= arg1. */
8569 else if (code
== LT_EXPR
&& sgn0
== -1)
8571 /* CST > arg1 -> CST-1 >= arg1. */
8572 else if (code
== GT_EXPR
&& sgn0
== 1)
8574 /* -CST >= arg1 -> -CST-1 > arg1. */
8575 else if (code
== GE_EXPR
&& sgn0
== -1)
8579 /* arg1 code' CST' might be more canonical. */
8584 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8586 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8588 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8589 else if (code
== GT_EXPR
8590 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8592 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8593 else if (code
== LE_EXPR
8594 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8596 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8597 else if (code
== GE_EXPR
8598 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
/* The A +- CST forms are only valid assuming undefined signed
   overflow; record that so callers can emit -Wstrict-overflow.  */
8602 *strict_overflow_p
= true;
8605 /* Now build the constant reduced in magnitude. But not if that
8606 would produce one outside of its types range. */
8607 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8609 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8610 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8612 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8613 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8614 /* We cannot swap the comparison here as that would cause us to
8615 endlessly recurse. */
/* Reduce the magnitude of the constant by one.  */
8618 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8619 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8620 if (code0
!= INTEGER_CST
)
8621 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8622 t
= fold_convert (TREE_TYPE (arg1
), t
);
8624 /* If swapping might yield to a more canonical form, do so. */
8626 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, arg1
, t
);
8628 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8631 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8632 overflow further. Try to decrease the magnitude of constants involved
8633 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8634 and put sole constants at the second argument position.
8635 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8638 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8639 tree arg0
, tree arg1
)
8642 bool strict_overflow_p
;
8643 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8644 "when reducing constant in comparison");
8646 /* Try canonicalization by simplifying arg0. */
8647 strict_overflow_p
= false;
8648 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8649 &strict_overflow_p
);
8652 if (strict_overflow_p
)
8653 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8657 /* Try canonicalization by simplifying arg1 using the swapped
8659 code
= swap_tree_comparison (code
);
8660 strict_overflow_p
= false;
8661 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8662 &strict_overflow_p
);
8663 if (t
&& strict_overflow_p
)
8664 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8668 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8669 space. This is used to avoid issuing overflow warnings for
8670 expressions like &p->x which can not wrap. */
8673 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8675 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8682 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
8683 if (offset
== NULL_TREE
)
8684 wi_offset
= wi::zero (precision
);
8685 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8691 wide_int units
= wi::shwi (bitpos
/ BITS_PER_UNIT
, precision
);
8692 wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
8696 if (!wi::fits_uhwi_p (total
))
8699 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8703 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8705 if (TREE_CODE (base
) == ADDR_EXPR
)
8707 HOST_WIDE_INT base_size
;
8709 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8710 if (base_size
> 0 && size
< base_size
)
8714 return total
.to_uhwi () > (unsigned HOST_WIDE_INT
) size
;
8717 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8718 kind INTEGER_CST. This makes sure to properly sign-extend the
8721 static HOST_WIDE_INT
8722 size_low_cst (const_tree t
)
8724 HOST_WIDE_INT w
= TREE_INT_CST_ELT (t
, 0);
8725 int prec
= TYPE_PRECISION (TREE_TYPE (t
));
8726 if (prec
< HOST_BITS_PER_WIDE_INT
)
8727 return sext_hwi (w
, prec
);
8731 /* Subroutine of fold_binary. This routine performs all of the
8732 transformations that are common to the equality/inequality
8733 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8734 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8735 fold_binary should call fold_binary. Fold a comparison with
8736 tree code CODE and type TYPE with operands OP0 and OP1. Return
8737 the folded comparison or NULL_TREE. */
8740 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8743 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
8744 tree arg0
, arg1
, tem
;
8749 STRIP_SIGN_NOPS (arg0
);
8750 STRIP_SIGN_NOPS (arg1
);
8752 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8753 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8755 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8756 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
8757 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8758 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8759 && TREE_CODE (arg1
) == INTEGER_CST
8760 && !TREE_OVERFLOW (arg1
))
8762 const enum tree_code
8763 reverse_op
= TREE_CODE (arg0
) == PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
;
8764 tree const1
= TREE_OPERAND (arg0
, 1);
8765 tree const2
= fold_convert_loc (loc
, TREE_TYPE (const1
), arg1
);
8766 tree variable
= TREE_OPERAND (arg0
, 0);
8767 tree new_const
= int_const_binop (reverse_op
, const2
, const1
);
8769 /* If the constant operation overflowed this can be
8770 simplified as a comparison against INT_MAX/INT_MIN. */
8771 if (TREE_OVERFLOW (new_const
)
8772 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
8774 int const1_sgn
= tree_int_cst_sgn (const1
);
8775 enum tree_code code2
= code
;
8777 /* Get the sign of the constant on the lhs if the
8778 operation were VARIABLE + CONST1. */
8779 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8780 const1_sgn
= -const1_sgn
;
8782 /* The sign of the constant determines if we overflowed
8783 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8784 Canonicalize to the INT_MIN overflow by swapping the comparison
8786 if (const1_sgn
== -1)
8787 code2
= swap_tree_comparison (code
);
8789 /* We now can look at the canonicalized case
8790 VARIABLE + 1 CODE2 INT_MIN
8791 and decide on the result. */
8798 omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8804 omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8813 fold_overflow_warning ("assuming signed overflow does not occur "
8814 "when changing X +- C1 cmp C2 to "
8816 WARN_STRICT_OVERFLOW_COMPARISON
);
8817 return fold_build2_loc (loc
, code
, type
, variable
, new_const
);
8821 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8822 if (TREE_CODE (arg0
) == MINUS_EXPR
8824 && integer_zerop (arg1
))
8826 /* ??? The transformation is valid for the other operators if overflow
8827 is undefined for the type, but performing it here badly interacts
8828 with the transformation in fold_cond_expr_with_comparison which
8829 attempts to synthetize ABS_EXPR. */
8831 fold_overflow_warning ("assuming signed overflow does not occur "
8832 "when changing X - Y cmp 0 to X cmp Y",
8833 WARN_STRICT_OVERFLOW_COMPARISON
);
8834 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
8835 TREE_OPERAND (arg0
, 1));
8838 /* For comparisons of pointers we can decompose it to a compile time
8839 comparison of the base objects and the offsets into the object.
8840 This requires at least one operand being an ADDR_EXPR or a
8841 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8842 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8843 && (TREE_CODE (arg0
) == ADDR_EXPR
8844 || TREE_CODE (arg1
) == ADDR_EXPR
8845 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8846 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8848 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8849 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8851 int volatilep
, unsignedp
;
8852 bool indirect_base0
= false, indirect_base1
= false;
8854 /* Get base and offset for the access. Strip ADDR_EXPR for
8855 get_inner_reference, but put it back by stripping INDIRECT_REF
8856 off the base object if possible. indirect_baseN will be true
8857 if baseN is not an address but refers to the object itself. */
8859 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8861 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8862 &bitsize
, &bitpos0
, &offset0
, &mode
,
8863 &unsignedp
, &volatilep
, false);
8864 if (TREE_CODE (base0
) == INDIRECT_REF
)
8865 base0
= TREE_OPERAND (base0
, 0);
8867 indirect_base0
= true;
8869 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8871 base0
= TREE_OPERAND (arg0
, 0);
8872 STRIP_SIGN_NOPS (base0
);
8873 if (TREE_CODE (base0
) == ADDR_EXPR
)
8875 base0
= TREE_OPERAND (base0
, 0);
8876 indirect_base0
= true;
8878 offset0
= TREE_OPERAND (arg0
, 1);
8879 if (tree_fits_shwi_p (offset0
))
8881 HOST_WIDE_INT off
= size_low_cst (offset0
);
8882 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8884 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8886 bitpos0
= off
* BITS_PER_UNIT
;
8887 offset0
= NULL_TREE
;
8893 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8895 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8896 &bitsize
, &bitpos1
, &offset1
, &mode
,
8897 &unsignedp
, &volatilep
, false);
8898 if (TREE_CODE (base1
) == INDIRECT_REF
)
8899 base1
= TREE_OPERAND (base1
, 0);
8901 indirect_base1
= true;
8903 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8905 base1
= TREE_OPERAND (arg1
, 0);
8906 STRIP_SIGN_NOPS (base1
);
8907 if (TREE_CODE (base1
) == ADDR_EXPR
)
8909 base1
= TREE_OPERAND (base1
, 0);
8910 indirect_base1
= true;
8912 offset1
= TREE_OPERAND (arg1
, 1);
8913 if (tree_fits_shwi_p (offset1
))
8915 HOST_WIDE_INT off
= size_low_cst (offset1
);
8916 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8918 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8920 bitpos1
= off
* BITS_PER_UNIT
;
8921 offset1
= NULL_TREE
;
8926 /* A local variable can never be pointed to by
8927 the default SSA name of an incoming parameter. */
8928 if ((TREE_CODE (arg0
) == ADDR_EXPR
8930 && TREE_CODE (base0
) == VAR_DECL
8931 && auto_var_in_fn_p (base0
, current_function_decl
)
8933 && TREE_CODE (base1
) == SSA_NAME
8934 && SSA_NAME_IS_DEFAULT_DEF (base1
)
8935 && TREE_CODE (SSA_NAME_VAR (base1
)) == PARM_DECL
)
8936 || (TREE_CODE (arg1
) == ADDR_EXPR
8938 && TREE_CODE (base1
) == VAR_DECL
8939 && auto_var_in_fn_p (base1
, current_function_decl
)
8941 && TREE_CODE (base0
) == SSA_NAME
8942 && SSA_NAME_IS_DEFAULT_DEF (base0
)
8943 && TREE_CODE (SSA_NAME_VAR (base0
)) == PARM_DECL
))
8945 if (code
== NE_EXPR
)
8946 return constant_boolean_node (1, type
);
8947 else if (code
== EQ_EXPR
)
8948 return constant_boolean_node (0, type
);
8950 /* If we have equivalent bases we might be able to simplify. */
8951 else if (indirect_base0
== indirect_base1
8952 && operand_equal_p (base0
, base1
, 0))
8954 /* We can fold this expression to a constant if the non-constant
8955 offset parts are equal. */
8956 if ((offset0
== offset1
8957 || (offset0
&& offset1
8958 && operand_equal_p (offset0
, offset1
, 0)))
8961 || (indirect_base0
&& DECL_P (base0
))
8962 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8966 && bitpos0
!= bitpos1
8967 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8968 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8969 fold_overflow_warning (("assuming pointer wraparound does not "
8970 "occur when comparing P +- C1 with "
8972 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8977 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8979 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8981 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8983 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8985 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8987 return constant_boolean_node (bitpos0
> bitpos1
, type
);
8991 /* We can simplify the comparison to a comparison of the variable
8992 offset parts if the constant offset parts are equal.
8993 Be careful to use signed sizetype here because otherwise we
8994 mess with array offsets in the wrong way. This is possible
8995 because pointer arithmetic is restricted to retain within an
8996 object and overflow on pointer differences is undefined as of
8997 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8998 else if (bitpos0
== bitpos1
9000 || (indirect_base0
&& DECL_P (base0
))
9001 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
9003 /* By converting to signed sizetype we cover middle-end pointer
9004 arithmetic which operates on unsigned pointer types of size
9005 type size and ARRAY_REF offsets which are properly sign or
9006 zero extended from their type in case it is narrower than
9008 if (offset0
== NULL_TREE
)
9009 offset0
= build_int_cst (ssizetype
, 0);
9011 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
9012 if (offset1
== NULL_TREE
)
9013 offset1
= build_int_cst (ssizetype
, 0);
9015 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
9018 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
9019 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
9020 fold_overflow_warning (("assuming pointer wraparound does not "
9021 "occur when comparing P +- C1 with "
9023 WARN_STRICT_OVERFLOW_COMPARISON
);
9025 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
9028 /* For non-equal bases we can simplify if they are addresses
9029 declarations with different addresses. */
9030 else if (indirect_base0
&& indirect_base1
9031 /* We know that !operand_equal_p (base0, base1, 0)
9032 because the if condition was false. But make
9033 sure two decls are not the same. */
9035 && TREE_CODE (arg0
) == ADDR_EXPR
9036 && TREE_CODE (arg1
) == ADDR_EXPR
9039 /* Watch for aliases. */
9040 && (!decl_in_symtab_p (base0
)
9041 || !decl_in_symtab_p (base1
)
9042 || !symtab_node::get_create (base0
)->equal_address_to
9043 (symtab_node::get_create (base1
))))
9045 if (code
== EQ_EXPR
)
9046 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
9048 else if (code
== NE_EXPR
)
9049 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
9052 /* For equal offsets we can simplify to a comparison of the
9054 else if (bitpos0
== bitpos1
9056 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
9058 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
9059 && ((offset0
== offset1
)
9060 || (offset0
&& offset1
9061 && operand_equal_p (offset0
, offset1
, 0))))
9064 base0
= build_fold_addr_expr_loc (loc
, base0
);
9066 base1
= build_fold_addr_expr_loc (loc
, base1
);
9067 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
9071 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9072 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9073 the resulting offset is smaller in absolute value than the
9074 original one and has the same sign. */
9075 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9076 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9077 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
9078 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9079 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9080 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
9081 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9082 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
9084 tree const1
= TREE_OPERAND (arg0
, 1);
9085 tree const2
= TREE_OPERAND (arg1
, 1);
9086 tree variable1
= TREE_OPERAND (arg0
, 0);
9087 tree variable2
= TREE_OPERAND (arg1
, 0);
9089 const char * const warnmsg
= G_("assuming signed overflow does not "
9090 "occur when combining constants around "
9093 /* Put the constant on the side where it doesn't overflow and is
9094 of lower absolute value and of same sign than before. */
9095 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9096 ? MINUS_EXPR
: PLUS_EXPR
,
9098 if (!TREE_OVERFLOW (cst
)
9099 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
9100 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
9102 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9103 return fold_build2_loc (loc
, code
, type
,
9105 fold_build2_loc (loc
, TREE_CODE (arg1
),
9110 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9111 ? MINUS_EXPR
: PLUS_EXPR
,
9113 if (!TREE_OVERFLOW (cst
)
9114 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
9115 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
9117 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9118 return fold_build2_loc (loc
, code
, type
,
9119 fold_build2_loc (loc
, TREE_CODE (arg0
),
9126 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9127 signed arithmetic case. That form is created by the compiler
9128 often enough for folding it to be of value. One example is in
9129 computing loop trip counts after Operator Strength Reduction. */
9130 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9131 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9132 && TREE_CODE (arg0
) == MULT_EXPR
9133 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9134 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9135 && integer_zerop (arg1
))
9137 tree const1
= TREE_OPERAND (arg0
, 1);
9138 tree const2
= arg1
; /* zero */
9139 tree variable1
= TREE_OPERAND (arg0
, 0);
9140 enum tree_code cmp_code
= code
;
9142 /* Handle unfolded multiplication by zero. */
9143 if (integer_zerop (const1
))
9144 return fold_build2_loc (loc
, cmp_code
, type
, const1
, const2
);
9146 fold_overflow_warning (("assuming signed overflow does not occur when "
9147 "eliminating multiplication in comparison "
9149 WARN_STRICT_OVERFLOW_COMPARISON
);
9151 /* If const1 is negative we swap the sense of the comparison. */
9152 if (tree_int_cst_sgn (const1
) < 0)
9153 cmp_code
= swap_tree_comparison (cmp_code
);
9155 return fold_build2_loc (loc
, cmp_code
, type
, variable1
, const2
);
9158 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
9162 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9164 tree targ0
= strip_float_extensions (arg0
);
9165 tree targ1
= strip_float_extensions (arg1
);
9166 tree newtype
= TREE_TYPE (targ0
);
9168 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9169 newtype
= TREE_TYPE (targ1
);
9171 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9172 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9173 return fold_build2_loc (loc
, code
, type
,
9174 fold_convert_loc (loc
, newtype
, targ0
),
9175 fold_convert_loc (loc
, newtype
, targ1
));
9177 /* (-a) CMP (-b) -> b CMP a */
9178 if (TREE_CODE (arg0
) == NEGATE_EXPR
9179 && TREE_CODE (arg1
) == NEGATE_EXPR
)
9180 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg1
, 0),
9181 TREE_OPERAND (arg0
, 0));
9183 if (TREE_CODE (arg1
) == REAL_CST
)
9185 REAL_VALUE_TYPE cst
;
9186 cst
= TREE_REAL_CST (arg1
);
9188 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9189 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
9190 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9191 TREE_OPERAND (arg0
, 0),
9192 build_real (TREE_TYPE (arg1
),
9193 real_value_negate (&cst
)));
9195 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9196 /* a CMP (-0) -> a CMP 0 */
9197 if (REAL_VALUE_MINUS_ZERO (cst
))
9198 return fold_build2_loc (loc
, code
, type
, arg0
,
9199 build_real (TREE_TYPE (arg1
), dconst0
));
9201 /* x != NaN is always true, other ops are always false. */
9202 if (REAL_VALUE_ISNAN (cst
)
9203 && ! HONOR_SNANS (arg1
))
9205 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
9206 return omit_one_operand_loc (loc
, type
, tem
, arg0
);
9209 /* Fold comparisons against infinity. */
9210 if (REAL_VALUE_ISINF (cst
)
9211 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
))))
9213 tem
= fold_inf_compare (loc
, code
, type
, arg0
, arg1
);
9214 if (tem
!= NULL_TREE
)
9219 /* If this is a comparison of a real constant with a PLUS_EXPR
9220 or a MINUS_EXPR of a real constant, we can convert it into a
9221 comparison with a revised real constant as long as no overflow
9222 occurs when unsafe_math_optimizations are enabled. */
9223 if (flag_unsafe_math_optimizations
9224 && TREE_CODE (arg1
) == REAL_CST
9225 && (TREE_CODE (arg0
) == PLUS_EXPR
9226 || TREE_CODE (arg0
) == MINUS_EXPR
)
9227 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
9228 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9229 ? MINUS_EXPR
: PLUS_EXPR
,
9230 arg1
, TREE_OPERAND (arg0
, 1)))
9231 && !TREE_OVERFLOW (tem
))
9232 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9234 /* Likewise, we can simplify a comparison of a real constant with
9235 a MINUS_EXPR whose first operand is also a real constant, i.e.
9236 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9237 floating-point types only if -fassociative-math is set. */
9238 if (flag_associative_math
9239 && TREE_CODE (arg1
) == REAL_CST
9240 && TREE_CODE (arg0
) == MINUS_EXPR
9241 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
9242 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
9244 && !TREE_OVERFLOW (tem
))
9245 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9246 TREE_OPERAND (arg0
, 1), tem
);
9248 /* Fold comparisons against built-in math functions. */
9249 if (TREE_CODE (arg1
) == REAL_CST
9250 && flag_unsafe_math_optimizations
9251 && ! flag_errno_math
)
9253 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
9255 if (fcode
!= END_BUILTINS
)
9257 tem
= fold_mathfn_compare (loc
, fcode
, code
, type
, arg0
, arg1
);
9258 if (tem
!= NULL_TREE
)
9264 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
9265 && CONVERT_EXPR_P (arg0
))
9267 /* If we are widening one operand of an integer comparison,
9268 see if the other operand is similarly being widened. Perhaps we
9269 can do the comparison in the narrower type. */
9270 tem
= fold_widened_comparison (loc
, code
, type
, arg0
, arg1
);
9274 /* Or if we are changing signedness. */
9275 tem
= fold_sign_changed_comparison (loc
, code
, type
, arg0
, arg1
);
9280 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9281 constant, we can simplify it. */
9282 if (TREE_CODE (arg1
) == INTEGER_CST
9283 && (TREE_CODE (arg0
) == MIN_EXPR
9284 || TREE_CODE (arg0
) == MAX_EXPR
)
9285 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9287 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
9292 /* Simplify comparison of something with itself. (For IEEE
9293 floating-point, we can only do some of these simplifications.) */
9294 if (operand_equal_p (arg0
, arg1
, 0))
9299 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9300 || ! HONOR_NANS (arg0
))
9301 return constant_boolean_node (1, type
);
9306 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9307 || ! HONOR_NANS (arg0
))
9308 return constant_boolean_node (1, type
);
9309 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg1
);
9312 /* For NE, we can only do this simplification if integer
9313 or we don't honor IEEE floating point NaNs. */
9314 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
9315 && HONOR_NANS (arg0
))
9317 /* ... fall through ... */
9320 return constant_boolean_node (0, type
);
9326 /* If we are comparing an expression that just has comparisons
9327 of two integer values, arithmetic expressions of those comparisons,
9328 and constants, we can simplify it. There are only three cases
9329 to check: the two values can either be equal, the first can be
9330 greater, or the second can be greater. Fold the expression for
9331 those three values. Since each value must be 0 or 1, we have
9332 eight possibilities, each of which corresponds to the constant 0
9333 or 1 or one of the six possible comparisons.
9335 This handles common cases like (a > b) == 0 but also handles
9336 expressions like ((x > y) - (y > x)) > 0, which supposedly
9337 occur in macroized code. */
9339 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
9341 tree cval1
= 0, cval2
= 0;
9344 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
9345 /* Don't handle degenerate cases here; they should already
9346 have been handled anyway. */
9347 && cval1
!= 0 && cval2
!= 0
9348 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
9349 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
9350 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
9351 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
9352 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
9353 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
9354 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
9356 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
9357 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
9359 /* We can't just pass T to eval_subst in case cval1 or cval2
9360 was the same as ARG1. */
9363 = fold_build2_loc (loc
, code
, type
,
9364 eval_subst (loc
, arg0
, cval1
, maxval
,
9368 = fold_build2_loc (loc
, code
, type
,
9369 eval_subst (loc
, arg0
, cval1
, maxval
,
9373 = fold_build2_loc (loc
, code
, type
,
9374 eval_subst (loc
, arg0
, cval1
, minval
,
9378 /* All three of these results should be 0 or 1. Confirm they are.
9379 Then use those values to select the proper code to use. */
9381 if (TREE_CODE (high_result
) == INTEGER_CST
9382 && TREE_CODE (equal_result
) == INTEGER_CST
9383 && TREE_CODE (low_result
) == INTEGER_CST
)
9385 /* Make a 3-bit mask with the high-order bit being the
9386 value for `>', the next for '=', and the low for '<'. */
9387 switch ((integer_onep (high_result
) * 4)
9388 + (integer_onep (equal_result
) * 2)
9389 + integer_onep (low_result
))
9393 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
9414 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
9419 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
9420 SET_EXPR_LOCATION (tem
, loc
);
9423 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
9428 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9429 into a single range test. */
9430 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
9431 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
9432 && TREE_CODE (arg1
) == INTEGER_CST
9433 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9434 && !integer_zerop (TREE_OPERAND (arg0
, 1))
9435 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
9436 && !TREE_OVERFLOW (arg1
))
9438 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
9439 if (tem
!= NULL_TREE
)
9443 /* Fold ~X op ~Y as Y op X. */
9444 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9445 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9447 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9448 return fold_build2_loc (loc
, code
, type
,
9449 fold_convert_loc (loc
, cmp_type
,
9450 TREE_OPERAND (arg1
, 0)),
9451 TREE_OPERAND (arg0
, 0));
9454 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9455 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9456 && (TREE_CODE (arg1
) == INTEGER_CST
|| TREE_CODE (arg1
) == VECTOR_CST
))
9458 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9459 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9460 TREE_OPERAND (arg0
, 0),
9461 fold_build1_loc (loc
, BIT_NOT_EXPR
, cmp_type
,
9462 fold_convert_loc (loc
, cmp_type
, arg1
)));
9469 /* Subroutine of fold_binary. Optimize complex multiplications of the
9470 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9471 argument EXPR represents the expression "z" of type TYPE. */
9474 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
9476 tree itype
= TREE_TYPE (type
);
9477 tree rpart
, ipart
, tem
;
9479 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
9481 rpart
= TREE_OPERAND (expr
, 0);
9482 ipart
= TREE_OPERAND (expr
, 1);
9484 else if (TREE_CODE (expr
) == COMPLEX_CST
)
9486 rpart
= TREE_REALPART (expr
);
9487 ipart
= TREE_IMAGPART (expr
);
9491 expr
= save_expr (expr
);
9492 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
9493 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
9496 rpart
= save_expr (rpart
);
9497 ipart
= save_expr (ipart
);
9498 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
9499 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
9500 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
9501 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
9502 build_zero_cst (itype
));
9506 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9507 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9508 guarantees that P and N have the same least significant log2(M) bits.
9509 N is not otherwise constrained. In particular, N is not normalized to
9510 0 <= N < M as is common. In general, the precise value of P is unknown.
9511 M is chosen as large as possible such that constant N can be determined.
9513 Returns M and sets *RESIDUE to N.
9515 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9516 account. This is not always possible due to PR 35705.
9519 static unsigned HOST_WIDE_INT
9520 get_pointer_modulus_and_residue (tree expr
, unsigned HOST_WIDE_INT
*residue
,
9521 bool allow_func_align
)
9523 enum tree_code code
;
9527 code
= TREE_CODE (expr
);
9528 if (code
== ADDR_EXPR
)
9530 unsigned int bitalign
;
9531 get_object_alignment_1 (TREE_OPERAND (expr
, 0), &bitalign
, residue
);
9532 *residue
/= BITS_PER_UNIT
;
9533 return bitalign
/ BITS_PER_UNIT
;
9535 else if (code
== POINTER_PLUS_EXPR
)
9538 unsigned HOST_WIDE_INT modulus
;
9539 enum tree_code inner_code
;
9541 op0
= TREE_OPERAND (expr
, 0);
9543 modulus
= get_pointer_modulus_and_residue (op0
, residue
,
9546 op1
= TREE_OPERAND (expr
, 1);
9548 inner_code
= TREE_CODE (op1
);
9549 if (inner_code
== INTEGER_CST
)
9551 *residue
+= TREE_INT_CST_LOW (op1
);
9554 else if (inner_code
== MULT_EXPR
)
9556 op1
= TREE_OPERAND (op1
, 1);
9557 if (TREE_CODE (op1
) == INTEGER_CST
)
9559 unsigned HOST_WIDE_INT align
;
9561 /* Compute the greatest power-of-2 divisor of op1. */
9562 align
= TREE_INT_CST_LOW (op1
);
9565 /* If align is non-zero and less than *modulus, replace
9566 *modulus with align., If align is 0, then either op1 is 0
9567 or the greatest power-of-2 divisor of op1 doesn't fit in an
9568 unsigned HOST_WIDE_INT. In either case, no additional
9569 constraint is imposed. */
9571 modulus
= MIN (modulus
, align
);
9578 /* If we get here, we were unable to determine anything useful about the
9583 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9584 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9587 vec_cst_ctor_to_array (tree arg
, tree
*elts
)
9589 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)), i
;
9591 if (TREE_CODE (arg
) == VECTOR_CST
)
9593 for (i
= 0; i
< VECTOR_CST_NELTS (arg
); ++i
)
9594 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
9596 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
9598 constructor_elt
*elt
;
9600 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg
), i
, elt
)
9601 if (i
>= nelts
|| TREE_CODE (TREE_TYPE (elt
->value
)) == VECTOR_TYPE
)
9604 elts
[i
] = elt
->value
;
9608 for (; i
< nelts
; i
++)
9610 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
9614 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9615 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9616 NULL_TREE otherwise. */
9619 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const unsigned char *sel
)
9621 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
9623 bool need_ctor
= false;
9625 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
9626 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
);
9627 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
9628 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
9631 elts
= XALLOCAVEC (tree
, nelts
* 3);
9632 if (!vec_cst_ctor_to_array (arg0
, elts
)
9633 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
))
9636 for (i
= 0; i
< nelts
; i
++)
9638 if (!CONSTANT_CLASS_P (elts
[sel
[i
]]))
9640 elts
[i
+ 2 * nelts
] = unshare_expr (elts
[sel
[i
]]);
9645 vec
<constructor_elt
, va_gc
> *v
;
9646 vec_alloc (v
, nelts
);
9647 for (i
= 0; i
< nelts
; i
++)
9648 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, elts
[2 * nelts
+ i
]);
9649 return build_constructor (type
, v
);
9652 return build_vector (type
, &elts
[2 * nelts
]);
9655 /* Try to fold a pointer difference of type TYPE two address expressions of
9656 array references AREF0 and AREF1 using location LOC. Return a
9657 simplified expression for the difference or NULL_TREE. */
9660 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
9661 tree aref0
, tree aref1
)
9663 tree base0
= TREE_OPERAND (aref0
, 0);
9664 tree base1
= TREE_OPERAND (aref1
, 0);
9665 tree base_offset
= build_int_cst (type
, 0);
9667 /* If the bases are array references as well, recurse. If the bases
9668 are pointer indirections compute the difference of the pointers.
9669 If the bases are equal, we are set. */
9670 if ((TREE_CODE (base0
) == ARRAY_REF
9671 && TREE_CODE (base1
) == ARRAY_REF
9673 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
)))
9674 || (INDIRECT_REF_P (base0
)
9675 && INDIRECT_REF_P (base1
)
9676 && (base_offset
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
9677 TREE_OPERAND (base0
, 0),
9678 TREE_OPERAND (base1
, 0))))
9679 || operand_equal_p (base0
, base1
, 0))
9681 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
9682 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
9683 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
9684 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
9685 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9687 fold_build2_loc (loc
, MULT_EXPR
, type
,
9693 /* If the real or vector real constant CST of type TYPE has an exact
9694 inverse, return it, else return NULL. */
9697 exact_inverse (tree type
, tree cst
)
9700 tree unit_type
, *elts
;
9702 unsigned vec_nelts
, i
;
9704 switch (TREE_CODE (cst
))
9707 r
= TREE_REAL_CST (cst
);
9709 if (exact_real_inverse (TYPE_MODE (type
), &r
))
9710 return build_real (type
, r
);
9715 vec_nelts
= VECTOR_CST_NELTS (cst
);
9716 elts
= XALLOCAVEC (tree
, vec_nelts
);
9717 unit_type
= TREE_TYPE (type
);
9718 mode
= TYPE_MODE (unit_type
);
9720 for (i
= 0; i
< vec_nelts
; i
++)
9722 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
9723 if (!exact_real_inverse (mode
, &r
))
9725 elts
[i
] = build_real (unit_type
, r
);
9728 return build_vector (type
, elts
);
9735 /* Mask out the tz least significant bits of X of type TYPE where
9736 tz is the number of trailing zeroes in Y. */
9738 mask_with_tz (tree type
, const wide_int
&x
, const wide_int
&y
)
9740 int tz
= wi::ctz (y
);
9742 return wi::mask (tz
, true, TYPE_PRECISION (type
)) & x
;
9746 /* Return true when T is an address and is known to be nonzero.
9747 For floating point we further ensure that T is not denormal.
9748 Similar logic is present in nonzero_address in rtlanal.h.
9750 If the return value is based on the assumption that signed overflow
9751 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9752 change *STRICT_OVERFLOW_P. */
9755 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
9757 tree type
= TREE_TYPE (t
);
9758 enum tree_code code
;
9760 /* Doing something useful for floating point would need more work. */
9761 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
9764 code
= TREE_CODE (t
);
9765 switch (TREE_CODE_CLASS (code
))
9768 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9771 case tcc_comparison
:
9772 return tree_binary_nonzero_warnv_p (code
, type
,
9773 TREE_OPERAND (t
, 0),
9774 TREE_OPERAND (t
, 1),
9777 case tcc_declaration
:
9779 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
9787 case TRUTH_NOT_EXPR
:
9788 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9791 case TRUTH_AND_EXPR
:
9793 case TRUTH_XOR_EXPR
:
9794 return tree_binary_nonzero_warnv_p (code
, type
,
9795 TREE_OPERAND (t
, 0),
9796 TREE_OPERAND (t
, 1),
9804 case WITH_SIZE_EXPR
:
9806 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
9811 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
9815 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
9820 tree fndecl
= get_callee_fndecl (t
);
9821 if (!fndecl
) return false;
9822 if (flag_delete_null_pointer_checks
&& !flag_check_new
9823 && DECL_IS_OPERATOR_NEW (fndecl
)
9824 && !TREE_NOTHROW (fndecl
))
9826 if (flag_delete_null_pointer_checks
9827 && lookup_attribute ("returns_nonnull",
9828 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
9830 return alloca_call_p (t
);
9839 /* Return true when T is an address and is known to be nonzero.
9840 Handle warnings about undefined signed overflow. */
9843 tree_expr_nonzero_p (tree t
)
9845 bool ret
, strict_overflow_p
;
9847 strict_overflow_p
= false;
9848 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
9849 if (strict_overflow_p
)
9850 fold_overflow_warning (("assuming signed overflow does not occur when "
9851 "determining that expression is always "
9853 WARN_STRICT_OVERFLOW_MISC
);
9857 /* Fold a binary expression of code CODE and type TYPE with operands
9858 OP0 and OP1. LOC is the location of the resulting expression.
9859 Return the folded expression if folding is successful. Otherwise,
9860 return NULL_TREE. */
9863 fold_binary_loc (location_t loc
,
9864 enum tree_code code
, tree type
, tree op0
, tree op1
)
9866 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9867 tree arg0
, arg1
, tem
;
9868 tree t1
= NULL_TREE
;
9869 bool strict_overflow_p
;
9872 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9873 && TREE_CODE_LENGTH (code
) == 2
9875 && op1
!= NULL_TREE
);
9880 /* Strip any conversions that don't change the mode. This is
9881 safe for every expression, except for a comparison expression
9882 because its signedness is derived from its operands. So, in
9883 the latter case, only strip conversions that don't change the
9884 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9887 Note that this is done as an internal manipulation within the
9888 constant folder, in order to find the simplest representation
9889 of the arguments so that their form can be studied. In any
9890 cases, the appropriate type conversions should be put back in
9891 the tree that will get out of the constant folder. */
9893 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9895 STRIP_SIGN_NOPS (arg0
);
9896 STRIP_SIGN_NOPS (arg1
);
9904 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9905 constant but we can't do arithmetic on them. */
9906 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
9908 tem
= const_binop (code
, type
, arg0
, arg1
);
9909 if (tem
!= NULL_TREE
)
9911 if (TREE_TYPE (tem
) != type
)
9912 tem
= fold_convert_loc (loc
, type
, tem
);
9917 /* If this is a commutative operation, and ARG0 is a constant, move it
9918 to ARG1 to reduce the number of tests below. */
9919 if (commutative_tree_code (code
)
9920 && tree_swap_operands_p (arg0
, arg1
, true))
9921 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9923 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9924 to ARG1 to reduce the number of tests below. */
9925 if (kind
== tcc_comparison
9926 && tree_swap_operands_p (arg0
, arg1
, true))
9927 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9929 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9933 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9935 First check for cases where an arithmetic operation is applied to a
9936 compound, conditional, or comparison operation. Push the arithmetic
9937 operation inside the compound or conditional to see if any folding
9938 can then be done. Convert comparison to conditional for this purpose.
9939 The also optimizes non-constant cases that used to be done in
9942 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9943 one of the operands is a comparison and the other is a comparison, a
9944 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9945 code below would make the expression more complex. Change it to a
9946 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9947 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9949 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9950 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9951 && TREE_CODE (type
) != VECTOR_TYPE
9952 && ((truth_value_p (TREE_CODE (arg0
))
9953 && (truth_value_p (TREE_CODE (arg1
))
9954 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9955 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9956 || (truth_value_p (TREE_CODE (arg1
))
9957 && (truth_value_p (TREE_CODE (arg0
))
9958 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9959 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9961 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9962 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9965 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9966 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9968 if (code
== EQ_EXPR
)
9969 tem
= invert_truthvalue_loc (loc
, tem
);
9971 return fold_convert_loc (loc
, type
, tem
);
9974 if (TREE_CODE_CLASS (code
) == tcc_binary
9975 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9977 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9979 tem
= fold_build2_loc (loc
, code
, type
,
9980 fold_convert_loc (loc
, TREE_TYPE (op0
),
9981 TREE_OPERAND (arg0
, 1)), op1
);
9982 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9985 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9986 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9988 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9989 fold_convert_loc (loc
, TREE_TYPE (op1
),
9990 TREE_OPERAND (arg1
, 1)));
9991 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9995 if (TREE_CODE (arg0
) == COND_EXPR
9996 || TREE_CODE (arg0
) == VEC_COND_EXPR
9997 || COMPARISON_CLASS_P (arg0
))
9999 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10001 /*cond_first_p=*/1);
10002 if (tem
!= NULL_TREE
)
10006 if (TREE_CODE (arg1
) == COND_EXPR
10007 || TREE_CODE (arg1
) == VEC_COND_EXPR
10008 || COMPARISON_CLASS_P (arg1
))
10010 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10012 /*cond_first_p=*/0);
10013 if (tem
!= NULL_TREE
)
10021 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10022 if (TREE_CODE (arg0
) == ADDR_EXPR
10023 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
10025 tree iref
= TREE_OPERAND (arg0
, 0);
10026 return fold_build2 (MEM_REF
, type
,
10027 TREE_OPERAND (iref
, 0),
10028 int_const_binop (PLUS_EXPR
, arg1
,
10029 TREE_OPERAND (iref
, 1)));
10032 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10033 if (TREE_CODE (arg0
) == ADDR_EXPR
10034 && handled_component_p (TREE_OPERAND (arg0
, 0)))
10037 HOST_WIDE_INT coffset
;
10038 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
10042 return fold_build2 (MEM_REF
, type
,
10043 build_fold_addr_expr (base
),
10044 int_const_binop (PLUS_EXPR
, arg1
,
10045 size_int (coffset
)));
10050 case POINTER_PLUS_EXPR
:
10051 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10052 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10053 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
10054 return fold_convert_loc (loc
, type
,
10055 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
10056 fold_convert_loc (loc
, sizetype
,
10058 fold_convert_loc (loc
, sizetype
,
10064 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
10066 /* X + (X / CST) * -CST is X % CST. */
10067 if (TREE_CODE (arg1
) == MULT_EXPR
10068 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10069 && operand_equal_p (arg0
,
10070 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
10072 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
10073 tree cst1
= TREE_OPERAND (arg1
, 1);
10074 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
10076 if (sum
&& integer_zerop (sum
))
10077 return fold_convert_loc (loc
, type
,
10078 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
10079 TREE_TYPE (arg0
), arg0
,
10084 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10085 one. Make sure the type is not saturating and has the signedness of
10086 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10087 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10088 if ((TREE_CODE (arg0
) == MULT_EXPR
10089 || TREE_CODE (arg1
) == MULT_EXPR
)
10090 && !TYPE_SATURATING (type
)
10091 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10092 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10093 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10095 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10100 if (! FLOAT_TYPE_P (type
))
10102 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10103 with a constant, and the two constants have no bits in common,
10104 we should treat this as a BIT_IOR_EXPR since this may produce more
10105 simplifications. */
10106 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10107 && TREE_CODE (arg1
) == BIT_AND_EXPR
10108 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10109 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
10110 && wi::bit_and (TREE_OPERAND (arg0
, 1),
10111 TREE_OPERAND (arg1
, 1)) == 0)
10113 code
= BIT_IOR_EXPR
;
10117 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10118 (plus (plus (mult) (mult)) (foo)) so that we can
10119 take advantage of the factoring cases below. */
10120 if (ANY_INTEGRAL_TYPE_P (type
)
10121 && TYPE_OVERFLOW_WRAPS (type
)
10122 && (((TREE_CODE (arg0
) == PLUS_EXPR
10123 || TREE_CODE (arg0
) == MINUS_EXPR
)
10124 && TREE_CODE (arg1
) == MULT_EXPR
)
10125 || ((TREE_CODE (arg1
) == PLUS_EXPR
10126 || TREE_CODE (arg1
) == MINUS_EXPR
)
10127 && TREE_CODE (arg0
) == MULT_EXPR
)))
10129 tree parg0
, parg1
, parg
, marg
;
10130 enum tree_code pcode
;
10132 if (TREE_CODE (arg1
) == MULT_EXPR
)
10133 parg
= arg0
, marg
= arg1
;
10135 parg
= arg1
, marg
= arg0
;
10136 pcode
= TREE_CODE (parg
);
10137 parg0
= TREE_OPERAND (parg
, 0);
10138 parg1
= TREE_OPERAND (parg
, 1);
10139 STRIP_NOPS (parg0
);
10140 STRIP_NOPS (parg1
);
10142 if (TREE_CODE (parg0
) == MULT_EXPR
10143 && TREE_CODE (parg1
) != MULT_EXPR
)
10144 return fold_build2_loc (loc
, pcode
, type
,
10145 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10146 fold_convert_loc (loc
, type
,
10148 fold_convert_loc (loc
, type
,
10150 fold_convert_loc (loc
, type
, parg1
));
10151 if (TREE_CODE (parg0
) != MULT_EXPR
10152 && TREE_CODE (parg1
) == MULT_EXPR
)
10154 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10155 fold_convert_loc (loc
, type
, parg0
),
10156 fold_build2_loc (loc
, pcode
, type
,
10157 fold_convert_loc (loc
, type
, marg
),
10158 fold_convert_loc (loc
, type
,
10164 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10165 to __complex__ ( x, y ). This is not the same for SNaNs or
10166 if signed zeros are involved. */
10167 if (!HONOR_SNANS (element_mode (arg0
))
10168 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10169 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10171 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10172 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10173 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10174 bool arg0rz
= false, arg0iz
= false;
10175 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10176 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10178 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10179 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10180 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10182 tree rp
= arg1r
? arg1r
10183 : build1 (REALPART_EXPR
, rtype
, arg1
);
10184 tree ip
= arg0i
? arg0i
10185 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10186 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10188 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10190 tree rp
= arg0r
? arg0r
10191 : build1 (REALPART_EXPR
, rtype
, arg0
);
10192 tree ip
= arg1i
? arg1i
10193 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
10194 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10199 if (flag_unsafe_math_optimizations
10200 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10201 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10202 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10205 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10206 We associate floats only if the user has specified
10207 -fassociative-math. */
10208 if (flag_associative_math
10209 && TREE_CODE (arg1
) == PLUS_EXPR
10210 && TREE_CODE (arg0
) != MULT_EXPR
)
10212 tree tree10
= TREE_OPERAND (arg1
, 0);
10213 tree tree11
= TREE_OPERAND (arg1
, 1);
10214 if (TREE_CODE (tree11
) == MULT_EXPR
10215 && TREE_CODE (tree10
) == MULT_EXPR
)
10218 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
10219 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
10222 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10223 We associate floats only if the user has specified
10224 -fassociative-math. */
10225 if (flag_associative_math
10226 && TREE_CODE (arg0
) == PLUS_EXPR
10227 && TREE_CODE (arg1
) != MULT_EXPR
)
10229 tree tree00
= TREE_OPERAND (arg0
, 0);
10230 tree tree01
= TREE_OPERAND (arg0
, 1);
10231 if (TREE_CODE (tree01
) == MULT_EXPR
10232 && TREE_CODE (tree00
) == MULT_EXPR
)
10235 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
10236 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
10242 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10243 is a rotate of A by C1 bits. */
10244 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10245 is a rotate of A by B bits. */
10247 enum tree_code code0
, code1
;
10249 code0
= TREE_CODE (arg0
);
10250 code1
= TREE_CODE (arg1
);
10251 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
10252 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
10253 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10254 TREE_OPERAND (arg1
, 0), 0)
10255 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10256 TYPE_UNSIGNED (rtype
))
10257 /* Only create rotates in complete modes. Other cases are not
10258 expanded properly. */
10259 && (element_precision (rtype
)
10260 == element_precision (TYPE_MODE (rtype
))))
10262 tree tree01
, tree11
;
10263 enum tree_code code01
, code11
;
10265 tree01
= TREE_OPERAND (arg0
, 1);
10266 tree11
= TREE_OPERAND (arg1
, 1);
10267 STRIP_NOPS (tree01
);
10268 STRIP_NOPS (tree11
);
10269 code01
= TREE_CODE (tree01
);
10270 code11
= TREE_CODE (tree11
);
10271 if (code01
== INTEGER_CST
10272 && code11
== INTEGER_CST
10273 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
10274 == element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
10276 tem
= build2_loc (loc
, LROTATE_EXPR
,
10277 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10278 TREE_OPERAND (arg0
, 0),
10279 code0
== LSHIFT_EXPR
10280 ? TREE_OPERAND (arg0
, 1)
10281 : TREE_OPERAND (arg1
, 1));
10282 return fold_convert_loc (loc
, type
, tem
);
10284 else if (code11
== MINUS_EXPR
)
10286 tree tree110
, tree111
;
10287 tree110
= TREE_OPERAND (tree11
, 0);
10288 tree111
= TREE_OPERAND (tree11
, 1);
10289 STRIP_NOPS (tree110
);
10290 STRIP_NOPS (tree111
);
10291 if (TREE_CODE (tree110
) == INTEGER_CST
10292 && 0 == compare_tree_int (tree110
,
10294 (TREE_TYPE (TREE_OPERAND
10296 && operand_equal_p (tree01
, tree111
, 0))
10298 fold_convert_loc (loc
, type
,
10299 build2 ((code0
== LSHIFT_EXPR
10302 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10303 TREE_OPERAND (arg0
, 0),
10304 TREE_OPERAND (arg0
, 1)));
10306 else if (code01
== MINUS_EXPR
)
10308 tree tree010
, tree011
;
10309 tree010
= TREE_OPERAND (tree01
, 0);
10310 tree011
= TREE_OPERAND (tree01
, 1);
10311 STRIP_NOPS (tree010
);
10312 STRIP_NOPS (tree011
);
10313 if (TREE_CODE (tree010
) == INTEGER_CST
10314 && 0 == compare_tree_int (tree010
,
10316 (TREE_TYPE (TREE_OPERAND
10318 && operand_equal_p (tree11
, tree011
, 0))
10319 return fold_convert_loc
10321 build2 ((code0
!= LSHIFT_EXPR
10324 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10325 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1)));
10331 /* In most languages, can't associate operations on floats through
10332 parentheses. Rather than remember where the parentheses were, we
10333 don't associate floats at all, unless the user has specified
10334 -fassociative-math.
10335 And, we need to make sure type is not saturating. */
10337 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
10338 && !TYPE_SATURATING (type
))
10340 tree var0
, con0
, lit0
, minus_lit0
;
10341 tree var1
, con1
, lit1
, minus_lit1
;
10345 /* Split both trees into variables, constants, and literals. Then
10346 associate each group together, the constants with literals,
10347 then the result with variables. This increases the chances of
10348 literals being recombined later and of generating relocatable
10349 expressions for the sum of a constant and literal. */
10350 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
10351 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
10352 code
== MINUS_EXPR
);
10354 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10355 if (code
== MINUS_EXPR
)
10358 /* With undefined overflow prefer doing association in a type
10359 which wraps on overflow, if that is one of the operand types. */
10360 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10361 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
10363 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10364 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10365 atype
= TREE_TYPE (arg0
);
10366 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10367 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
10368 atype
= TREE_TYPE (arg1
);
10369 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
10372 /* With undefined overflow we can only associate constants with one
10373 variable, and constants whose association doesn't overflow. */
10374 if ((POINTER_TYPE_P (atype
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10375 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
10382 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
10383 tmp0
= TREE_OPERAND (tmp0
, 0);
10384 if (CONVERT_EXPR_P (tmp0
)
10385 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10386 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10387 <= TYPE_PRECISION (atype
)))
10388 tmp0
= TREE_OPERAND (tmp0
, 0);
10389 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
10390 tmp1
= TREE_OPERAND (tmp1
, 0);
10391 if (CONVERT_EXPR_P (tmp1
)
10392 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10393 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10394 <= TYPE_PRECISION (atype
)))
10395 tmp1
= TREE_OPERAND (tmp1
, 0);
10396 /* The only case we can still associate with two variables
10397 is if they are the same, modulo negation and bit-pattern
10398 preserving conversions. */
10399 if (!operand_equal_p (tmp0
, tmp1
, 0))
10404 /* Only do something if we found more than two objects. Otherwise,
10405 nothing has changed and we risk infinite recursion. */
10407 && (2 < ((var0
!= 0) + (var1
!= 0)
10408 + (con0
!= 0) + (con1
!= 0)
10409 + (lit0
!= 0) + (lit1
!= 0)
10410 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
10412 bool any_overflows
= false;
10413 if (lit0
) any_overflows
|= TREE_OVERFLOW (lit0
);
10414 if (lit1
) any_overflows
|= TREE_OVERFLOW (lit1
);
10415 if (minus_lit0
) any_overflows
|= TREE_OVERFLOW (minus_lit0
);
10416 if (minus_lit1
) any_overflows
|= TREE_OVERFLOW (minus_lit1
);
10417 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
10418 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
10419 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
10420 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
10423 /* Preserve the MINUS_EXPR if the negative part of the literal is
10424 greater than the positive part. Otherwise, the multiplicative
10425 folding code (i.e extract_muldiv) may be fooled in case
10426 unsigned constants are subtracted, like in the following
10427 example: ((X*2 + 4) - 8U)/2. */
10428 if (minus_lit0
&& lit0
)
10430 if (TREE_CODE (lit0
) == INTEGER_CST
10431 && TREE_CODE (minus_lit0
) == INTEGER_CST
10432 && tree_int_cst_lt (lit0
, minus_lit0
))
10434 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
10435 MINUS_EXPR
, atype
);
10440 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
10441 MINUS_EXPR
, atype
);
10446 /* Don't introduce overflows through reassociation. */
10448 && ((lit0
&& TREE_OVERFLOW_P (lit0
))
10449 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
))))
10456 fold_convert_loc (loc
, type
,
10457 associate_trees (loc
, var0
, minus_lit0
,
10458 MINUS_EXPR
, atype
));
10461 con0
= associate_trees (loc
, con0
, minus_lit0
,
10462 MINUS_EXPR
, atype
);
10464 fold_convert_loc (loc
, type
,
10465 associate_trees (loc
, var0
, con0
,
10466 PLUS_EXPR
, atype
));
10470 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
10472 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
10480 /* Pointer simplifications for subtraction, simple reassociations. */
10481 if (POINTER_TYPE_P (TREE_TYPE (arg1
)) && POINTER_TYPE_P (TREE_TYPE (arg0
)))
10483 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10484 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10485 && TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10487 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10488 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10489 tree arg10
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10490 tree arg11
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10491 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10492 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10494 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10497 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10498 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
10500 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10501 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10502 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
, arg00
,
10503 fold_convert_loc (loc
, type
, arg1
));
10505 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tmp
, arg01
);
10507 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10509 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10511 tree arg10
= fold_convert_loc (loc
, type
,
10512 TREE_OPERAND (arg1
, 0));
10513 tree arg11
= fold_convert_loc (loc
, type
,
10514 TREE_OPERAND (arg1
, 1));
10515 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
10516 fold_convert_loc (loc
, type
, arg0
),
10519 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tmp
, arg11
);
10522 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10523 if (TREE_CODE (arg0
) == NEGATE_EXPR
10524 && negate_expr_p (arg1
)
10525 && reorder_operands_p (arg0
, arg1
))
10526 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10527 fold_convert_loc (loc
, type
,
10528 negate_expr (arg1
)),
10529 fold_convert_loc (loc
, type
,
10530 TREE_OPERAND (arg0
, 0)));
10532 /* X - (X / Y) * Y is X % Y. */
10533 if ((INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
10534 && TREE_CODE (arg1
) == MULT_EXPR
10535 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10536 && operand_equal_p (arg0
,
10537 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0)
10538 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1),
10539 TREE_OPERAND (arg1
, 1), 0))
10541 fold_convert_loc (loc
, type
,
10542 fold_build2_loc (loc
, TRUNC_MOD_EXPR
, TREE_TYPE (arg0
),
10543 arg0
, TREE_OPERAND (arg1
, 1)));
10545 if (! FLOAT_TYPE_P (type
))
10547 /* Fold A - (A & B) into ~B & A. */
10548 if (!TREE_SIDE_EFFECTS (arg0
)
10549 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10551 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
10553 tree arg10
= fold_convert_loc (loc
, type
,
10554 TREE_OPERAND (arg1
, 0));
10555 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10556 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10558 fold_convert_loc (loc
, type
, arg0
));
10560 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10562 tree arg11
= fold_convert_loc (loc
,
10563 type
, TREE_OPERAND (arg1
, 1));
10564 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10565 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10567 fold_convert_loc (loc
, type
, arg0
));
10571 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10572 any power of 2 minus 1. */
10573 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10574 && TREE_CODE (arg1
) == BIT_AND_EXPR
10575 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10576 TREE_OPERAND (arg1
, 0), 0))
10578 tree mask0
= TREE_OPERAND (arg0
, 1);
10579 tree mask1
= TREE_OPERAND (arg1
, 1);
10580 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
10582 if (operand_equal_p (tem
, mask1
, 0))
10584 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
10585 TREE_OPERAND (arg0
, 0), mask1
);
10586 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
10591 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10592 __complex__ ( x, -y ). This is not the same for SNaNs or if
10593 signed zeros are involved. */
10594 if (!HONOR_SNANS (element_mode (arg0
))
10595 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10596 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10598 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10599 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10600 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10601 bool arg0rz
= false, arg0iz
= false;
10602 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10603 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10605 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10606 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10607 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10609 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10611 : build1 (REALPART_EXPR
, rtype
, arg1
));
10612 tree ip
= arg0i
? arg0i
10613 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10614 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10616 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10618 tree rp
= arg0r
? arg0r
10619 : build1 (REALPART_EXPR
, rtype
, arg0
);
10620 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10622 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10623 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10628 /* A - B -> A + (-B) if B is easily negatable. */
10629 if (negate_expr_p (arg1
)
10630 && !TYPE_OVERFLOW_SANITIZED (type
)
10631 && ((FLOAT_TYPE_P (type
)
10632 /* Avoid this transformation if B is a positive REAL_CST. */
10633 && (TREE_CODE (arg1
) != REAL_CST
10634 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
10635 || INTEGRAL_TYPE_P (type
)))
10636 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10637 fold_convert_loc (loc
, type
, arg0
),
10638 fold_convert_loc (loc
, type
,
10639 negate_expr (arg1
)));
10641 /* Try folding difference of addresses. */
10643 HOST_WIDE_INT diff
;
10645 if ((TREE_CODE (arg0
) == ADDR_EXPR
10646 || TREE_CODE (arg1
) == ADDR_EXPR
)
10647 && ptr_difference_const (arg0
, arg1
, &diff
))
10648 return build_int_cst_type (type
, diff
);
10651 /* Fold &a[i] - &a[j] to i-j. */
10652 if (TREE_CODE (arg0
) == ADDR_EXPR
10653 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10654 && TREE_CODE (arg1
) == ADDR_EXPR
10655 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10657 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
10658 TREE_OPERAND (arg0
, 0),
10659 TREE_OPERAND (arg1
, 0));
10664 if (FLOAT_TYPE_P (type
)
10665 && flag_unsafe_math_optimizations
10666 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10667 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10668 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10671 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10672 one. Make sure the type is not saturating and has the signedness of
10673 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10674 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10675 if ((TREE_CODE (arg0
) == MULT_EXPR
10676 || TREE_CODE (arg1
) == MULT_EXPR
)
10677 && !TYPE_SATURATING (type
)
10678 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10679 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10680 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10682 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10690 /* (-A) * (-B) -> A * B */
10691 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10692 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10693 fold_convert_loc (loc
, type
,
10694 TREE_OPERAND (arg0
, 0)),
10695 fold_convert_loc (loc
, type
,
10696 negate_expr (arg1
)));
10697 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10698 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10699 fold_convert_loc (loc
, type
,
10700 negate_expr (arg0
)),
10701 fold_convert_loc (loc
, type
,
10702 TREE_OPERAND (arg1
, 0)));
10704 if (! FLOAT_TYPE_P (type
))
10706 /* Transform x * -C into -x * C if x is easily negatable. */
10707 if (TREE_CODE (arg1
) == INTEGER_CST
10708 && tree_int_cst_sgn (arg1
) == -1
10709 && negate_expr_p (arg0
)
10710 && (tem
= negate_expr (arg1
)) != arg1
10711 && !TREE_OVERFLOW (tem
))
10712 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10713 fold_convert_loc (loc
, type
,
10714 negate_expr (arg0
)),
10717 /* (a * (1 << b)) is (a << b) */
10718 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10719 && integer_onep (TREE_OPERAND (arg1
, 0)))
10720 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op0
,
10721 TREE_OPERAND (arg1
, 1));
10722 if (TREE_CODE (arg0
) == LSHIFT_EXPR
10723 && integer_onep (TREE_OPERAND (arg0
, 0)))
10724 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op1
,
10725 TREE_OPERAND (arg0
, 1));
10727 /* (A + A) * C -> A * 2 * C */
10728 if (TREE_CODE (arg0
) == PLUS_EXPR
10729 && TREE_CODE (arg1
) == INTEGER_CST
10730 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10731 TREE_OPERAND (arg0
, 1), 0))
10732 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10733 omit_one_operand_loc (loc
, type
,
10734 TREE_OPERAND (arg0
, 0),
10735 TREE_OPERAND (arg0
, 1)),
10736 fold_build2_loc (loc
, MULT_EXPR
, type
,
10737 build_int_cst (type
, 2) , arg1
));
10739 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10740 sign-changing only. */
10741 if (TREE_CODE (arg1
) == INTEGER_CST
10742 && TREE_CODE (arg0
) == EXACT_DIV_EXPR
10743 && operand_equal_p (arg1
, TREE_OPERAND (arg0
, 1), 0))
10744 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10746 strict_overflow_p
= false;
10747 if (TREE_CODE (arg1
) == INTEGER_CST
10748 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10749 &strict_overflow_p
)))
10751 if (strict_overflow_p
)
10752 fold_overflow_warning (("assuming signed overflow does not "
10753 "occur when simplifying "
10755 WARN_STRICT_OVERFLOW_MISC
);
10756 return fold_convert_loc (loc
, type
, tem
);
10759 /* Optimize z * conj(z) for integer complex numbers. */
10760 if (TREE_CODE (arg0
) == CONJ_EXPR
10761 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10762 return fold_mult_zconjz (loc
, type
, arg1
);
10763 if (TREE_CODE (arg1
) == CONJ_EXPR
10764 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10765 return fold_mult_zconjz (loc
, type
, arg0
);
10769 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10770 the result for floating point types due to rounding so it is applied
10771 only if -fassociative-math was specify. */
10772 if (flag_associative_math
10773 && TREE_CODE (arg0
) == RDIV_EXPR
10774 && TREE_CODE (arg1
) == REAL_CST
10775 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
10777 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
10780 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10781 TREE_OPERAND (arg0
, 1));
10784 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10785 if (operand_equal_p (arg0
, arg1
, 0))
10787 tree tem
= fold_strip_sign_ops (arg0
);
10788 if (tem
!= NULL_TREE
)
10790 tem
= fold_convert_loc (loc
, type
, tem
);
10791 return fold_build2_loc (loc
, MULT_EXPR
, type
, tem
, tem
);
10795 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10796 This is not the same for NaNs or if signed zeros are
10798 if (!HONOR_NANS (arg0
)
10799 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10800 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10801 && TREE_CODE (arg1
) == COMPLEX_CST
10802 && real_zerop (TREE_REALPART (arg1
)))
10804 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10805 if (real_onep (TREE_IMAGPART (arg1
)))
10807 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10808 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
10810 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
10811 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
10813 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10814 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
10815 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
10819 /* Optimize z * conj(z) for floating point complex numbers.
10820 Guarded by flag_unsafe_math_optimizations as non-finite
10821 imaginary components don't produce scalar results. */
10822 if (flag_unsafe_math_optimizations
10823 && TREE_CODE (arg0
) == CONJ_EXPR
10824 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10825 return fold_mult_zconjz (loc
, type
, arg1
);
10826 if (flag_unsafe_math_optimizations
10827 && TREE_CODE (arg1
) == CONJ_EXPR
10828 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10829 return fold_mult_zconjz (loc
, type
, arg0
);
10831 if (flag_unsafe_math_optimizations
)
10833 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10834 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10836 /* Optimizations of root(...)*root(...). */
10837 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
10840 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10841 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10843 /* Optimize sqrt(x)*sqrt(x) as x. */
10844 if (BUILTIN_SQRT_P (fcode0
)
10845 && operand_equal_p (arg00
, arg10
, 0)
10846 && ! HONOR_SNANS (element_mode (type
)))
10849 /* Optimize root(x)*root(y) as root(x*y). */
10850 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10851 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg00
, arg10
);
10852 return build_call_expr_loc (loc
, rootfn
, 1, arg
);
10855 /* Optimize expN(x)*expN(y) as expN(x+y). */
10856 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
10858 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10859 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10860 CALL_EXPR_ARG (arg0
, 0),
10861 CALL_EXPR_ARG (arg1
, 0));
10862 return build_call_expr_loc (loc
, expfn
, 1, arg
);
10865 /* Optimizations of pow(...)*pow(...). */
10866 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
10867 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
10868 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
10870 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10871 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10872 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10873 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10875 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10876 if (operand_equal_p (arg01
, arg11
, 0))
10878 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10879 tree arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
10881 return build_call_expr_loc (loc
, powfn
, 2, arg
, arg01
);
10884 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10885 if (operand_equal_p (arg00
, arg10
, 0))
10887 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10888 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10890 return build_call_expr_loc (loc
, powfn
, 2, arg00
, arg
);
10894 /* Optimize tan(x)*cos(x) as sin(x). */
10895 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
10896 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
10897 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
10898 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
10899 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
10900 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
10901 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10902 CALL_EXPR_ARG (arg1
, 0), 0))
10904 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
10906 if (sinfn
!= NULL_TREE
)
10907 return build_call_expr_loc (loc
, sinfn
, 1,
10908 CALL_EXPR_ARG (arg0
, 0));
10911 /* Optimize x*pow(x,c) as pow(x,c+1). */
10912 if (fcode1
== BUILT_IN_POW
10913 || fcode1
== BUILT_IN_POWF
10914 || fcode1
== BUILT_IN_POWL
)
10916 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10917 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10918 if (TREE_CODE (arg11
) == REAL_CST
10919 && !TREE_OVERFLOW (arg11
)
10920 && operand_equal_p (arg0
, arg10
, 0))
10922 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10926 c
= TREE_REAL_CST (arg11
);
10927 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10928 arg
= build_real (type
, c
);
10929 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10933 /* Optimize pow(x,c)*x as pow(x,c+1). */
10934 if (fcode0
== BUILT_IN_POW
10935 || fcode0
== BUILT_IN_POWF
10936 || fcode0
== BUILT_IN_POWL
)
10938 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10939 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10940 if (TREE_CODE (arg01
) == REAL_CST
10941 && !TREE_OVERFLOW (arg01
)
10942 && operand_equal_p (arg1
, arg00
, 0))
10944 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10948 c
= TREE_REAL_CST (arg01
);
10949 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10950 arg
= build_real (type
, c
);
10951 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
10955 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10956 if (!in_gimple_form
10958 && operand_equal_p (arg0
, arg1
, 0))
10960 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
10964 tree arg
= build_real (type
, dconst2
);
10965 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10974 /* ~X | X is -1. */
10975 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10976 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10978 t1
= build_zero_cst (type
);
10979 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10980 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
10983 /* X | ~X is -1. */
10984 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
10985 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10987 t1
= build_zero_cst (type
);
10988 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10989 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
10992 /* Canonicalize (X & C1) | C2. */
10993 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10994 && TREE_CODE (arg1
) == INTEGER_CST
10995 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10997 int width
= TYPE_PRECISION (type
), w
;
10998 wide_int c1
= TREE_OPERAND (arg0
, 1);
10999 wide_int c2
= arg1
;
11001 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11002 if ((c1
& c2
) == c1
)
11003 return omit_one_operand_loc (loc
, type
, arg1
,
11004 TREE_OPERAND (arg0
, 0));
11006 wide_int msk
= wi::mask (width
, false,
11007 TYPE_PRECISION (TREE_TYPE (arg1
)));
11009 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11010 if (msk
.and_not (c1
| c2
) == 0)
11011 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
11012 TREE_OPERAND (arg0
, 0), arg1
);
11014 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11015 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11016 mode which allows further optimizations. */
11019 wide_int c3
= c1
.and_not (c2
);
11020 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
11022 wide_int mask
= wi::mask (w
, false,
11023 TYPE_PRECISION (type
));
11024 if (((c1
| c2
) & mask
) == mask
&& c1
.and_not (mask
) == 0)
11032 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
11033 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11034 TREE_OPERAND (arg0
, 0),
11035 wide_int_to_tree (type
,
11040 /* (X & ~Y) | (~X & Y) is X ^ Y */
11041 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11042 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
11044 tree a0
, a1
, l0
, l1
, n0
, n1
;
11046 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11047 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11049 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11050 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11052 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
11053 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
11055 if ((operand_equal_p (n0
, a0
, 0)
11056 && operand_equal_p (n1
, a1
, 0))
11057 || (operand_equal_p (n0
, a1
, 0)
11058 && operand_equal_p (n1
, a0
, 0)))
11059 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
11062 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11063 if (t1
!= NULL_TREE
)
11066 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11068 This results in more efficient code for machines without a NAND
11069 instruction. Combine will canonicalize to the first form
11070 which will allow use of NAND instructions provided by the
11071 backend if they exist. */
11072 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11073 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11076 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11077 build2 (BIT_AND_EXPR
, type
,
11078 fold_convert_loc (loc
, type
,
11079 TREE_OPERAND (arg0
, 0)),
11080 fold_convert_loc (loc
, type
,
11081 TREE_OPERAND (arg1
, 0))));
11084 /* See if this can be simplified into a rotate first. If that
11085 is unsuccessful continue in the association code. */
11089 /* ~X ^ X is -1. */
11090 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11091 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11093 t1
= build_zero_cst (type
);
11094 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11095 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
11098 /* X ^ ~X is -1. */
11099 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
11100 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11102 t1
= build_zero_cst (type
);
11103 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11104 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
11107 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11108 with a constant, and the two constants have no bits in common,
11109 we should treat this as a BIT_IOR_EXPR since this may produce more
11110 simplifications. */
11111 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11112 && TREE_CODE (arg1
) == BIT_AND_EXPR
11113 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11114 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
11115 && wi::bit_and (TREE_OPERAND (arg0
, 1),
11116 TREE_OPERAND (arg1
, 1)) == 0)
11118 code
= BIT_IOR_EXPR
;
11122 /* (X | Y) ^ X -> Y & ~ X*/
11123 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11124 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11126 tree t2
= TREE_OPERAND (arg0
, 1);
11127 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11129 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11130 fold_convert_loc (loc
, type
, t2
),
11131 fold_convert_loc (loc
, type
, t1
));
11135 /* (Y | X) ^ X -> Y & ~ X*/
11136 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11137 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11139 tree t2
= TREE_OPERAND (arg0
, 0);
11140 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11142 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11143 fold_convert_loc (loc
, type
, t2
),
11144 fold_convert_loc (loc
, type
, t1
));
11148 /* X ^ (X | Y) -> Y & ~ X*/
11149 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11150 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
11152 tree t2
= TREE_OPERAND (arg1
, 1);
11153 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11155 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11156 fold_convert_loc (loc
, type
, t2
),
11157 fold_convert_loc (loc
, type
, t1
));
11161 /* X ^ (Y | X) -> Y & ~ X*/
11162 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11163 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
11165 tree t2
= TREE_OPERAND (arg1
, 0);
11166 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11168 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11169 fold_convert_loc (loc
, type
, t2
),
11170 fold_convert_loc (loc
, type
, t1
));
11174 /* Convert ~X ^ ~Y to X ^ Y. */
11175 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11176 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11177 return fold_build2_loc (loc
, code
, type
,
11178 fold_convert_loc (loc
, type
,
11179 TREE_OPERAND (arg0
, 0)),
11180 fold_convert_loc (loc
, type
,
11181 TREE_OPERAND (arg1
, 0)));
11183 /* Convert ~X ^ C to X ^ ~C. */
11184 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11185 && TREE_CODE (arg1
) == INTEGER_CST
)
11186 return fold_build2_loc (loc
, code
, type
,
11187 fold_convert_loc (loc
, type
,
11188 TREE_OPERAND (arg0
, 0)),
11189 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, arg1
));
11191 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11192 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11193 && INTEGRAL_TYPE_P (type
)
11194 && integer_onep (TREE_OPERAND (arg0
, 1))
11195 && integer_onep (arg1
))
11196 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11197 build_zero_cst (TREE_TYPE (arg0
)));
11199 /* Fold (X & Y) ^ Y as ~X & Y. */
11200 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11201 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11203 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11204 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11205 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11206 fold_convert_loc (loc
, type
, arg1
));
11208 /* Fold (X & Y) ^ X as ~Y & X. */
11209 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11210 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11211 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11213 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11214 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11215 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11216 fold_convert_loc (loc
, type
, arg1
));
11218 /* Fold X ^ (X & Y) as X & ~Y. */
11219 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11220 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11222 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11223 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11224 fold_convert_loc (loc
, type
, arg0
),
11225 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11227 /* Fold X ^ (Y & X) as ~Y & X. */
11228 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11229 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11230 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11232 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11233 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11234 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11235 fold_convert_loc (loc
, type
, arg0
));
11238 /* See if this can be simplified into a rotate first. If that
11239 is unsuccessful continue in the association code. */
11243 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11244 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11245 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11246 || (TREE_CODE (arg0
) == EQ_EXPR
11247 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11248 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11249 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11251 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11252 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11253 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11254 || (TREE_CODE (arg1
) == EQ_EXPR
11255 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11256 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11257 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11259 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11260 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11261 && INTEGRAL_TYPE_P (type
)
11262 && integer_onep (TREE_OPERAND (arg0
, 1))
11263 && integer_onep (arg1
))
11266 tem
= TREE_OPERAND (arg0
, 0);
11267 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11268 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11270 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11271 build_zero_cst (TREE_TYPE (tem
)));
11273 /* Fold ~X & 1 as (X & 1) == 0. */
11274 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11275 && INTEGRAL_TYPE_P (type
)
11276 && integer_onep (arg1
))
11279 tem
= TREE_OPERAND (arg0
, 0);
11280 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11281 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11283 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11284 build_zero_cst (TREE_TYPE (tem
)));
11286 /* Fold !X & 1 as X == 0. */
11287 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11288 && integer_onep (arg1
))
11290 tem
= TREE_OPERAND (arg0
, 0);
11291 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11292 build_zero_cst (TREE_TYPE (tem
)));
11295 /* Fold (X ^ Y) & Y as ~X & Y. */
11296 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11297 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11299 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11300 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11301 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11302 fold_convert_loc (loc
, type
, arg1
));
11304 /* Fold (X ^ Y) & X as ~Y & X. */
11305 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11306 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11307 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11309 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11310 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11311 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11312 fold_convert_loc (loc
, type
, arg1
));
11314 /* Fold X & (X ^ Y) as X & ~Y. */
11315 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11316 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11318 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11319 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11320 fold_convert_loc (loc
, type
, arg0
),
11321 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11323 /* Fold X & (Y ^ X) as ~Y & X. */
11324 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11325 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11326 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11328 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11329 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11330 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11331 fold_convert_loc (loc
, type
, arg0
));
11334 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11335 multiple of 1 << CST. */
11336 if (TREE_CODE (arg1
) == INTEGER_CST
)
11338 wide_int cst1
= arg1
;
11339 wide_int ncst1
= -cst1
;
11340 if ((cst1
& ncst1
) == ncst1
11341 && multiple_of_p (type
, arg0
,
11342 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11343 return fold_convert_loc (loc
, type
, arg0
);
11346 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11348 if (TREE_CODE (arg1
) == INTEGER_CST
11349 && TREE_CODE (arg0
) == MULT_EXPR
11350 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11352 wide_int warg1
= arg1
;
11353 wide_int masked
= mask_with_tz (type
, warg1
, TREE_OPERAND (arg0
, 1));
11356 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
11358 else if (masked
!= warg1
)
11360 /* Avoid the transform if arg1 is a mask of some
11361 mode which allows further optimizations. */
11362 int pop
= wi::popcount (warg1
);
11363 if (!(pop
>= BITS_PER_UNIT
11364 && exact_log2 (pop
) != -1
11365 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
11366 return fold_build2_loc (loc
, code
, type
, op0
,
11367 wide_int_to_tree (type
, masked
));
11371 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11372 ((A & N) + B) & M -> (A + B) & M
11373 Similarly if (N & M) == 0,
11374 ((A | N) + B) & M -> (A + B) & M
11375 and for - instead of + (or unary - instead of +)
11376 and/or ^ instead of |.
11377 If B is constant and (B & M) == 0, fold into A & M. */
11378 if (TREE_CODE (arg1
) == INTEGER_CST
)
11380 wide_int cst1
= arg1
;
11381 if ((~cst1
!= 0) && (cst1
& (cst1
+ 1)) == 0
11382 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11383 && (TREE_CODE (arg0
) == PLUS_EXPR
11384 || TREE_CODE (arg0
) == MINUS_EXPR
11385 || TREE_CODE (arg0
) == NEGATE_EXPR
)
11386 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
11387 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
11393 /* Now we know that arg0 is (C + D) or (C - D) or
11394 -C and arg1 (M) is == (1LL << cst) - 1.
11395 Store C into PMOP[0] and D into PMOP[1]. */
11396 pmop
[0] = TREE_OPERAND (arg0
, 0);
11398 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
11400 pmop
[1] = TREE_OPERAND (arg0
, 1);
11404 if ((wi::max_value (TREE_TYPE (arg0
)) & cst1
) != cst1
)
11407 for (; which
>= 0; which
--)
11408 switch (TREE_CODE (pmop
[which
]))
11413 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
11416 cst0
= TREE_OPERAND (pmop
[which
], 1);
11418 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
11423 else if (cst0
!= 0)
11425 /* If C or D is of the form (A & N) where
11426 (N & M) == M, or of the form (A | N) or
11427 (A ^ N) where (N & M) == 0, replace it with A. */
11428 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
11431 /* If C or D is a N where (N & M) == 0, it can be
11432 omitted (assumed 0). */
11433 if ((TREE_CODE (arg0
) == PLUS_EXPR
11434 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
11435 && (cst1
& pmop
[which
]) == 0)
11436 pmop
[which
] = NULL
;
11442 /* Only build anything new if we optimized one or both arguments
11444 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
11445 || (TREE_CODE (arg0
) != NEGATE_EXPR
11446 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
11448 tree utype
= TREE_TYPE (arg0
);
11449 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
11451 /* Perform the operations in a type that has defined
11452 overflow behavior. */
11453 utype
= unsigned_type_for (TREE_TYPE (arg0
));
11454 if (pmop
[0] != NULL
)
11455 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
11456 if (pmop
[1] != NULL
)
11457 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
11460 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
11461 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
11462 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
11464 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
11465 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
11467 else if (pmop
[0] != NULL
)
11469 else if (pmop
[1] != NULL
)
11472 return build_int_cst (type
, 0);
11474 else if (pmop
[0] == NULL
)
11475 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
11477 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
11479 /* TEM is now the new binary +, - or unary - replacement. */
11480 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
11481 fold_convert_loc (loc
, utype
, arg1
));
11482 return fold_convert_loc (loc
, type
, tem
);
11487 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11488 if (t1
!= NULL_TREE
)
11490 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11491 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11492 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11494 prec
= element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11496 wide_int mask
= wide_int::from (arg1
, prec
, UNSIGNED
);
11499 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11502 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11504 This results in more efficient code for machines without a NOR
11505 instruction. Combine will canonicalize to the first form
11506 which will allow use of NOR instructions provided by the
11507 backend if they exist. */
11508 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11509 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11511 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11512 build2 (BIT_IOR_EXPR
, type
,
11513 fold_convert_loc (loc
, type
,
11514 TREE_OPERAND (arg0
, 0)),
11515 fold_convert_loc (loc
, type
,
11516 TREE_OPERAND (arg1
, 0))));
11519 /* If arg0 is derived from the address of an object or function, we may
11520 be able to fold this expression using the object or function's
11522 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && tree_fits_uhwi_p (arg1
))
11524 unsigned HOST_WIDE_INT modulus
, residue
;
11525 unsigned HOST_WIDE_INT low
= tree_to_uhwi (arg1
);
11527 modulus
= get_pointer_modulus_and_residue (arg0
, &residue
,
11528 integer_onep (arg1
));
11530 /* This works because modulus is a power of 2. If this weren't the
11531 case, we'd have to replace it by its greatest power-of-2
11532 divisor: modulus & -modulus. */
11534 return build_int_cst (type
, residue
& low
);
11537 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11538 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11539 if the new mask might be further optimized. */
11540 if ((TREE_CODE (arg0
) == LSHIFT_EXPR
11541 || TREE_CODE (arg0
) == RSHIFT_EXPR
)
11542 && TYPE_PRECISION (TREE_TYPE (arg0
)) <= HOST_BITS_PER_WIDE_INT
11543 && TREE_CODE (arg1
) == INTEGER_CST
11544 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11545 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) > 0
11546 && (tree_to_uhwi (TREE_OPERAND (arg0
, 1))
11547 < TYPE_PRECISION (TREE_TYPE (arg0
))))
11549 unsigned int shiftc
= tree_to_uhwi (TREE_OPERAND (arg0
, 1));
11550 unsigned HOST_WIDE_INT mask
= TREE_INT_CST_LOW (arg1
);
11551 unsigned HOST_WIDE_INT newmask
, zerobits
= 0;
11552 tree shift_type
= TREE_TYPE (arg0
);
11554 if (TREE_CODE (arg0
) == LSHIFT_EXPR
)
11555 zerobits
= ((((unsigned HOST_WIDE_INT
) 1) << shiftc
) - 1);
11556 else if (TREE_CODE (arg0
) == RSHIFT_EXPR
11557 && TYPE_PRECISION (TREE_TYPE (arg0
))
11558 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0
))))
11560 prec
= TYPE_PRECISION (TREE_TYPE (arg0
));
11561 tree arg00
= TREE_OPERAND (arg0
, 0);
11562 /* See if more bits can be proven as zero because of
11564 if (TREE_CODE (arg00
) == NOP_EXPR
11565 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00
, 0))))
11567 tree inner_type
= TREE_TYPE (TREE_OPERAND (arg00
, 0));
11568 if (TYPE_PRECISION (inner_type
)
11569 == GET_MODE_PRECISION (TYPE_MODE (inner_type
))
11570 && TYPE_PRECISION (inner_type
) < prec
)
11572 prec
= TYPE_PRECISION (inner_type
);
11573 /* See if we can shorten the right shift. */
11575 shift_type
= inner_type
;
11576 /* Otherwise X >> C1 is all zeros, so we'll optimize
11577 it into (X, 0) later on by making sure zerobits
11581 zerobits
= ~(unsigned HOST_WIDE_INT
) 0;
11584 zerobits
>>= HOST_BITS_PER_WIDE_INT
- shiftc
;
11585 zerobits
<<= prec
- shiftc
;
11587 /* For arithmetic shift if sign bit could be set, zerobits
11588 can contain actually sign bits, so no transformation is
11589 possible, unless MASK masks them all away. In that
11590 case the shift needs to be converted into logical shift. */
11591 if (!TYPE_UNSIGNED (TREE_TYPE (arg0
))
11592 && prec
== TYPE_PRECISION (TREE_TYPE (arg0
)))
11594 if ((mask
& zerobits
) == 0)
11595 shift_type
= unsigned_type_for (TREE_TYPE (arg0
));
11601 /* ((X << 16) & 0xff00) is (X, 0). */
11602 if ((mask
& zerobits
) == mask
)
11603 return omit_one_operand_loc (loc
, type
,
11604 build_int_cst (type
, 0), arg0
);
11606 newmask
= mask
| zerobits
;
11607 if (newmask
!= mask
&& (newmask
& (newmask
+ 1)) == 0)
11609 /* Only do the transformation if NEWMASK is some integer
11611 for (prec
= BITS_PER_UNIT
;
11612 prec
< HOST_BITS_PER_WIDE_INT
; prec
<<= 1)
11613 if (newmask
== (((unsigned HOST_WIDE_INT
) 1) << prec
) - 1)
11615 if (prec
< HOST_BITS_PER_WIDE_INT
11616 || newmask
== ~(unsigned HOST_WIDE_INT
) 0)
11620 if (shift_type
!= TREE_TYPE (arg0
))
11622 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), shift_type
,
11623 fold_convert_loc (loc
, shift_type
,
11624 TREE_OPERAND (arg0
, 0)),
11625 TREE_OPERAND (arg0
, 1));
11626 tem
= fold_convert_loc (loc
, type
, tem
);
11630 newmaskt
= build_int_cst_type (TREE_TYPE (op1
), newmask
);
11631 if (!tree_int_cst_equal (newmaskt
, arg1
))
11632 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
, newmaskt
);
11640 /* Don't touch a floating-point divide by zero unless the mode
11641 of the constant can represent infinity. */
11642 if (TREE_CODE (arg1
) == REAL_CST
11643 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11644 && real_zerop (arg1
))
11647 /* (-A) / (-B) -> A / B */
11648 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11649 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11650 TREE_OPERAND (arg0
, 0),
11651 negate_expr (arg1
));
11652 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11653 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11654 negate_expr (arg0
),
11655 TREE_OPERAND (arg1
, 0));
11657 /* Convert A/B/C to A/(B*C). */
11658 if (flag_reciprocal_math
11659 && TREE_CODE (arg0
) == RDIV_EXPR
)
11660 return fold_build2_loc (loc
, RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
11661 fold_build2_loc (loc
, MULT_EXPR
, type
,
11662 TREE_OPERAND (arg0
, 1), arg1
));
11664 /* Convert A/(B/C) to (A/B)*C. */
11665 if (flag_reciprocal_math
11666 && TREE_CODE (arg1
) == RDIV_EXPR
)
11667 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11668 fold_build2_loc (loc
, RDIV_EXPR
, type
, arg0
,
11669 TREE_OPERAND (arg1
, 0)),
11670 TREE_OPERAND (arg1
, 1));
11672 /* Convert C1/(X*C2) into (C1/C2)/X. */
11673 if (flag_reciprocal_math
11674 && TREE_CODE (arg1
) == MULT_EXPR
11675 && TREE_CODE (arg0
) == REAL_CST
11676 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
11678 tree tem
= const_binop (RDIV_EXPR
, arg0
,
11679 TREE_OPERAND (arg1
, 1));
11681 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
11682 TREE_OPERAND (arg1
, 0));
11685 if (flag_unsafe_math_optimizations
)
11687 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
11688 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
11690 /* Optimize sin(x)/cos(x) as tan(x). */
11691 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
11692 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
11693 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
11694 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11695 CALL_EXPR_ARG (arg1
, 0), 0))
11697 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11699 if (tanfn
!= NULL_TREE
)
11700 return build_call_expr_loc (loc
, tanfn
, 1, CALL_EXPR_ARG (arg0
, 0));
11703 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11704 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
11705 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
11706 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
11707 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11708 CALL_EXPR_ARG (arg1
, 0), 0))
11710 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11712 if (tanfn
!= NULL_TREE
)
11714 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
11715 CALL_EXPR_ARG (arg0
, 0));
11716 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11717 build_real (type
, dconst1
), tmp
);
11721 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11722 NaNs or Infinities. */
11723 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
11724 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
11725 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
11727 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11728 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11730 if (! HONOR_NANS (arg00
)
11731 && ! HONOR_INFINITIES (element_mode (arg00
))
11732 && operand_equal_p (arg00
, arg01
, 0))
11734 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11736 if (cosfn
!= NULL_TREE
)
11737 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11741 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11742 NaNs or Infinities. */
11743 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
11744 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
11745 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
11747 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11748 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11750 if (! HONOR_NANS (arg00
)
11751 && ! HONOR_INFINITIES (element_mode (arg00
))
11752 && operand_equal_p (arg00
, arg01
, 0))
11754 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11756 if (cosfn
!= NULL_TREE
)
11758 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11759 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11760 build_real (type
, dconst1
),
11766 /* Optimize pow(x,c)/x as pow(x,c-1). */
11767 if (fcode0
== BUILT_IN_POW
11768 || fcode0
== BUILT_IN_POWF
11769 || fcode0
== BUILT_IN_POWL
)
11771 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11772 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11773 if (TREE_CODE (arg01
) == REAL_CST
11774 && !TREE_OVERFLOW (arg01
)
11775 && operand_equal_p (arg1
, arg00
, 0))
11777 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11781 c
= TREE_REAL_CST (arg01
);
11782 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
11783 arg
= build_real (type
, c
);
11784 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
11788 /* Optimize a/root(b/c) into a*root(c/b). */
11789 if (BUILTIN_ROOT_P (fcode1
))
11791 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
11793 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
11795 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11796 tree b
= TREE_OPERAND (rootarg
, 0);
11797 tree c
= TREE_OPERAND (rootarg
, 1);
11799 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
11801 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
11802 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
11806 /* Optimize x/expN(y) into x*expN(-y). */
11807 if (BUILTIN_EXPONENT_P (fcode1
))
11809 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11810 tree arg
= negate_expr (CALL_EXPR_ARG (arg1
, 0));
11811 arg1
= build_call_expr_loc (loc
,
11813 fold_convert_loc (loc
, type
, arg
));
11814 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11817 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11818 if (fcode1
== BUILT_IN_POW
11819 || fcode1
== BUILT_IN_POWF
11820 || fcode1
== BUILT_IN_POWL
)
11822 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11823 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
11824 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
11825 tree neg11
= fold_convert_loc (loc
, type
,
11826 negate_expr (arg11
));
11827 arg1
= build_call_expr_loc (loc
, powfn
, 2, arg10
, neg11
);
11828 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11833 case TRUNC_DIV_EXPR
:
11834 /* Optimize (X & (-A)) / A where A is a power of 2,
11836 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11837 && !TYPE_UNSIGNED (type
) && TREE_CODE (arg1
) == INTEGER_CST
11838 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) > 0)
11840 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (arg1
),
11841 arg1
, TREE_OPERAND (arg0
, 1));
11842 if (sum
&& integer_zerop (sum
)) {
11843 tree pow2
= build_int_cst (integer_type_node
,
11844 wi::exact_log2 (arg1
));
11845 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11846 TREE_OPERAND (arg0
, 0), pow2
);
11852 case FLOOR_DIV_EXPR
:
11853 /* Simplify A / (B << N) where A and B are positive and B is
11854 a power of 2, to A >> (N + log2(B)). */
11855 strict_overflow_p
= false;
11856 if (TREE_CODE (arg1
) == LSHIFT_EXPR
11857 && (TYPE_UNSIGNED (type
)
11858 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11860 tree sval
= TREE_OPERAND (arg1
, 0);
11861 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
11863 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
11864 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
11865 wi::exact_log2 (sval
));
11867 if (strict_overflow_p
)
11868 fold_overflow_warning (("assuming signed overflow does not "
11869 "occur when simplifying A / (B << N)"),
11870 WARN_STRICT_OVERFLOW_MISC
);
11872 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
11874 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11875 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
11881 case ROUND_DIV_EXPR
:
11882 case CEIL_DIV_EXPR
:
11883 case EXACT_DIV_EXPR
:
11884 if (integer_zerop (arg1
))
11887 /* Convert -A / -B to A / B when the type is signed and overflow is
11889 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11890 && TREE_CODE (arg0
) == NEGATE_EXPR
11891 && negate_expr_p (arg1
))
11893 if (INTEGRAL_TYPE_P (type
))
11894 fold_overflow_warning (("assuming signed overflow does not occur "
11895 "when distributing negation across "
11897 WARN_STRICT_OVERFLOW_MISC
);
11898 return fold_build2_loc (loc
, code
, type
,
11899 fold_convert_loc (loc
, type
,
11900 TREE_OPERAND (arg0
, 0)),
11901 fold_convert_loc (loc
, type
,
11902 negate_expr (arg1
)));
11904 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11905 && TREE_CODE (arg1
) == NEGATE_EXPR
11906 && negate_expr_p (arg0
))
11908 if (INTEGRAL_TYPE_P (type
))
11909 fold_overflow_warning (("assuming signed overflow does not occur "
11910 "when distributing negation across "
11912 WARN_STRICT_OVERFLOW_MISC
);
11913 return fold_build2_loc (loc
, code
, type
,
11914 fold_convert_loc (loc
, type
,
11915 negate_expr (arg0
)),
11916 fold_convert_loc (loc
, type
,
11917 TREE_OPERAND (arg1
, 0)));
11920 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11921 operation, EXACT_DIV_EXPR.
11923 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11924 At one time others generated faster code, it's not clear if they do
11925 after the last round to changes to the DIV code in expmed.c. */
11926 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
11927 && multiple_of_p (type
, arg0
, arg1
))
11928 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
, arg0
, arg1
);
11930 strict_overflow_p
= false;
11931 if (TREE_CODE (arg1
) == INTEGER_CST
11932 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11933 &strict_overflow_p
)))
11935 if (strict_overflow_p
)
11936 fold_overflow_warning (("assuming signed overflow does not occur "
11937 "when simplifying division"),
11938 WARN_STRICT_OVERFLOW_MISC
);
11939 return fold_convert_loc (loc
, type
, tem
);
11944 case CEIL_MOD_EXPR
:
11945 case FLOOR_MOD_EXPR
:
11946 case ROUND_MOD_EXPR
:
11947 case TRUNC_MOD_EXPR
:
11948 /* X % -Y is the same as X % Y. */
11949 if (code
== TRUNC_MOD_EXPR
11950 && !TYPE_UNSIGNED (type
)
11951 && TREE_CODE (arg1
) == NEGATE_EXPR
11952 && !TYPE_OVERFLOW_TRAPS (type
))
11953 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, arg0
),
11954 fold_convert_loc (loc
, type
,
11955 TREE_OPERAND (arg1
, 0)));
11957 strict_overflow_p
= false;
11958 if (TREE_CODE (arg1
) == INTEGER_CST
11959 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11960 &strict_overflow_p
)))
11962 if (strict_overflow_p
)
11963 fold_overflow_warning (("assuming signed overflow does not occur "
11964 "when simplifying modulus"),
11965 WARN_STRICT_OVERFLOW_MISC
);
11966 return fold_convert_loc (loc
, type
, tem
);
11969 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11970 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11971 if ((code
== TRUNC_MOD_EXPR
|| code
== FLOOR_MOD_EXPR
)
11972 && (TYPE_UNSIGNED (type
)
11973 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11976 /* Also optimize A % (C << N) where C is a power of 2,
11977 to A & ((C << N) - 1). */
11978 if (TREE_CODE (arg1
) == LSHIFT_EXPR
)
11979 c
= TREE_OPERAND (arg1
, 0);
11981 if (integer_pow2p (c
) && tree_int_cst_sgn (c
) > 0)
11984 = fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (arg1
), arg1
,
11985 build_int_cst (TREE_TYPE (arg1
), 1));
11986 if (strict_overflow_p
)
11987 fold_overflow_warning (("assuming signed overflow does not "
11988 "occur when simplifying "
11989 "X % (power of two)"),
11990 WARN_STRICT_OVERFLOW_MISC
);
11991 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11992 fold_convert_loc (loc
, type
, arg0
),
11993 fold_convert_loc (loc
, type
, mask
));
12003 /* Since negative shift count is not well-defined,
12004 don't try to compute it in the compiler. */
12005 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
12008 prec
= element_precision (type
);
12010 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12011 if (TREE_CODE (op0
) == code
&& tree_fits_uhwi_p (arg1
)
12012 && tree_to_uhwi (arg1
) < prec
12013 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
12014 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
12016 unsigned int low
= (tree_to_uhwi (TREE_OPERAND (arg0
, 1))
12017 + tree_to_uhwi (arg1
));
12019 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12020 being well defined. */
12023 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
12025 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
12026 return omit_one_operand_loc (loc
, type
, build_zero_cst (type
),
12027 TREE_OPERAND (arg0
, 0));
12032 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12033 build_int_cst (TREE_TYPE (arg1
), low
));
12036 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12037 into x & ((unsigned)-1 >> c) for unsigned types. */
12038 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
12039 || (TYPE_UNSIGNED (type
)
12040 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
12041 && tree_fits_uhwi_p (arg1
)
12042 && tree_to_uhwi (arg1
) < prec
12043 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
12044 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
12046 HOST_WIDE_INT low0
= tree_to_uhwi (TREE_OPERAND (arg0
, 1));
12047 HOST_WIDE_INT low1
= tree_to_uhwi (arg1
);
12053 arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12055 lshift
= build_minus_one_cst (type
);
12056 lshift
= const_binop (code
, lshift
, arg1
);
12058 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, arg00
, lshift
);
12062 /* If we have a rotate of a bit operation with the rotate count and
12063 the second operand of the bit operation both constant,
12064 permute the two operations. */
12065 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12066 && (TREE_CODE (arg0
) == BIT_AND_EXPR
12067 || TREE_CODE (arg0
) == BIT_IOR_EXPR
12068 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12069 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12070 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
12071 fold_build2_loc (loc
, code
, type
,
12072 TREE_OPERAND (arg0
, 0), arg1
),
12073 fold_build2_loc (loc
, code
, type
,
12074 TREE_OPERAND (arg0
, 1), arg1
));
12076 /* Two consecutive rotates adding up to the some integer
12077 multiple of the precision of the type can be ignored. */
12078 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12079 && TREE_CODE (arg0
) == RROTATE_EXPR
12080 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12081 && wi::umod_trunc (wi::add (arg1
, TREE_OPERAND (arg0
, 1)),
12083 return TREE_OPERAND (arg0
, 0);
12085 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12086 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12087 if the latter can be further optimized. */
12088 if ((code
== LSHIFT_EXPR
|| code
== RSHIFT_EXPR
)
12089 && TREE_CODE (arg0
) == BIT_AND_EXPR
12090 && TREE_CODE (arg1
) == INTEGER_CST
12091 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12093 tree mask
= fold_build2_loc (loc
, code
, type
,
12094 fold_convert_loc (loc
, type
,
12095 TREE_OPERAND (arg0
, 1)),
12097 tree shift
= fold_build2_loc (loc
, code
, type
,
12098 fold_convert_loc (loc
, type
,
12099 TREE_OPERAND (arg0
, 0)),
12101 tem
= fold_binary_loc (loc
, BIT_AND_EXPR
, type
, shift
, mask
);
12109 tem
= fold_minmax (loc
, MIN_EXPR
, type
, arg0
, arg1
);
12115 tem
= fold_minmax (loc
, MAX_EXPR
, type
, arg0
, arg1
);
12120 case TRUTH_ANDIF_EXPR
:
12121 /* Note that the operands of this must be ints
12122 and their values must be 0 or 1.
12123 ("true" is a fixed value perhaps depending on the language.) */
12124 /* If first arg is constant zero, return it. */
12125 if (integer_zerop (arg0
))
12126 return fold_convert_loc (loc
, type
, arg0
);
12127 case TRUTH_AND_EXPR
:
12128 /* If either arg is constant true, drop it. */
12129 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12130 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12131 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
12132 /* Preserve sequence points. */
12133 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12134 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12135 /* If second arg is constant zero, result is zero, but first arg
12136 must be evaluated. */
12137 if (integer_zerop (arg1
))
12138 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12139 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12140 case will be handled here. */
12141 if (integer_zerop (arg0
))
12142 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12144 /* !X && X is always false. */
12145 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12146 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12147 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
12148 /* X && !X is always false. */
12149 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12150 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12151 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12153 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12154 means A >= Y && A != MAX, but in this case we know that
12157 if (!TREE_SIDE_EFFECTS (arg0
)
12158 && !TREE_SIDE_EFFECTS (arg1
))
12160 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
12161 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
12162 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
12164 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
12165 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
12166 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
12169 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12175 case TRUTH_ORIF_EXPR
:
12176 /* Note that the operands of this must be ints
12177 and their values must be 0 or true.
12178 ("true" is a fixed value perhaps depending on the language.) */
12179 /* If first arg is constant true, return it. */
12180 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12181 return fold_convert_loc (loc
, type
, arg0
);
12182 case TRUTH_OR_EXPR
:
12183 /* If either arg is constant zero, drop it. */
12184 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12185 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12186 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12187 /* Preserve sequence points. */
12188 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12189 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12190 /* If second arg is constant true, result is true, but we must
12191 evaluate first arg. */
12192 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12193 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12194 /* Likewise for first arg, but note this only occurs here for
12196 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12197 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12199 /* !X || X is always true. */
12200 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12201 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12202 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12203 /* X || !X is always true. */
12204 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12205 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12206 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12208 /* (X && !Y) || (!X && Y) is X ^ Y */
12209 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
12210 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
12212 tree a0
, a1
, l0
, l1
, n0
, n1
;
12214 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
12215 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
12217 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12218 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
12220 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
12221 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
12223 if ((operand_equal_p (n0
, a0
, 0)
12224 && operand_equal_p (n1
, a1
, 0))
12225 || (operand_equal_p (n0
, a1
, 0)
12226 && operand_equal_p (n1
, a0
, 0)))
12227 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
12230 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12236 case TRUTH_XOR_EXPR
:
12237 /* If the second arg is constant zero, drop it. */
12238 if (integer_zerop (arg1
))
12239 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12240 /* If the second arg is constant true, this is a logical inversion. */
12241 if (integer_onep (arg1
))
12243 tem
= invert_truthvalue_loc (loc
, arg0
);
12244 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
12246 /* Identical arguments cancel to zero. */
12247 if (operand_equal_p (arg0
, arg1
, 0))
12248 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12250 /* !X ^ X is always true. */
12251 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12252 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12253 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12255 /* X ^ !X is always true. */
12256 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12257 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12258 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12267 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12268 if (tem
!= NULL_TREE
)
12271 /* bool_var != 0 becomes bool_var. */
12272 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12273 && code
== NE_EXPR
)
12274 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12276 /* bool_var == 1 becomes bool_var. */
12277 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12278 && code
== EQ_EXPR
)
12279 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12281 /* bool_var != 1 becomes !bool_var. */
12282 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12283 && code
== NE_EXPR
)
12284 return fold_convert_loc (loc
, type
,
12285 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12286 TREE_TYPE (arg0
), arg0
));
12288 /* bool_var == 0 becomes !bool_var. */
12289 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12290 && code
== EQ_EXPR
)
12291 return fold_convert_loc (loc
, type
,
12292 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12293 TREE_TYPE (arg0
), arg0
));
12295 /* !exp != 0 becomes !exp */
12296 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
12297 && code
== NE_EXPR
)
12298 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12300 /* If this is an equality comparison of the address of two non-weak,
12301 unaliased symbols neither of which are extern (since we do not
12302 have access to attributes for externs), then we know the result. */
12303 if (TREE_CODE (arg0
) == ADDR_EXPR
12304 && DECL_P (TREE_OPERAND (arg0
, 0))
12305 && TREE_CODE (arg1
) == ADDR_EXPR
12306 && DECL_P (TREE_OPERAND (arg1
, 0)))
12310 if (decl_in_symtab_p (TREE_OPERAND (arg0
, 0))
12311 && decl_in_symtab_p (TREE_OPERAND (arg1
, 0)))
12312 equal
= symtab_node::get_create (TREE_OPERAND (arg0
, 0))
12313 ->equal_address_to (symtab_node::get_create
12314 (TREE_OPERAND (arg1
, 0)));
12316 equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
12318 return constant_boolean_node (equal
12319 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
12323 /* Similarly for a NEGATE_EXPR. */
12324 if (TREE_CODE (arg0
) == NEGATE_EXPR
12325 && TREE_CODE (arg1
) == INTEGER_CST
12326 && 0 != (tem
= negate_expr (fold_convert_loc (loc
, TREE_TYPE (arg0
),
12328 && TREE_CODE (tem
) == INTEGER_CST
12329 && !TREE_OVERFLOW (tem
))
12330 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12332 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12333 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12334 && TREE_CODE (arg1
) == INTEGER_CST
12335 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12336 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12337 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
12338 fold_convert_loc (loc
,
12341 TREE_OPERAND (arg0
, 1)));
12343 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12344 if ((TREE_CODE (arg0
) == PLUS_EXPR
12345 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
12346 || TREE_CODE (arg0
) == MINUS_EXPR
)
12347 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12350 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
12351 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
12353 tree val
= TREE_OPERAND (arg0
, 1);
12354 return omit_two_operands_loc (loc
, type
,
12355 fold_build2_loc (loc
, code
, type
,
12357 build_int_cst (TREE_TYPE (val
),
12359 TREE_OPERAND (arg0
, 0), arg1
);
12362 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12363 if (TREE_CODE (arg0
) == MINUS_EXPR
12364 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
12365 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12368 && wi::extract_uhwi (TREE_OPERAND (arg0
, 0), 0, 1) == 1)
12370 return omit_two_operands_loc (loc
, type
,
12372 ? boolean_true_node
: boolean_false_node
,
12373 TREE_OPERAND (arg0
, 1), arg1
);
12376 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12377 if (TREE_CODE (arg0
) == ABS_EXPR
12378 && (integer_zerop (arg1
) || real_zerop (arg1
)))
12379 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
12381 /* If this is an EQ or NE comparison with zero and ARG0 is
12382 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12383 two operations, but the latter can be done in one less insn
12384 on machines that have only two-operand insns or on which a
12385 constant cannot be the first operand. */
12386 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12387 && integer_zerop (arg1
))
12389 tree arg00
= TREE_OPERAND (arg0
, 0);
12390 tree arg01
= TREE_OPERAND (arg0
, 1);
12391 if (TREE_CODE (arg00
) == LSHIFT_EXPR
12392 && integer_onep (TREE_OPERAND (arg00
, 0)))
12394 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
12395 arg01
, TREE_OPERAND (arg00
, 1));
12396 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12397 build_int_cst (TREE_TYPE (arg0
), 1));
12398 return fold_build2_loc (loc
, code
, type
,
12399 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12402 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
12403 && integer_onep (TREE_OPERAND (arg01
, 0)))
12405 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
12406 arg00
, TREE_OPERAND (arg01
, 1));
12407 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12408 build_int_cst (TREE_TYPE (arg0
), 1));
12409 return fold_build2_loc (loc
, code
, type
,
12410 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12415 /* If this is an NE or EQ comparison of zero against the result of a
12416 signed MOD operation whose second operand is a power of 2, make
12417 the MOD operation unsigned since it is simpler and equivalent. */
12418 if (integer_zerop (arg1
)
12419 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
12420 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
12421 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
12422 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
12423 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
12424 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12426 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
12427 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
12428 fold_convert_loc (loc
, newtype
,
12429 TREE_OPERAND (arg0
, 0)),
12430 fold_convert_loc (loc
, newtype
,
12431 TREE_OPERAND (arg0
, 1)));
12433 return fold_build2_loc (loc
, code
, type
, newmod
,
12434 fold_convert_loc (loc
, newtype
, arg1
));
12437 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12438 C1 is a valid shift constant, and C2 is a power of two, i.e.
12440 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12441 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
12442 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
12444 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12445 && integer_zerop (arg1
))
12447 tree itype
= TREE_TYPE (arg0
);
12448 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
12449 prec
= TYPE_PRECISION (itype
);
12451 /* Check for a valid shift count. */
12452 if (wi::ltu_p (arg001
, prec
))
12454 tree arg01
= TREE_OPERAND (arg0
, 1);
12455 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12456 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
12457 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12458 can be rewritten as (X & (C2 << C1)) != 0. */
12459 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
12461 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
12462 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
12463 return fold_build2_loc (loc
, code
, type
, tem
,
12464 fold_convert_loc (loc
, itype
, arg1
));
12466 /* Otherwise, for signed (arithmetic) shifts,
12467 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12468 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12469 else if (!TYPE_UNSIGNED (itype
))
12470 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
12471 arg000
, build_int_cst (itype
, 0));
12472 /* Otherwise, of unsigned (logical) shifts,
12473 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12474 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12476 return omit_one_operand_loc (loc
, type
,
12477 code
== EQ_EXPR
? integer_one_node
12478 : integer_zero_node
,
12483 /* If we have (A & C) == C where C is a power of 2, convert this into
12484 (A & C) != 0. Similarly for NE_EXPR. */
12485 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12486 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12487 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12488 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12489 arg0
, fold_convert_loc (loc
, TREE_TYPE (arg0
),
12490 integer_zero_node
));
12492 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12493 bit, then fold the expression into A < 0 or A >= 0. */
12494 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
, type
);
12498 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12499 Similarly for NE_EXPR. */
12500 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12501 && TREE_CODE (arg1
) == INTEGER_CST
12502 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12504 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
12505 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
12506 TREE_OPERAND (arg0
, 1));
12508 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12509 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
12511 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12512 if (integer_nonzerop (dandnotc
))
12513 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12516 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12517 Similarly for NE_EXPR. */
12518 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
12519 && TREE_CODE (arg1
) == INTEGER_CST
12520 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12522 tree notd
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
12524 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12525 TREE_OPERAND (arg0
, 1),
12526 fold_convert_loc (loc
, TREE_TYPE (arg0
), notd
));
12527 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12528 if (integer_nonzerop (candnotd
))
12529 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12532 /* If this is a comparison of a field, we may be able to simplify it. */
12533 if ((TREE_CODE (arg0
) == COMPONENT_REF
12534 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
12535 /* Handle the constant case even without -O
12536 to make sure the warnings are given. */
12537 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
12539 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
12544 /* Optimize comparisons of strlen vs zero to a compare of the
12545 first character of the string vs zero. To wit,
12546 strlen(ptr) == 0 => *ptr == 0
12547 strlen(ptr) != 0 => *ptr != 0
12548 Other cases should reduce to one of these two (or a constant)
12549 due to the return value of strlen being unsigned. */
12550 if (TREE_CODE (arg0
) == CALL_EXPR
12551 && integer_zerop (arg1
))
12553 tree fndecl
= get_callee_fndecl (arg0
);
12556 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
12557 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
12558 && call_expr_nargs (arg0
) == 1
12559 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
12561 tree iref
= build_fold_indirect_ref_loc (loc
,
12562 CALL_EXPR_ARG (arg0
, 0));
12563 return fold_build2_loc (loc
, code
, type
, iref
,
12564 build_int_cst (TREE_TYPE (iref
), 0));
12568 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12569 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12570 if (TREE_CODE (arg0
) == RSHIFT_EXPR
12571 && integer_zerop (arg1
)
12572 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12574 tree arg00
= TREE_OPERAND (arg0
, 0);
12575 tree arg01
= TREE_OPERAND (arg0
, 1);
12576 tree itype
= TREE_TYPE (arg00
);
12577 if (wi::eq_p (arg01
, element_precision (itype
) - 1))
12579 if (TYPE_UNSIGNED (itype
))
12581 itype
= signed_type_for (itype
);
12582 arg00
= fold_convert_loc (loc
, itype
, arg00
);
12584 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
12585 type
, arg00
, build_zero_cst (itype
));
12589 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12590 if (integer_zerop (arg1
)
12591 && TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12592 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12593 TREE_OPERAND (arg0
, 1));
12595 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12596 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12597 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12598 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12599 build_zero_cst (TREE_TYPE (arg0
)));
12600 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12601 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12602 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12603 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
12604 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 1),
12605 build_zero_cst (TREE_TYPE (arg0
)));
12607 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12608 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12609 && TREE_CODE (arg1
) == INTEGER_CST
12610 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12611 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12612 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg1
),
12613 TREE_OPERAND (arg0
, 1), arg1
));
12615 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12616 (X & C) == 0 when C is a single bit. */
12617 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12618 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
12619 && integer_zerop (arg1
)
12620 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12622 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12623 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
12624 TREE_OPERAND (arg0
, 1));
12625 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
12627 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12631 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12632 constant C is a power of two, i.e. a single bit. */
12633 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12634 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12635 && integer_zerop (arg1
)
12636 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12637 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12638 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12640 tree arg00
= TREE_OPERAND (arg0
, 0);
12641 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12642 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
12645 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12646 when is C is a power of two, i.e. a single bit. */
12647 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12648 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
12649 && integer_zerop (arg1
)
12650 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12651 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12652 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12654 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12655 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
12656 arg000
, TREE_OPERAND (arg0
, 1));
12657 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12658 tem
, build_int_cst (TREE_TYPE (tem
), 0));
12661 if (integer_zerop (arg1
)
12662 && tree_expr_nonzero_p (arg0
))
12664 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
12665 return omit_one_operand_loc (loc
, type
, res
, arg0
);
12668 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12669 if (TREE_CODE (arg0
) == NEGATE_EXPR
12670 && TREE_CODE (arg1
) == NEGATE_EXPR
)
12671 return fold_build2_loc (loc
, code
, type
,
12672 TREE_OPERAND (arg0
, 0),
12673 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12674 TREE_OPERAND (arg1
, 0)));
12676 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12677 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12678 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
12680 tree arg00
= TREE_OPERAND (arg0
, 0);
12681 tree arg01
= TREE_OPERAND (arg0
, 1);
12682 tree arg10
= TREE_OPERAND (arg1
, 0);
12683 tree arg11
= TREE_OPERAND (arg1
, 1);
12684 tree itype
= TREE_TYPE (arg0
);
12686 if (operand_equal_p (arg01
, arg11
, 0))
12687 return fold_build2_loc (loc
, code
, type
,
12688 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12689 fold_build2_loc (loc
,
12690 BIT_XOR_EXPR
, itype
,
12693 build_zero_cst (itype
));
12695 if (operand_equal_p (arg01
, arg10
, 0))
12696 return fold_build2_loc (loc
, code
, type
,
12697 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12698 fold_build2_loc (loc
,
12699 BIT_XOR_EXPR
, itype
,
12702 build_zero_cst (itype
));
12704 if (operand_equal_p (arg00
, arg11
, 0))
12705 return fold_build2_loc (loc
, code
, type
,
12706 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12707 fold_build2_loc (loc
,
12708 BIT_XOR_EXPR
, itype
,
12711 build_zero_cst (itype
));
12713 if (operand_equal_p (arg00
, arg10
, 0))
12714 return fold_build2_loc (loc
, code
, type
,
12715 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12716 fold_build2_loc (loc
,
12717 BIT_XOR_EXPR
, itype
,
12720 build_zero_cst (itype
));
12723 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12724 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
12726 tree arg00
= TREE_OPERAND (arg0
, 0);
12727 tree arg01
= TREE_OPERAND (arg0
, 1);
12728 tree arg10
= TREE_OPERAND (arg1
, 0);
12729 tree arg11
= TREE_OPERAND (arg1
, 1);
12730 tree itype
= TREE_TYPE (arg0
);
12732 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12733 operand_equal_p guarantees no side-effects so we don't need
12734 to use omit_one_operand on Z. */
12735 if (operand_equal_p (arg01
, arg11
, 0))
12736 return fold_build2_loc (loc
, code
, type
, arg00
,
12737 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12739 if (operand_equal_p (arg01
, arg10
, 0))
12740 return fold_build2_loc (loc
, code
, type
, arg00
,
12741 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12743 if (operand_equal_p (arg00
, arg11
, 0))
12744 return fold_build2_loc (loc
, code
, type
, arg01
,
12745 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12747 if (operand_equal_p (arg00
, arg10
, 0))
12748 return fold_build2_loc (loc
, code
, type
, arg01
,
12749 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12752 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12753 if (TREE_CODE (arg01
) == INTEGER_CST
12754 && TREE_CODE (arg11
) == INTEGER_CST
)
12756 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
12757 fold_convert_loc (loc
, itype
, arg11
));
12758 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
12759 return fold_build2_loc (loc
, code
, type
, tem
,
12760 fold_convert_loc (loc
, itype
, arg10
));
12764 /* Attempt to simplify equality/inequality comparisons of complex
12765 values. Only lower the comparison if the result is known or
12766 can be simplified to a single scalar comparison. */
12767 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
12768 || TREE_CODE (arg0
) == COMPLEX_CST
)
12769 && (TREE_CODE (arg1
) == COMPLEX_EXPR
12770 || TREE_CODE (arg1
) == COMPLEX_CST
))
12772 tree real0
, imag0
, real1
, imag1
;
12775 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
12777 real0
= TREE_OPERAND (arg0
, 0);
12778 imag0
= TREE_OPERAND (arg0
, 1);
12782 real0
= TREE_REALPART (arg0
);
12783 imag0
= TREE_IMAGPART (arg0
);
12786 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
12788 real1
= TREE_OPERAND (arg1
, 0);
12789 imag1
= TREE_OPERAND (arg1
, 1);
12793 real1
= TREE_REALPART (arg1
);
12794 imag1
= TREE_IMAGPART (arg1
);
12797 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
12798 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
12800 if (integer_zerop (rcond
))
12802 if (code
== EQ_EXPR
)
12803 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12805 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
12809 if (code
== NE_EXPR
)
12810 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12812 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
12816 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
12817 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
12819 if (integer_zerop (icond
))
12821 if (code
== EQ_EXPR
)
12822 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12824 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
12828 if (code
== NE_EXPR
)
12829 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12831 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
12842 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12843 if (tem
!= NULL_TREE
)
12846 /* Transform comparisons of the form X +- C CMP X. */
12847 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
12848 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12849 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
12850 && !HONOR_SNANS (arg0
))
12851 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12852 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
12854 tree arg01
= TREE_OPERAND (arg0
, 1);
12855 enum tree_code code0
= TREE_CODE (arg0
);
12858 if (TREE_CODE (arg01
) == REAL_CST
)
12859 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
12861 is_positive
= tree_int_cst_sgn (arg01
);
12863 /* (X - c) > X becomes false. */
12864 if (code
== GT_EXPR
12865 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12866 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12868 if (TREE_CODE (arg01
) == INTEGER_CST
12869 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12870 fold_overflow_warning (("assuming signed overflow does not "
12871 "occur when assuming that (X - c) > X "
12872 "is always false"),
12873 WARN_STRICT_OVERFLOW_ALL
);
12874 return constant_boolean_node (0, type
);
12877 /* Likewise (X + c) < X becomes false. */
12878 if (code
== LT_EXPR
12879 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12880 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12882 if (TREE_CODE (arg01
) == INTEGER_CST
12883 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12884 fold_overflow_warning (("assuming signed overflow does not "
12885 "occur when assuming that "
12886 "(X + c) < X is always false"),
12887 WARN_STRICT_OVERFLOW_ALL
);
12888 return constant_boolean_node (0, type
);
12891 /* Convert (X - c) <= X to true. */
12892 if (!HONOR_NANS (arg1
)
12894 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12895 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12897 if (TREE_CODE (arg01
) == INTEGER_CST
12898 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12899 fold_overflow_warning (("assuming signed overflow does not "
12900 "occur when assuming that "
12901 "(X - c) <= X is always true"),
12902 WARN_STRICT_OVERFLOW_ALL
);
12903 return constant_boolean_node (1, type
);
12906 /* Convert (X + c) >= X to true. */
12907 if (!HONOR_NANS (arg1
)
12909 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12910 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12912 if (TREE_CODE (arg01
) == INTEGER_CST
12913 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12914 fold_overflow_warning (("assuming signed overflow does not "
12915 "occur when assuming that "
12916 "(X + c) >= X is always true"),
12917 WARN_STRICT_OVERFLOW_ALL
);
12918 return constant_boolean_node (1, type
);
12921 if (TREE_CODE (arg01
) == INTEGER_CST
)
12923 /* Convert X + c > X and X - c < X to true for integers. */
12924 if (code
== GT_EXPR
12925 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
12926 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
12928 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12929 fold_overflow_warning (("assuming signed overflow does "
12930 "not occur when assuming that "
12931 "(X + c) > X is always true"),
12932 WARN_STRICT_OVERFLOW_ALL
);
12933 return constant_boolean_node (1, type
);
12936 if (code
== LT_EXPR
12937 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
12938 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
12940 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12941 fold_overflow_warning (("assuming signed overflow does "
12942 "not occur when assuming that "
12943 "(X - c) < X is always true"),
12944 WARN_STRICT_OVERFLOW_ALL
);
12945 return constant_boolean_node (1, type
);
12948 /* Convert X + c <= X and X - c >= X to false for integers. */
12949 if (code
== LE_EXPR
12950 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
12951 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
12953 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12954 fold_overflow_warning (("assuming signed overflow does "
12955 "not occur when assuming that "
12956 "(X + c) <= X is always false"),
12957 WARN_STRICT_OVERFLOW_ALL
);
12958 return constant_boolean_node (0, type
);
12961 if (code
== GE_EXPR
12962 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
12963 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
12965 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12966 fold_overflow_warning (("assuming signed overflow does "
12967 "not occur when assuming that "
12968 "(X - c) >= X is always false"),
12969 WARN_STRICT_OVERFLOW_ALL
);
12970 return constant_boolean_node (0, type
);
12975 /* Comparisons with the highest or lowest possible integer of
12976 the specified precision will have known values. */
12978 tree arg1_type
= TREE_TYPE (arg1
);
12979 unsigned int prec
= TYPE_PRECISION (arg1_type
);
12981 if (TREE_CODE (arg1
) == INTEGER_CST
12982 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
12984 wide_int max
= wi::max_value (arg1_type
);
12985 wide_int signed_max
= wi::max_value (prec
, SIGNED
);
12986 wide_int min
= wi::min_value (arg1_type
);
12988 if (wi::eq_p (arg1
, max
))
12992 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12995 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
12998 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13001 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13003 /* The GE_EXPR and LT_EXPR cases above are not normally
13004 reached because of previous transformations. */
13009 else if (wi::eq_p (arg1
, max
- 1))
13013 arg1
= const_binop (PLUS_EXPR
, arg1
,
13014 build_int_cst (TREE_TYPE (arg1
), 1));
13015 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13016 fold_convert_loc (loc
,
13017 TREE_TYPE (arg1
), arg0
),
13020 arg1
= const_binop (PLUS_EXPR
, arg1
,
13021 build_int_cst (TREE_TYPE (arg1
), 1));
13022 return fold_build2_loc (loc
, NE_EXPR
, type
,
13023 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13029 else if (wi::eq_p (arg1
, min
))
13033 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13036 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13039 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13042 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13047 else if (wi::eq_p (arg1
, min
+ 1))
13051 arg1
= const_binop (MINUS_EXPR
, arg1
,
13052 build_int_cst (TREE_TYPE (arg1
), 1));
13053 return fold_build2_loc (loc
, NE_EXPR
, type
,
13054 fold_convert_loc (loc
,
13055 TREE_TYPE (arg1
), arg0
),
13058 arg1
= const_binop (MINUS_EXPR
, arg1
,
13059 build_int_cst (TREE_TYPE (arg1
), 1));
13060 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13061 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13068 else if (wi::eq_p (arg1
, signed_max
)
13069 && TYPE_UNSIGNED (arg1_type
)
13070 /* We will flip the signedness of the comparison operator
13071 associated with the mode of arg1, so the sign bit is
13072 specified by this mode. Check that arg1 is the signed
13073 max associated with this sign bit. */
13074 && prec
== GET_MODE_PRECISION (TYPE_MODE (arg1_type
))
13075 /* signed_type does not work on pointer types. */
13076 && INTEGRAL_TYPE_P (arg1_type
))
13078 /* The following case also applies to X < signed_max+1
13079 and X >= signed_max+1 because previous transformations. */
13080 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13082 tree st
= signed_type_for (arg1_type
);
13083 return fold_build2_loc (loc
,
13084 code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
13085 type
, fold_convert_loc (loc
, st
, arg0
),
13086 build_int_cst (st
, 0));
13092 /* If we are comparing an ABS_EXPR with a constant, we can
13093 convert all the cases into explicit comparisons, but they may
13094 well not be faster than doing the ABS and one comparison.
13095 But ABS (X) <= C is a range comparison, which becomes a subtraction
13096 and a comparison, and is probably faster. */
13097 if (code
== LE_EXPR
13098 && TREE_CODE (arg1
) == INTEGER_CST
13099 && TREE_CODE (arg0
) == ABS_EXPR
13100 && ! TREE_SIDE_EFFECTS (arg0
)
13101 && (0 != (tem
= negate_expr (arg1
)))
13102 && TREE_CODE (tem
) == INTEGER_CST
13103 && !TREE_OVERFLOW (tem
))
13104 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13105 build2 (GE_EXPR
, type
,
13106 TREE_OPERAND (arg0
, 0), tem
),
13107 build2 (LE_EXPR
, type
,
13108 TREE_OPERAND (arg0
, 0), arg1
));
13110 /* Convert ABS_EXPR<x> >= 0 to true. */
13111 strict_overflow_p
= false;
13112 if (code
== GE_EXPR
13113 && (integer_zerop (arg1
)
13114 || (! HONOR_NANS (arg0
)
13115 && real_zerop (arg1
)))
13116 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13118 if (strict_overflow_p
)
13119 fold_overflow_warning (("assuming signed overflow does not occur "
13120 "when simplifying comparison of "
13121 "absolute value and zero"),
13122 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13123 return omit_one_operand_loc (loc
, type
,
13124 constant_boolean_node (true, type
),
13128 /* Convert ABS_EXPR<x> < 0 to false. */
13129 strict_overflow_p
= false;
13130 if (code
== LT_EXPR
13131 && (integer_zerop (arg1
) || real_zerop (arg1
))
13132 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13134 if (strict_overflow_p
)
13135 fold_overflow_warning (("assuming signed overflow does not occur "
13136 "when simplifying comparison of "
13137 "absolute value and zero"),
13138 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13139 return omit_one_operand_loc (loc
, type
,
13140 constant_boolean_node (false, type
),
13144 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13145 and similarly for >= into !=. */
13146 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13147 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13148 && TREE_CODE (arg1
) == LSHIFT_EXPR
13149 && integer_onep (TREE_OPERAND (arg1
, 0)))
13150 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13151 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13152 TREE_OPERAND (arg1
, 1)),
13153 build_zero_cst (TREE_TYPE (arg0
)));
13155 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13156 otherwise Y might be >= # of bits in X's type and thus e.g.
13157 (unsigned char) (1 << Y) for Y 15 might be 0.
13158 If the cast is widening, then 1 << Y should have unsigned type,
13159 otherwise if Y is number of bits in the signed shift type minus 1,
13160 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13161 31 might be 0xffffffff80000000. */
13162 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13163 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13164 && CONVERT_EXPR_P (arg1
)
13165 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
13166 && (element_precision (TREE_TYPE (arg1
))
13167 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
13168 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
13169 || (element_precision (TREE_TYPE (arg1
))
13170 == element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
13171 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
13173 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13174 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
13175 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13176 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
13177 build_zero_cst (TREE_TYPE (arg0
)));
13182 case UNORDERED_EXPR
:
13190 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
13192 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
13193 if (t1
!= NULL_TREE
)
13197 /* If the first operand is NaN, the result is constant. */
13198 if (TREE_CODE (arg0
) == REAL_CST
13199 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
13200 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13202 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13203 ? integer_zero_node
13204 : integer_one_node
;
13205 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
13208 /* If the second operand is NaN, the result is constant. */
13209 if (TREE_CODE (arg1
) == REAL_CST
13210 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
13211 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13213 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13214 ? integer_zero_node
13215 : integer_one_node
;
13216 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
13219 /* Simplify unordered comparison of something with itself. */
13220 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
13221 && operand_equal_p (arg0
, arg1
, 0))
13222 return constant_boolean_node (1, type
);
13224 if (code
== LTGT_EXPR
13225 && !flag_trapping_math
13226 && operand_equal_p (arg0
, arg1
, 0))
13227 return constant_boolean_node (0, type
);
13229 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13231 tree targ0
= strip_float_extensions (arg0
);
13232 tree targ1
= strip_float_extensions (arg1
);
13233 tree newtype
= TREE_TYPE (targ0
);
13235 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
13236 newtype
= TREE_TYPE (targ1
);
13238 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
13239 return fold_build2_loc (loc
, code
, type
,
13240 fold_convert_loc (loc
, newtype
, targ0
),
13241 fold_convert_loc (loc
, newtype
, targ1
));
13246 case COMPOUND_EXPR
:
13247 /* When pedantic, a compound expression can be neither an lvalue
13248 nor an integer constant expression. */
13249 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
13251 /* Don't let (0, 0) be null pointer constant. */
13252 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
13253 : fold_convert_loc (loc
, type
, arg1
);
13254 return pedantic_non_lvalue_loc (loc
, tem
);
13257 /* An ASSERT_EXPR should never be passed to fold_binary. */
13258 gcc_unreachable ();
13262 } /* switch (code) */
13265 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13266 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13270 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
13272 switch (TREE_CODE (*tp
))
13278 *walk_subtrees
= 0;
13280 /* ... fall through ... */
13287 /* Return whether the sub-tree ST contains a label which is accessible from
13288 outside the sub-tree. */
13291 contains_label_p (tree st
)
13294 (walk_tree_without_duplicates (&st
, contains_label_1
, NULL
) != NULL_TREE
);
13297 /* Fold a ternary expression of code CODE and type TYPE with operands
13298 OP0, OP1, and OP2. Return the folded expression if folding is
13299 successful. Otherwise, return NULL_TREE. */
13302 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
13303 tree op0
, tree op1
, tree op2
)
13306 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
13307 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13309 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
13310 && TREE_CODE_LENGTH (code
) == 3);
13312 /* If this is a commutative operation, and OP0 is a constant, move it
13313 to OP1 to reduce the number of tests below. */
13314 if (commutative_ternary_tree_code (code
)
13315 && tree_swap_operands_p (op0
, op1
, true))
13316 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
13318 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
13322 /* Strip any conversions that don't change the mode. This is safe
13323 for every expression, except for a comparison expression because
13324 its signedness is derived from its operands. So, in the latter
13325 case, only strip conversions that don't change the signedness.
13327 Note that this is done as an internal manipulation within the
13328 constant folder, in order to find the simplest representation of
13329 the arguments so that their form can be studied. In any cases,
13330 the appropriate type conversions should be put back in the tree
13331 that will get out of the constant folder. */
13352 case COMPONENT_REF
:
13353 if (TREE_CODE (arg0
) == CONSTRUCTOR
13354 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
13356 unsigned HOST_WIDE_INT idx
;
13358 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
13365 case VEC_COND_EXPR
:
13366 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13367 so all simple results must be passed through pedantic_non_lvalue. */
13368 if (TREE_CODE (arg0
) == INTEGER_CST
)
13370 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
13371 tem
= integer_zerop (arg0
) ? op2
: op1
;
13372 /* Only optimize constant conditions when the selected branch
13373 has the same type as the COND_EXPR. This avoids optimizing
13374 away "c ? x : throw", where the throw has a void type.
13375 Avoid throwing away that operand which contains label. */
13376 if ((!TREE_SIDE_EFFECTS (unused_op
)
13377 || !contains_label_p (unused_op
))
13378 && (! VOID_TYPE_P (TREE_TYPE (tem
))
13379 || VOID_TYPE_P (type
)))
13380 return pedantic_non_lvalue_loc (loc
, tem
);
13383 else if (TREE_CODE (arg0
) == VECTOR_CST
)
13385 if ((TREE_CODE (arg1
) == VECTOR_CST
13386 || TREE_CODE (arg1
) == CONSTRUCTOR
)
13387 && (TREE_CODE (arg2
) == VECTOR_CST
13388 || TREE_CODE (arg2
) == CONSTRUCTOR
))
13390 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13391 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
13392 gcc_assert (nelts
== VECTOR_CST_NELTS (arg0
));
13393 for (i
= 0; i
< nelts
; i
++)
13395 tree val
= VECTOR_CST_ELT (arg0
, i
);
13396 if (integer_all_onesp (val
))
13398 else if (integer_zerop (val
))
13399 sel
[i
] = nelts
+ i
;
13400 else /* Currently unreachable. */
13403 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
13404 if (t
!= NULL_TREE
)
13409 /* If we have A op B ? A : C, we may be able to convert this to a
13410 simpler expression, depending on the operation and the values
13411 of B and C. Signed zeros prevent all of these transformations,
13412 for reasons given above each one.
13414 Also try swapping the arguments and inverting the conditional. */
13415 if (COMPARISON_CLASS_P (arg0
)
13416 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13417 arg1
, TREE_OPERAND (arg0
, 1))
13418 && !HONOR_SIGNED_ZEROS (element_mode (arg1
)))
13420 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
13425 if (COMPARISON_CLASS_P (arg0
)
13426 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13428 TREE_OPERAND (arg0
, 1))
13429 && !HONOR_SIGNED_ZEROS (element_mode (op2
)))
13431 location_t loc0
= expr_location_or (arg0
, loc
);
13432 tem
= fold_invert_truthvalue (loc0
, arg0
);
13433 if (tem
&& COMPARISON_CLASS_P (tem
))
13435 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
13441 /* If the second operand is simpler than the third, swap them
13442 since that produces better jump optimization results. */
13443 if (truth_value_p (TREE_CODE (arg0
))
13444 && tree_swap_operands_p (op1
, op2
, false))
13446 location_t loc0
= expr_location_or (arg0
, loc
);
13447 /* See if this can be inverted. If it can't, possibly because
13448 it was a floating-point inequality comparison, don't do
13450 tem
= fold_invert_truthvalue (loc0
, arg0
);
13452 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
13455 /* Convert A ? 1 : 0 to simply A. */
13456 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
13457 : (integer_onep (op1
)
13458 && !VECTOR_TYPE_P (type
)))
13459 && integer_zerop (op2
)
13460 /* If we try to convert OP0 to our type, the
13461 call to fold will try to move the conversion inside
13462 a COND, which will recurse. In that case, the COND_EXPR
13463 is probably the best choice, so leave it alone. */
13464 && type
== TREE_TYPE (arg0
))
13465 return pedantic_non_lvalue_loc (loc
, arg0
);
13467 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13468 over COND_EXPR in cases such as floating point comparisons. */
13469 if (integer_zerop (op1
)
13470 && (code
== VEC_COND_EXPR
? integer_all_onesp (op2
)
13471 : (integer_onep (op2
)
13472 && !VECTOR_TYPE_P (type
)))
13473 && truth_value_p (TREE_CODE (arg0
)))
13474 return pedantic_non_lvalue_loc (loc
,
13475 fold_convert_loc (loc
, type
,
13476 invert_truthvalue_loc (loc
,
13479 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13480 if (TREE_CODE (arg0
) == LT_EXPR
13481 && integer_zerop (TREE_OPERAND (arg0
, 1))
13482 && integer_zerop (op2
)
13483 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
13485 /* sign_bit_p looks through both zero and sign extensions,
13486 but for this optimization only sign extensions are
13488 tree tem2
= TREE_OPERAND (arg0
, 0);
13489 while (tem
!= tem2
)
13491 if (TREE_CODE (tem2
) != NOP_EXPR
13492 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
13497 tem2
= TREE_OPERAND (tem2
, 0);
13499 /* sign_bit_p only checks ARG1 bits within A's precision.
13500 If <sign bit of A> has wider type than A, bits outside
13501 of A's precision in <sign bit of A> need to be checked.
13502 If they are all 0, this optimization needs to be done
13503 in unsigned A's type, if they are all 1 in signed A's type,
13504 otherwise this can't be done. */
13506 && TYPE_PRECISION (TREE_TYPE (tem
))
13507 < TYPE_PRECISION (TREE_TYPE (arg1
))
13508 && TYPE_PRECISION (TREE_TYPE (tem
))
13509 < TYPE_PRECISION (type
))
13511 int inner_width
, outer_width
;
13514 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
13515 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
13516 if (outer_width
> TYPE_PRECISION (type
))
13517 outer_width
= TYPE_PRECISION (type
);
13519 wide_int mask
= wi::shifted_mask
13520 (inner_width
, outer_width
- inner_width
, false,
13521 TYPE_PRECISION (TREE_TYPE (arg1
)));
13523 wide_int common
= mask
& arg1
;
13524 if (common
== mask
)
13526 tem_type
= signed_type_for (TREE_TYPE (tem
));
13527 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13529 else if (common
== 0)
13531 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
13532 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13540 fold_convert_loc (loc
, type
,
13541 fold_build2_loc (loc
, BIT_AND_EXPR
,
13542 TREE_TYPE (tem
), tem
,
13543 fold_convert_loc (loc
,
13548 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13549 already handled above. */
13550 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13551 && integer_onep (TREE_OPERAND (arg0
, 1))
13552 && integer_zerop (op2
)
13553 && integer_pow2p (arg1
))
13555 tree tem
= TREE_OPERAND (arg0
, 0);
13557 if (TREE_CODE (tem
) == RSHIFT_EXPR
13558 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
13559 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
13560 tree_to_uhwi (TREE_OPERAND (tem
, 1)))
13561 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
13562 TREE_OPERAND (tem
, 0), arg1
);
13565 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13566 is probably obsolete because the first operand should be a
13567 truth value (that's why we have the two cases above), but let's
13568 leave it in until we can confirm this for all front-ends. */
13569 if (integer_zerop (op2
)
13570 && TREE_CODE (arg0
) == NE_EXPR
13571 && integer_zerop (TREE_OPERAND (arg0
, 1))
13572 && integer_pow2p (arg1
)
13573 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
13574 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
13575 arg1
, OEP_ONLY_CONST
))
13576 return pedantic_non_lvalue_loc (loc
,
13577 fold_convert_loc (loc
, type
,
13578 TREE_OPERAND (arg0
, 0)));
13580 /* Disable the transformations below for vectors, since
13581 fold_binary_op_with_conditional_arg may undo them immediately,
13582 yielding an infinite loop. */
13583 if (code
== VEC_COND_EXPR
)
13586 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13587 if (integer_zerop (op2
)
13588 && truth_value_p (TREE_CODE (arg0
))
13589 && truth_value_p (TREE_CODE (arg1
))
13590 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13591 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
13592 : TRUTH_ANDIF_EXPR
,
13593 type
, fold_convert_loc (loc
, type
, arg0
), arg1
);
13595 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13596 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
13597 && truth_value_p (TREE_CODE (arg0
))
13598 && truth_value_p (TREE_CODE (arg1
))
13599 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13601 location_t loc0
= expr_location_or (arg0
, loc
);
13602 /* Only perform transformation if ARG0 is easily inverted. */
13603 tem
= fold_invert_truthvalue (loc0
, arg0
);
13605 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13608 type
, fold_convert_loc (loc
, type
, tem
),
13612 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13613 if (integer_zerop (arg1
)
13614 && truth_value_p (TREE_CODE (arg0
))
13615 && truth_value_p (TREE_CODE (op2
))
13616 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13618 location_t loc0
= expr_location_or (arg0
, loc
);
13619 /* Only perform transformation if ARG0 is easily inverted. */
13620 tem
= fold_invert_truthvalue (loc0
, arg0
);
13622 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13623 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
13624 type
, fold_convert_loc (loc
, type
, tem
),
13628 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13629 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
13630 && truth_value_p (TREE_CODE (arg0
))
13631 && truth_value_p (TREE_CODE (op2
))
13632 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13633 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13634 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
13635 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
13640 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13641 of fold_ternary on them. */
13642 gcc_unreachable ();
13644 case BIT_FIELD_REF
:
13645 if ((TREE_CODE (arg0
) == VECTOR_CST
13646 || (TREE_CODE (arg0
) == CONSTRUCTOR
13647 && TREE_CODE (TREE_TYPE (arg0
)) == VECTOR_TYPE
))
13648 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
13649 || (TREE_CODE (type
) == VECTOR_TYPE
13650 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
13652 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
13653 unsigned HOST_WIDE_INT width
= tree_to_uhwi (TYPE_SIZE (eltype
));
13654 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
13655 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
13658 && (idx
% width
) == 0
13659 && (n
% width
) == 0
13660 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
13665 if (TREE_CODE (arg0
) == VECTOR_CST
)
13668 return VECTOR_CST_ELT (arg0
, idx
);
13670 tree
*vals
= XALLOCAVEC (tree
, n
);
13671 for (unsigned i
= 0; i
< n
; ++i
)
13672 vals
[i
] = VECTOR_CST_ELT (arg0
, idx
+ i
);
13673 return build_vector (type
, vals
);
13676 /* Constructor elements can be subvectors. */
13677 unsigned HOST_WIDE_INT k
= 1;
13678 if (CONSTRUCTOR_NELTS (arg0
) != 0)
13680 tree cons_elem
= TREE_TYPE (CONSTRUCTOR_ELT (arg0
, 0)->value
);
13681 if (TREE_CODE (cons_elem
) == VECTOR_TYPE
)
13682 k
= TYPE_VECTOR_SUBPARTS (cons_elem
);
13685 /* We keep an exact subset of the constructor elements. */
13686 if ((idx
% k
) == 0 && (n
% k
) == 0)
13688 if (CONSTRUCTOR_NELTS (arg0
) == 0)
13689 return build_constructor (type
, NULL
);
13694 if (idx
< CONSTRUCTOR_NELTS (arg0
))
13695 return CONSTRUCTOR_ELT (arg0
, idx
)->value
;
13696 return build_zero_cst (type
);
13699 vec
<constructor_elt
, va_gc
> *vals
;
13700 vec_alloc (vals
, n
);
13701 for (unsigned i
= 0;
13702 i
< n
&& idx
+ i
< CONSTRUCTOR_NELTS (arg0
);
13704 CONSTRUCTOR_APPEND_ELT (vals
, NULL_TREE
,
13706 (arg0
, idx
+ i
)->value
);
13707 return build_constructor (type
, vals
);
13709 /* The bitfield references a single constructor element. */
13710 else if (idx
+ n
<= (idx
/ k
+ 1) * k
)
13712 if (CONSTRUCTOR_NELTS (arg0
) <= idx
/ k
)
13713 return build_zero_cst (type
);
13715 return CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
;
13717 return fold_build3_loc (loc
, code
, type
,
13718 CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
, op1
,
13719 build_int_cst (TREE_TYPE (op2
), (idx
% k
) * width
));
13724 /* A bit-field-ref that referenced the full argument can be stripped. */
13725 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
13726 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_to_uhwi (arg1
)
13727 && integer_zerop (op2
))
13728 return fold_convert_loc (loc
, type
, arg0
);
13730 /* On constants we can use native encode/interpret to constant
13731 fold (nearly) all BIT_FIELD_REFs. */
13732 if (CONSTANT_CLASS_P (arg0
)
13733 && can_native_interpret_type_p (type
)
13734 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)))
13735 /* This limitation should not be necessary, we just need to
13736 round this up to mode size. */
13737 && tree_to_uhwi (op1
) % BITS_PER_UNIT
== 0
13738 /* Need bit-shifting of the buffer to relax the following. */
13739 && tree_to_uhwi (op2
) % BITS_PER_UNIT
== 0)
13741 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
13742 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
13743 unsigned HOST_WIDE_INT clen
;
13744 clen
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)));
13745 /* ??? We cannot tell native_encode_expr to start at
13746 some random byte only. So limit us to a reasonable amount
13750 unsigned char *b
= XALLOCAVEC (unsigned char, clen
);
13751 unsigned HOST_WIDE_INT len
= native_encode_expr (arg0
, b
, clen
);
13753 && len
* BITS_PER_UNIT
>= bitpos
+ bitsize
)
13755 tree v
= native_interpret_expr (type
,
13756 b
+ bitpos
/ BITS_PER_UNIT
,
13757 bitsize
/ BITS_PER_UNIT
);
13767 /* For integers we can decompose the FMA if possible. */
13768 if (TREE_CODE (arg0
) == INTEGER_CST
13769 && TREE_CODE (arg1
) == INTEGER_CST
)
13770 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
13771 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
13772 if (integer_zerop (arg2
))
13773 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
13775 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
13777 case VEC_PERM_EXPR
:
13778 if (TREE_CODE (arg2
) == VECTOR_CST
)
13780 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
, mask
, mask2
;
13781 unsigned char *sel
= XALLOCAVEC (unsigned char, 2 * nelts
);
13782 unsigned char *sel2
= sel
+ nelts
;
13783 bool need_mask_canon
= false;
13784 bool need_mask_canon2
= false;
13785 bool all_in_vec0
= true;
13786 bool all_in_vec1
= true;
13787 bool maybe_identity
= true;
13788 bool single_arg
= (op0
== op1
);
13789 bool changed
= false;
13791 mask2
= 2 * nelts
- 1;
13792 mask
= single_arg
? (nelts
- 1) : mask2
;
13793 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
13794 for (i
= 0; i
< nelts
; i
++)
13796 tree val
= VECTOR_CST_ELT (arg2
, i
);
13797 if (TREE_CODE (val
) != INTEGER_CST
)
13800 /* Make sure that the perm value is in an acceptable
13803 need_mask_canon
|= wi::gtu_p (t
, mask
);
13804 need_mask_canon2
|= wi::gtu_p (t
, mask2
);
13805 sel
[i
] = t
.to_uhwi () & mask
;
13806 sel2
[i
] = t
.to_uhwi () & mask2
;
13808 if (sel
[i
] < nelts
)
13809 all_in_vec1
= false;
13811 all_in_vec0
= false;
13813 if ((sel
[i
] & (nelts
-1)) != i
)
13814 maybe_identity
= false;
13817 if (maybe_identity
)
13827 else if (all_in_vec1
)
13830 for (i
= 0; i
< nelts
; i
++)
13832 need_mask_canon
= true;
13835 if ((TREE_CODE (op0
) == VECTOR_CST
13836 || TREE_CODE (op0
) == CONSTRUCTOR
)
13837 && (TREE_CODE (op1
) == VECTOR_CST
13838 || TREE_CODE (op1
) == CONSTRUCTOR
))
13840 tree t
= fold_vec_perm (type
, op0
, op1
, sel
);
13841 if (t
!= NULL_TREE
)
13845 if (op0
== op1
&& !single_arg
)
13848 /* Some targets are deficient and fail to expand a single
13849 argument permutation while still allowing an equivalent
13850 2-argument version. */
13851 if (need_mask_canon
&& arg2
== op2
13852 && !can_vec_perm_p (TYPE_MODE (type
), false, sel
)
13853 && can_vec_perm_p (TYPE_MODE (type
), false, sel2
))
13855 need_mask_canon
= need_mask_canon2
;
13859 if (need_mask_canon
&& arg2
== op2
)
13861 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
13862 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
13863 for (i
= 0; i
< nelts
; i
++)
13864 tsel
[i
] = build_int_cst (eltype
, sel
[i
]);
13865 op2
= build_vector (TREE_TYPE (arg2
), tsel
);
13870 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
13876 } /* switch (code) */
13879 /* Perform constant folding and related simplification of EXPR.
13880 The related simplifications include x*1 => x, x*0 => 0, etc.,
13881 and application of the associative law.
13882 NOP_EXPR conversions may be removed freely (as long as we
13883 are careful not to change the type of the overall expression).
13884 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13885 but we can constant-fold them if they have constant operands. */
13887 #ifdef ENABLE_FOLD_CHECKING
13888 # define fold(x) fold_1 (x)
13889 static tree
fold_1 (tree
);
13895 const tree t
= expr
;
13896 enum tree_code code
= TREE_CODE (t
);
13897 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13899 location_t loc
= EXPR_LOCATION (expr
);
13901 /* Return right away if a constant. */
13902 if (kind
== tcc_constant
)
13905 /* CALL_EXPR-like objects with variable numbers of operands are
13906 treated specially. */
13907 if (kind
== tcc_vl_exp
)
13909 if (code
== CALL_EXPR
)
13911 tem
= fold_call_expr (loc
, expr
, false);
13912 return tem
? tem
: expr
;
13917 if (IS_EXPR_CODE_CLASS (kind
))
13919 tree type
= TREE_TYPE (t
);
13920 tree op0
, op1
, op2
;
13922 switch (TREE_CODE_LENGTH (code
))
13925 op0
= TREE_OPERAND (t
, 0);
13926 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13927 return tem
? tem
: expr
;
13929 op0
= TREE_OPERAND (t
, 0);
13930 op1
= TREE_OPERAND (t
, 1);
13931 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13932 return tem
? tem
: expr
;
13934 op0
= TREE_OPERAND (t
, 0);
13935 op1
= TREE_OPERAND (t
, 1);
13936 op2
= TREE_OPERAND (t
, 2);
13937 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13938 return tem
? tem
: expr
;
13948 tree op0
= TREE_OPERAND (t
, 0);
13949 tree op1
= TREE_OPERAND (t
, 1);
13951 if (TREE_CODE (op1
) == INTEGER_CST
13952 && TREE_CODE (op0
) == CONSTRUCTOR
13953 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
13955 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (op0
);
13956 unsigned HOST_WIDE_INT end
= vec_safe_length (elts
);
13957 unsigned HOST_WIDE_INT begin
= 0;
13959 /* Find a matching index by means of a binary search. */
13960 while (begin
!= end
)
13962 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
13963 tree index
= (*elts
)[middle
].index
;
13965 if (TREE_CODE (index
) == INTEGER_CST
13966 && tree_int_cst_lt (index
, op1
))
13967 begin
= middle
+ 1;
13968 else if (TREE_CODE (index
) == INTEGER_CST
13969 && tree_int_cst_lt (op1
, index
))
13971 else if (TREE_CODE (index
) == RANGE_EXPR
13972 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
13973 begin
= middle
+ 1;
13974 else if (TREE_CODE (index
) == RANGE_EXPR
13975 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
13978 return (*elts
)[middle
].value
;
13985 /* Return a VECTOR_CST if possible. */
13988 tree type
= TREE_TYPE (t
);
13989 if (TREE_CODE (type
) != VECTOR_TYPE
)
13992 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
13993 unsigned HOST_WIDE_INT idx
, pos
= 0;
13996 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), idx
, value
)
13998 if (!CONSTANT_CLASS_P (value
))
14000 if (TREE_CODE (value
) == VECTOR_CST
)
14002 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
14003 vec
[pos
++] = VECTOR_CST_ELT (value
, i
);
14006 vec
[pos
++] = value
;
14008 for (; pos
< TYPE_VECTOR_SUBPARTS (type
); ++pos
)
14009 vec
[pos
] = build_zero_cst (TREE_TYPE (type
));
14011 return build_vector (type
, vec
);
14015 return fold (DECL_INITIAL (t
));
14019 } /* switch (code) */
14022 #ifdef ENABLE_FOLD_CHECKING
14025 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
14026 hash_table
<pointer_hash
<const tree_node
> > *);
14027 static void fold_check_failed (const_tree
, const_tree
);
14028 void print_fold_checksum (const_tree
);
14030 /* When --enable-checking=fold, compute a digest of expr before
14031 and after actual fold call to see if fold did not accidentally
14032 change original expr. */
14038 struct md5_ctx ctx
;
14039 unsigned char checksum_before
[16], checksum_after
[16];
14040 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14042 md5_init_ctx (&ctx
);
14043 fold_checksum_tree (expr
, &ctx
, &ht
);
14044 md5_finish_ctx (&ctx
, checksum_before
);
14047 ret
= fold_1 (expr
);
14049 md5_init_ctx (&ctx
);
14050 fold_checksum_tree (expr
, &ctx
, &ht
);
14051 md5_finish_ctx (&ctx
, checksum_after
);
14053 if (memcmp (checksum_before
, checksum_after
, 16))
14054 fold_check_failed (expr
, ret
);
14060 print_fold_checksum (const_tree expr
)
14062 struct md5_ctx ctx
;
14063 unsigned char checksum
[16], cnt
;
14064 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14066 md5_init_ctx (&ctx
);
14067 fold_checksum_tree (expr
, &ctx
, &ht
);
14068 md5_finish_ctx (&ctx
, checksum
);
14069 for (cnt
= 0; cnt
< 16; ++cnt
)
14070 fprintf (stderr
, "%02x", checksum
[cnt
]);
14071 putc ('\n', stderr
);
14075 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
14077 internal_error ("fold check: original tree changed by fold");
14081 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
14082 hash_table
<pointer_hash
<const tree_node
> > *ht
)
14084 const tree_node
**slot
;
14085 enum tree_code code
;
14086 union tree_node buf
;
14092 slot
= ht
->find_slot (expr
, INSERT
);
14096 code
= TREE_CODE (expr
);
14097 if (TREE_CODE_CLASS (code
) == tcc_declaration
14098 && HAS_DECL_ASSEMBLER_NAME_P (expr
))
14100 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
14101 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14102 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
14103 buf
.decl_with_vis
.symtab_node
= NULL
;
14104 expr
= (tree
) &buf
;
14106 else if (TREE_CODE_CLASS (code
) == tcc_type
14107 && (TYPE_POINTER_TO (expr
)
14108 || TYPE_REFERENCE_TO (expr
)
14109 || TYPE_CACHED_VALUES_P (expr
)
14110 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
14111 || TYPE_NEXT_VARIANT (expr
)))
14113 /* Allow these fields to be modified. */
14115 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14116 expr
= tmp
= (tree
) &buf
;
14117 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
14118 TYPE_POINTER_TO (tmp
) = NULL
;
14119 TYPE_REFERENCE_TO (tmp
) = NULL
;
14120 TYPE_NEXT_VARIANT (tmp
) = NULL
;
14121 if (TYPE_CACHED_VALUES_P (tmp
))
14123 TYPE_CACHED_VALUES_P (tmp
) = 0;
14124 TYPE_CACHED_VALUES (tmp
) = NULL
;
14127 md5_process_bytes (expr
, tree_size (expr
), ctx
);
14128 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
14129 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
14130 if (TREE_CODE_CLASS (code
) != tcc_type
14131 && TREE_CODE_CLASS (code
) != tcc_declaration
14132 && code
!= TREE_LIST
14133 && code
!= SSA_NAME
14134 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
14135 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
14136 switch (TREE_CODE_CLASS (code
))
14142 md5_process_bytes (TREE_STRING_POINTER (expr
),
14143 TREE_STRING_LENGTH (expr
), ctx
);
14146 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
14147 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
14150 for (i
= 0; i
< (int) VECTOR_CST_NELTS (expr
); ++i
)
14151 fold_checksum_tree (VECTOR_CST_ELT (expr
, i
), ctx
, ht
);
14157 case tcc_exceptional
:
14161 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
14162 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
14163 expr
= TREE_CHAIN (expr
);
14164 goto recursive_label
;
14167 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
14168 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
14174 case tcc_expression
:
14175 case tcc_reference
:
14176 case tcc_comparison
:
14179 case tcc_statement
:
14181 len
= TREE_OPERAND_LENGTH (expr
);
14182 for (i
= 0; i
< len
; ++i
)
14183 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
14185 case tcc_declaration
:
14186 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
14187 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
14188 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
14190 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
14191 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
14192 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
14193 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
14194 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
14197 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
14199 if (TREE_CODE (expr
) == FUNCTION_DECL
)
14201 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
14202 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
14204 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
14208 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
14209 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
14210 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
14211 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
14212 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
14213 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
14214 if (INTEGRAL_TYPE_P (expr
)
14215 || SCALAR_FLOAT_TYPE_P (expr
))
14217 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
14218 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
14220 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
14221 if (TREE_CODE (expr
) == RECORD_TYPE
14222 || TREE_CODE (expr
) == UNION_TYPE
14223 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
14224 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
14225 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
14232 /* Helper function for outputting the checksum of a tree T. When
14233 debugging with gdb, you can "define mynext" to be "next" followed
14234 by "call debug_fold_checksum (op0)", then just trace down till the
14237 DEBUG_FUNCTION
void
14238 debug_fold_checksum (const_tree t
)
14241 unsigned char checksum
[16];
14242 struct md5_ctx ctx
;
14243 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14245 md5_init_ctx (&ctx
);
14246 fold_checksum_tree (t
, &ctx
, &ht
);
14247 md5_finish_ctx (&ctx
, checksum
);
14250 for (i
= 0; i
< 16; i
++)
14251 fprintf (stderr
, "%d ", checksum
[i
]);
14253 fprintf (stderr
, "\n");
14258 /* Fold a unary tree expression with code CODE of type TYPE with an
14259 operand OP0. LOC is the location of the resulting expression.
14260 Return a folded expression if successful. Otherwise, return a tree
14261 expression with code CODE of type TYPE with an operand OP0. */
14264 fold_build1_stat_loc (location_t loc
,
14265 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
14268 #ifdef ENABLE_FOLD_CHECKING
14269 unsigned char checksum_before
[16], checksum_after
[16];
14270 struct md5_ctx ctx
;
14271 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14273 md5_init_ctx (&ctx
);
14274 fold_checksum_tree (op0
, &ctx
, &ht
);
14275 md5_finish_ctx (&ctx
, checksum_before
);
14279 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14281 tem
= build1_stat_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
14283 #ifdef ENABLE_FOLD_CHECKING
14284 md5_init_ctx (&ctx
);
14285 fold_checksum_tree (op0
, &ctx
, &ht
);
14286 md5_finish_ctx (&ctx
, checksum_after
);
14288 if (memcmp (checksum_before
, checksum_after
, 16))
14289 fold_check_failed (op0
, tem
);
14294 /* Fold a binary tree expression with code CODE of type TYPE with
14295 operands OP0 and OP1. LOC is the location of the resulting
14296 expression. Return a folded expression if successful. Otherwise,
14297 return a tree expression with code CODE of type TYPE with operands
14301 fold_build2_stat_loc (location_t loc
,
14302 enum tree_code code
, tree type
, tree op0
, tree op1
14306 #ifdef ENABLE_FOLD_CHECKING
14307 unsigned char checksum_before_op0
[16],
14308 checksum_before_op1
[16],
14309 checksum_after_op0
[16],
14310 checksum_after_op1
[16];
14311 struct md5_ctx ctx
;
14312 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14314 md5_init_ctx (&ctx
);
14315 fold_checksum_tree (op0
, &ctx
, &ht
);
14316 md5_finish_ctx (&ctx
, checksum_before_op0
);
14319 md5_init_ctx (&ctx
);
14320 fold_checksum_tree (op1
, &ctx
, &ht
);
14321 md5_finish_ctx (&ctx
, checksum_before_op1
);
14325 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14327 tem
= build2_stat_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
14329 #ifdef ENABLE_FOLD_CHECKING
14330 md5_init_ctx (&ctx
);
14331 fold_checksum_tree (op0
, &ctx
, &ht
);
14332 md5_finish_ctx (&ctx
, checksum_after_op0
);
14335 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14336 fold_check_failed (op0
, tem
);
14338 md5_init_ctx (&ctx
);
14339 fold_checksum_tree (op1
, &ctx
, &ht
);
14340 md5_finish_ctx (&ctx
, checksum_after_op1
);
14342 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14343 fold_check_failed (op1
, tem
);
14348 /* Fold a ternary tree expression with code CODE of type TYPE with
14349 operands OP0, OP1, and OP2. Return a folded expression if
14350 successful. Otherwise, return a tree expression with code CODE of
14351 type TYPE with operands OP0, OP1, and OP2. */
14354 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
14355 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
14358 #ifdef ENABLE_FOLD_CHECKING
14359 unsigned char checksum_before_op0
[16],
14360 checksum_before_op1
[16],
14361 checksum_before_op2
[16],
14362 checksum_after_op0
[16],
14363 checksum_after_op1
[16],
14364 checksum_after_op2
[16];
14365 struct md5_ctx ctx
;
14366 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14368 md5_init_ctx (&ctx
);
14369 fold_checksum_tree (op0
, &ctx
, &ht
);
14370 md5_finish_ctx (&ctx
, checksum_before_op0
);
14373 md5_init_ctx (&ctx
);
14374 fold_checksum_tree (op1
, &ctx
, &ht
);
14375 md5_finish_ctx (&ctx
, checksum_before_op1
);
14378 md5_init_ctx (&ctx
);
14379 fold_checksum_tree (op2
, &ctx
, &ht
);
14380 md5_finish_ctx (&ctx
, checksum_before_op2
);
14384 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
14385 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14387 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
14389 #ifdef ENABLE_FOLD_CHECKING
14390 md5_init_ctx (&ctx
);
14391 fold_checksum_tree (op0
, &ctx
, &ht
);
14392 md5_finish_ctx (&ctx
, checksum_after_op0
);
14395 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14396 fold_check_failed (op0
, tem
);
14398 md5_init_ctx (&ctx
);
14399 fold_checksum_tree (op1
, &ctx
, &ht
);
14400 md5_finish_ctx (&ctx
, checksum_after_op1
);
14403 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14404 fold_check_failed (op1
, tem
);
14406 md5_init_ctx (&ctx
);
14407 fold_checksum_tree (op2
, &ctx
, &ht
);
14408 md5_finish_ctx (&ctx
, checksum_after_op2
);
14410 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
14411 fold_check_failed (op2
, tem
);
14416 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14417 arguments in ARGARRAY, and a null static chain.
14418 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14419 of type TYPE from the given operands as constructed by build_call_array. */
14422 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
14423 int nargs
, tree
*argarray
)
14426 #ifdef ENABLE_FOLD_CHECKING
14427 unsigned char checksum_before_fn
[16],
14428 checksum_before_arglist
[16],
14429 checksum_after_fn
[16],
14430 checksum_after_arglist
[16];
14431 struct md5_ctx ctx
;
14432 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14435 md5_init_ctx (&ctx
);
14436 fold_checksum_tree (fn
, &ctx
, &ht
);
14437 md5_finish_ctx (&ctx
, checksum_before_fn
);
14440 md5_init_ctx (&ctx
);
14441 for (i
= 0; i
< nargs
; i
++)
14442 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
14443 md5_finish_ctx (&ctx
, checksum_before_arglist
);
14447 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
14449 tem
= build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
14451 #ifdef ENABLE_FOLD_CHECKING
14452 md5_init_ctx (&ctx
);
14453 fold_checksum_tree (fn
, &ctx
, &ht
);
14454 md5_finish_ctx (&ctx
, checksum_after_fn
);
14457 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
14458 fold_check_failed (fn
, tem
);
14460 md5_init_ctx (&ctx
);
14461 for (i
= 0; i
< nargs
; i
++)
14462 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
14463 md5_finish_ctx (&ctx
, checksum_after_arglist
);
14465 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
14466 fold_check_failed (NULL_TREE
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

/* Save and clear every flag that makes fold preserve run-time traps;
   initializers are evaluated at compile time, so traps cannot occur.  */
#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

/* Restore the flags saved by START_FOLD_INIT.  */
#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
14495 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
14496 tree type
, tree op
)
14501 result
= fold_build1_loc (loc
, code
, type
, op
);
14508 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
14509 tree type
, tree op0
, tree op1
)
14514 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
14521 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
14522 int nargs
, tree
*argarray
)
14527 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
14533 #undef START_FOLD_INIT
14534 #undef END_FOLD_INIT
14536 /* Determine if first argument is a multiple of second argument. Return 0 if
14537 it is not, or we cannot easily determined it to be.
14539 An example of the sort of thing we care about (at this point; this routine
14540 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14541 fold cases do now) is discovering that
14543 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14549 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14551 This code also handles discovering that
14553 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14555 is a multiple of 8 so we don't have to worry about dealing with a
14556 possible remainder.
14558 Note that we *look* inside a SAVE_EXPR only to determine how it was
14559 calculated; it is not safe for fold to do much of anything else with the
14560 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14561 at run time. For example, the latter example above *cannot* be implemented
14562 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14563 evaluation time of the original SAVE_EXPR is not necessarily the same at
14564 the time the new expression is evaluated. The only optimization of this
14565 sort that would be valid is changing
14567 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14571 SAVE_EXPR (I) * SAVE_EXPR (J)
14573 (where the same SAVE_EXPR (J) is used in the original and the
14574 transformed version). */
14577 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
14579 if (operand_equal_p (top
, bottom
, 0))
14582 if (TREE_CODE (type
) != INTEGER_TYPE
)
14585 switch (TREE_CODE (top
))
14588 /* Bitwise and provides a power of two multiple. If the mask is
14589 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14590 if (!integer_pow2p (bottom
))
14595 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14596 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
14600 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14601 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
14604 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
14608 op1
= TREE_OPERAND (top
, 1);
14609 /* const_binop may not detect overflow correctly,
14610 so check for it explicitly here. */
14611 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
14612 && 0 != (t1
= fold_convert (type
,
14613 const_binop (LSHIFT_EXPR
,
14616 && !TREE_OVERFLOW (t1
))
14617 return multiple_of_p (type
, t1
, bottom
);
14622 /* Can't handle conversions from non-integral or wider integral type. */
14623 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
14624 || (TYPE_PRECISION (type
)
14625 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
14628 /* .. fall through ... */
14631 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
14634 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
14635 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
14638 if (TREE_CODE (bottom
) != INTEGER_CST
14639 || integer_zerop (bottom
)
14640 || (TYPE_UNSIGNED (type
)
14641 && (tree_int_cst_sgn (top
) < 0
14642 || tree_int_cst_sgn (bottom
) < 0)))
14644 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
14652 /* Return true if CODE or TYPE is known to be non-negative. */
14655 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
14657 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
14658 && truth_value_p (code
))
14659 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14660 have a signed:1 type (where the value is -1 and 0). */
14665 /* Return true if (CODE OP0) is known to be non-negative. If the return
14666 value is based on the assumption that signed overflow is undefined,
14667 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14668 *STRICT_OVERFLOW_P. */
14671 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14672 bool *strict_overflow_p
)
14674 if (TYPE_UNSIGNED (type
))
14680 /* We can't return 1 if flag_wrapv is set because
14681 ABS_EXPR<INT_MIN> = INT_MIN. */
14682 if (!INTEGRAL_TYPE_P (type
))
14684 if (TYPE_OVERFLOW_UNDEFINED (type
))
14686 *strict_overflow_p
= true;
14691 case NON_LVALUE_EXPR
:
14693 case FIX_TRUNC_EXPR
:
14694 return tree_expr_nonnegative_warnv_p (op0
,
14695 strict_overflow_p
);
14699 tree inner_type
= TREE_TYPE (op0
);
14700 tree outer_type
= type
;
14702 if (TREE_CODE (outer_type
) == REAL_TYPE
)
14704 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14705 return tree_expr_nonnegative_warnv_p (op0
,
14706 strict_overflow_p
);
14707 if (INTEGRAL_TYPE_P (inner_type
))
14709 if (TYPE_UNSIGNED (inner_type
))
14711 return tree_expr_nonnegative_warnv_p (op0
,
14712 strict_overflow_p
);
14715 else if (INTEGRAL_TYPE_P (outer_type
))
14717 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14718 return tree_expr_nonnegative_warnv_p (op0
,
14719 strict_overflow_p
);
14720 if (INTEGRAL_TYPE_P (inner_type
))
14721 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
14722 && TYPE_UNSIGNED (inner_type
);
14728 return tree_simple_nonnegative_warnv_p (code
, type
);
14731 /* We don't know sign of `t', so be conservative and return false. */
14735 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14736 value is based on the assumption that signed overflow is undefined,
14737 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14738 *STRICT_OVERFLOW_P. */
14741 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14742 tree op1
, bool *strict_overflow_p
)
14744 if (TYPE_UNSIGNED (type
))
14749 case POINTER_PLUS_EXPR
:
14751 if (FLOAT_TYPE_P (type
))
14752 return (tree_expr_nonnegative_warnv_p (op0
,
14754 && tree_expr_nonnegative_warnv_p (op1
,
14755 strict_overflow_p
));
14757 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14758 both unsigned and at least 2 bits shorter than the result. */
14759 if (TREE_CODE (type
) == INTEGER_TYPE
14760 && TREE_CODE (op0
) == NOP_EXPR
14761 && TREE_CODE (op1
) == NOP_EXPR
)
14763 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
14764 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
14765 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
14766 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
14768 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
14769 TYPE_PRECISION (inner2
)) + 1;
14770 return prec
< TYPE_PRECISION (type
);
14776 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
14778 /* x * x is always non-negative for floating point x
14779 or without overflow. */
14780 if (operand_equal_p (op0
, op1
, 0)
14781 || (tree_expr_nonnegative_warnv_p (op0
, strict_overflow_p
)
14782 && tree_expr_nonnegative_warnv_p (op1
, strict_overflow_p
)))
14784 if (ANY_INTEGRAL_TYPE_P (type
)
14785 && TYPE_OVERFLOW_UNDEFINED (type
))
14786 *strict_overflow_p
= true;
14791 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14792 both unsigned and their total bits is shorter than the result. */
14793 if (TREE_CODE (type
) == INTEGER_TYPE
14794 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
14795 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
14797 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
14798 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
14800 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
14801 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
14804 bool unsigned0
= TYPE_UNSIGNED (inner0
);
14805 bool unsigned1
= TYPE_UNSIGNED (inner1
);
14807 if (TREE_CODE (op0
) == INTEGER_CST
)
14808 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
14810 if (TREE_CODE (op1
) == INTEGER_CST
)
14811 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
14813 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
14814 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
14816 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
14817 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
14818 : TYPE_PRECISION (inner0
);
14820 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
14821 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
14822 : TYPE_PRECISION (inner1
);
14824 return precision0
+ precision1
< TYPE_PRECISION (type
);
14831 return (tree_expr_nonnegative_warnv_p (op0
,
14833 || tree_expr_nonnegative_warnv_p (op1
,
14834 strict_overflow_p
));
14840 case TRUNC_DIV_EXPR
:
14841 case CEIL_DIV_EXPR
:
14842 case FLOOR_DIV_EXPR
:
14843 case ROUND_DIV_EXPR
:
14844 return (tree_expr_nonnegative_warnv_p (op0
,
14846 && tree_expr_nonnegative_warnv_p (op1
,
14847 strict_overflow_p
));
14849 case TRUNC_MOD_EXPR
:
14850 case CEIL_MOD_EXPR
:
14851 case FLOOR_MOD_EXPR
:
14852 case ROUND_MOD_EXPR
:
14853 return tree_expr_nonnegative_warnv_p (op0
,
14854 strict_overflow_p
);
14856 return tree_simple_nonnegative_warnv_p (code
, type
);
14859 /* We don't know sign of `t', so be conservative and return false. */
14863 /* Return true if T is known to be non-negative. If the return
14864 value is based on the assumption that signed overflow is undefined,
14865 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14866 *STRICT_OVERFLOW_P. */
14869 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14871 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14874 switch (TREE_CODE (t
))
14877 return tree_int_cst_sgn (t
) >= 0;
14880 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
14883 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
14886 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
14888 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 2),
14889 strict_overflow_p
));
14891 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
14894 /* We don't know sign of `t', so be conservative and return false. */
14898 /* Return true if T is known to be non-negative. If the return
14899 value is based on the assumption that signed overflow is undefined,
14900 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14901 *STRICT_OVERFLOW_P. */
14904 tree_call_nonnegative_warnv_p (tree type
, tree fndecl
,
14905 tree arg0
, tree arg1
, bool *strict_overflow_p
)
14907 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
14908 switch (DECL_FUNCTION_CODE (fndecl
))
14910 CASE_FLT_FN (BUILT_IN_ACOS
):
14911 CASE_FLT_FN (BUILT_IN_ACOSH
):
14912 CASE_FLT_FN (BUILT_IN_CABS
):
14913 CASE_FLT_FN (BUILT_IN_COSH
):
14914 CASE_FLT_FN (BUILT_IN_ERFC
):
14915 CASE_FLT_FN (BUILT_IN_EXP
):
14916 CASE_FLT_FN (BUILT_IN_EXP10
):
14917 CASE_FLT_FN (BUILT_IN_EXP2
):
14918 CASE_FLT_FN (BUILT_IN_FABS
):
14919 CASE_FLT_FN (BUILT_IN_FDIM
):
14920 CASE_FLT_FN (BUILT_IN_HYPOT
):
14921 CASE_FLT_FN (BUILT_IN_POW10
):
14922 CASE_INT_FN (BUILT_IN_FFS
):
14923 CASE_INT_FN (BUILT_IN_PARITY
):
14924 CASE_INT_FN (BUILT_IN_POPCOUNT
):
14925 CASE_INT_FN (BUILT_IN_CLZ
):
14926 CASE_INT_FN (BUILT_IN_CLRSB
):
14927 case BUILT_IN_BSWAP32
:
14928 case BUILT_IN_BSWAP64
:
14932 CASE_FLT_FN (BUILT_IN_SQRT
):
14933 /* sqrt(-0.0) is -0.0. */
14934 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
14936 return tree_expr_nonnegative_warnv_p (arg0
,
14937 strict_overflow_p
);
14939 CASE_FLT_FN (BUILT_IN_ASINH
):
14940 CASE_FLT_FN (BUILT_IN_ATAN
):
14941 CASE_FLT_FN (BUILT_IN_ATANH
):
14942 CASE_FLT_FN (BUILT_IN_CBRT
):
14943 CASE_FLT_FN (BUILT_IN_CEIL
):
14944 CASE_FLT_FN (BUILT_IN_ERF
):
14945 CASE_FLT_FN (BUILT_IN_EXPM1
):
14946 CASE_FLT_FN (BUILT_IN_FLOOR
):
14947 CASE_FLT_FN (BUILT_IN_FMOD
):
14948 CASE_FLT_FN (BUILT_IN_FREXP
):
14949 CASE_FLT_FN (BUILT_IN_ICEIL
):
14950 CASE_FLT_FN (BUILT_IN_IFLOOR
):
14951 CASE_FLT_FN (BUILT_IN_IRINT
):
14952 CASE_FLT_FN (BUILT_IN_IROUND
):
14953 CASE_FLT_FN (BUILT_IN_LCEIL
):
14954 CASE_FLT_FN (BUILT_IN_LDEXP
):
14955 CASE_FLT_FN (BUILT_IN_LFLOOR
):
14956 CASE_FLT_FN (BUILT_IN_LLCEIL
):
14957 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
14958 CASE_FLT_FN (BUILT_IN_LLRINT
):
14959 CASE_FLT_FN (BUILT_IN_LLROUND
):
14960 CASE_FLT_FN (BUILT_IN_LRINT
):
14961 CASE_FLT_FN (BUILT_IN_LROUND
):
14962 CASE_FLT_FN (BUILT_IN_MODF
):
14963 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
14964 CASE_FLT_FN (BUILT_IN_RINT
):
14965 CASE_FLT_FN (BUILT_IN_ROUND
):
14966 CASE_FLT_FN (BUILT_IN_SCALB
):
14967 CASE_FLT_FN (BUILT_IN_SCALBLN
):
14968 CASE_FLT_FN (BUILT_IN_SCALBN
):
14969 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
14970 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
14971 CASE_FLT_FN (BUILT_IN_SINH
):
14972 CASE_FLT_FN (BUILT_IN_TANH
):
14973 CASE_FLT_FN (BUILT_IN_TRUNC
):
14974 /* True if the 1st argument is nonnegative. */
14975 return tree_expr_nonnegative_warnv_p (arg0
,
14976 strict_overflow_p
);
14978 CASE_FLT_FN (BUILT_IN_FMAX
):
14979 /* True if the 1st OR 2nd arguments are nonnegative. */
14980 return (tree_expr_nonnegative_warnv_p (arg0
,
14982 || (tree_expr_nonnegative_warnv_p (arg1
,
14983 strict_overflow_p
)));
14985 CASE_FLT_FN (BUILT_IN_FMIN
):
14986 /* True if the 1st AND 2nd arguments are nonnegative. */
14987 return (tree_expr_nonnegative_warnv_p (arg0
,
14989 && (tree_expr_nonnegative_warnv_p (arg1
,
14990 strict_overflow_p
)));
14992 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
14993 /* True if the 2nd argument is nonnegative. */
14994 return tree_expr_nonnegative_warnv_p (arg1
,
14995 strict_overflow_p
);
14997 CASE_FLT_FN (BUILT_IN_POWI
):
14998 /* True if the 1st argument is nonnegative or the second
14999 argument is an even integer. */
15000 if (TREE_CODE (arg1
) == INTEGER_CST
15001 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
15003 return tree_expr_nonnegative_warnv_p (arg0
,
15004 strict_overflow_p
);
15006 CASE_FLT_FN (BUILT_IN_POW
):
15007 /* True if the 1st argument is nonnegative or the second
15008 argument is an even integer valued real. */
15009 if (TREE_CODE (arg1
) == REAL_CST
)
15014 c
= TREE_REAL_CST (arg1
);
15015 n
= real_to_integer (&c
);
15018 REAL_VALUE_TYPE cint
;
15019 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
15020 if (real_identical (&c
, &cint
))
15024 return tree_expr_nonnegative_warnv_p (arg0
,
15025 strict_overflow_p
);
15030 return tree_simple_nonnegative_warnv_p (CALL_EXPR
,
15034 /* Return true if T is known to be non-negative. If the return
15035 value is based on the assumption that signed overflow is undefined,
15036 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15037 *STRICT_OVERFLOW_P. */
15040 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15042 enum tree_code code
= TREE_CODE (t
);
15043 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
15050 tree temp
= TARGET_EXPR_SLOT (t
);
15051 t
= TARGET_EXPR_INITIAL (t
);
15053 /* If the initializer is non-void, then it's a normal expression
15054 that will be assigned to the slot. */
15055 if (!VOID_TYPE_P (t
))
15056 return tree_expr_nonnegative_warnv_p (t
, strict_overflow_p
);
15058 /* Otherwise, the initializer sets the slot in some way. One common
15059 way is an assignment statement at the end of the initializer. */
15062 if (TREE_CODE (t
) == BIND_EXPR
)
15063 t
= expr_last (BIND_EXPR_BODY (t
));
15064 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
15065 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
15066 t
= expr_last (TREE_OPERAND (t
, 0));
15067 else if (TREE_CODE (t
) == STATEMENT_LIST
)
15072 if (TREE_CODE (t
) == MODIFY_EXPR
15073 && TREE_OPERAND (t
, 0) == temp
)
15074 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15075 strict_overflow_p
);
15082 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
15083 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
15085 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
15086 get_callee_fndecl (t
),
15089 strict_overflow_p
);
15091 case COMPOUND_EXPR
:
15093 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15094 strict_overflow_p
);
15096 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t
, 1)),
15097 strict_overflow_p
);
15099 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
15100 strict_overflow_p
);
15103 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
15107 /* We don't know sign of `t', so be conservative and return false. */
15111 /* Return true if T is known to be non-negative. If the return
15112 value is based on the assumption that signed overflow is undefined,
15113 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15114 *STRICT_OVERFLOW_P. */
15117 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15119 enum tree_code code
;
15120 if (t
== error_mark_node
)
15123 code
= TREE_CODE (t
);
15124 switch (TREE_CODE_CLASS (code
))
15127 case tcc_comparison
:
15128 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15130 TREE_OPERAND (t
, 0),
15131 TREE_OPERAND (t
, 1),
15132 strict_overflow_p
);
15135 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15137 TREE_OPERAND (t
, 0),
15138 strict_overflow_p
);
15141 case tcc_declaration
:
15142 case tcc_reference
:
15143 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15151 case TRUTH_AND_EXPR
:
15152 case TRUTH_OR_EXPR
:
15153 case TRUTH_XOR_EXPR
:
15154 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15156 TREE_OPERAND (t
, 0),
15157 TREE_OPERAND (t
, 1),
15158 strict_overflow_p
);
15159 case TRUTH_NOT_EXPR
:
15160 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15162 TREE_OPERAND (t
, 0),
15163 strict_overflow_p
);
15170 case WITH_SIZE_EXPR
:
15172 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15175 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
);
15179 /* Return true if `t' is known to be non-negative. Handle warnings
15180 about undefined signed overflow. */
15183 tree_expr_nonnegative_p (tree t
)
15185 bool ret
, strict_overflow_p
;
15187 strict_overflow_p
= false;
15188 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
15189 if (strict_overflow_p
)
15190 fold_overflow_warning (("assuming signed overflow does not occur when "
15191 "determining that expression is always "
15193 WARN_STRICT_OVERFLOW_MISC
);
15198 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15199 For floating point we further ensure that T is not denormal.
15200 Similar logic is present in nonzero_address in rtlanal.h.
15202 If the return value is based on the assumption that signed overflow
15203 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15204 change *STRICT_OVERFLOW_P. */
15207 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
15208 bool *strict_overflow_p
)
15213 return tree_expr_nonzero_warnv_p (op0
,
15214 strict_overflow_p
);
15218 tree inner_type
= TREE_TYPE (op0
);
15219 tree outer_type
= type
;
15221 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
15222 && tree_expr_nonzero_warnv_p (op0
,
15223 strict_overflow_p
));
15227 case NON_LVALUE_EXPR
:
15228 return tree_expr_nonzero_warnv_p (op0
,
15229 strict_overflow_p
);
15238 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15239 For floating point we further ensure that T is not denormal.
15240 Similar logic is present in nonzero_address in rtlanal.h.
15242 If the return value is based on the assumption that signed overflow
15243 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15244 change *STRICT_OVERFLOW_P. */
15247 tree_binary_nonzero_warnv_p (enum tree_code code
,
15250 tree op1
, bool *strict_overflow_p
)
15252 bool sub_strict_overflow_p
;
15255 case POINTER_PLUS_EXPR
:
15257 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_UNDEFINED (type
))
15259 /* With the presence of negative values it is hard
15260 to say something. */
15261 sub_strict_overflow_p
= false;
15262 if (!tree_expr_nonnegative_warnv_p (op0
,
15263 &sub_strict_overflow_p
)
15264 || !tree_expr_nonnegative_warnv_p (op1
,
15265 &sub_strict_overflow_p
))
15267 /* One of operands must be positive and the other non-negative. */
15268 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15269 overflows, on a twos-complement machine the sum of two
15270 nonnegative numbers can never be zero. */
15271 return (tree_expr_nonzero_warnv_p (op0
,
15273 || tree_expr_nonzero_warnv_p (op1
,
15274 strict_overflow_p
));
15279 if (TYPE_OVERFLOW_UNDEFINED (type
))
15281 if (tree_expr_nonzero_warnv_p (op0
,
15283 && tree_expr_nonzero_warnv_p (op1
,
15284 strict_overflow_p
))
15286 *strict_overflow_p
= true;
15293 sub_strict_overflow_p
= false;
15294 if (tree_expr_nonzero_warnv_p (op0
,
15295 &sub_strict_overflow_p
)
15296 && tree_expr_nonzero_warnv_p (op1
,
15297 &sub_strict_overflow_p
))
15299 if (sub_strict_overflow_p
)
15300 *strict_overflow_p
= true;
15305 sub_strict_overflow_p
= false;
15306 if (tree_expr_nonzero_warnv_p (op0
,
15307 &sub_strict_overflow_p
))
15309 if (sub_strict_overflow_p
)
15310 *strict_overflow_p
= true;
15312 /* When both operands are nonzero, then MAX must be too. */
15313 if (tree_expr_nonzero_warnv_p (op1
,
15314 strict_overflow_p
))
15317 /* MAX where operand 0 is positive is positive. */
15318 return tree_expr_nonnegative_warnv_p (op0
,
15319 strict_overflow_p
);
15321 /* MAX where operand 1 is positive is positive. */
15322 else if (tree_expr_nonzero_warnv_p (op1
,
15323 &sub_strict_overflow_p
)
15324 && tree_expr_nonnegative_warnv_p (op1
,
15325 &sub_strict_overflow_p
))
15327 if (sub_strict_overflow_p
)
15328 *strict_overflow_p
= true;
15334 return (tree_expr_nonzero_warnv_p (op1
,
15336 || tree_expr_nonzero_warnv_p (op0
,
15337 strict_overflow_p
));
15346 /* Return true when T is an address and is known to be nonzero.
15347 For floating point we further ensure that T is not denormal.
15348 Similar logic is present in nonzero_address in rtlanal.h.
15350 If the return value is based on the assumption that signed overflow
15351 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15352 change *STRICT_OVERFLOW_P. */
15355 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15357 bool sub_strict_overflow_p
;
15358 switch (TREE_CODE (t
))
15361 return !integer_zerop (t
);
15365 tree base
= TREE_OPERAND (t
, 0);
15367 if (!DECL_P (base
))
15368 base
= get_base_address (base
);
15373 /* For objects in symbol table check if we know they are non-zero.
15374 Don't do anything for variables and functions before symtab is built;
15375 it is quite possible that they will be declared weak later. */
15376 if (DECL_P (base
) && decl_in_symtab_p (base
))
15378 struct symtab_node
*symbol
;
15380 symbol
= symtab_node::get_create (base
);
15382 return symbol
->nonzero_address ();
15387 /* Function local objects are never NULL. */
15389 && (DECL_CONTEXT (base
)
15390 && TREE_CODE (DECL_CONTEXT (base
)) == FUNCTION_DECL
15391 && auto_var_in_fn_p (base
, DECL_CONTEXT (base
))))
15394 /* Constants are never weak. */
15395 if (CONSTANT_CLASS_P (base
))
15402 sub_strict_overflow_p
= false;
15403 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15404 &sub_strict_overflow_p
)
15405 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
15406 &sub_strict_overflow_p
))
15408 if (sub_strict_overflow_p
)
15409 *strict_overflow_p
= true;
15420 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15421 attempt to fold the expression to a constant without modifying TYPE,
15424 If the expression could be simplified to a constant, then return
15425 the constant. If the expression would not be simplified to a
15426 constant, then return NULL_TREE. */
15429 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
15431 tree tem
= fold_binary (code
, type
, op0
, op1
);
15432 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15435 /* Given the components of a unary expression CODE, TYPE and OP0,
15436 attempt to fold the expression to a constant without modifying
15439 If the expression could be simplified to a constant, then return
15440 the constant. If the expression would not be simplified to a
15441 constant, then return NULL_TREE. */
15444 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
15446 tree tem
= fold_unary (code
, type
, op0
);
15447 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15450 /* If EXP represents referencing an element in a constant string
15451 (either via pointer arithmetic or array indexing), return the
15452 tree representing the value accessed, otherwise return NULL. */
15455 fold_read_from_constant_string (tree exp
)
15457 if ((TREE_CODE (exp
) == INDIRECT_REF
15458 || TREE_CODE (exp
) == ARRAY_REF
)
15459 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
15461 tree exp1
= TREE_OPERAND (exp
, 0);
15464 location_t loc
= EXPR_LOCATION (exp
);
15466 if (TREE_CODE (exp
) == INDIRECT_REF
)
15467 string
= string_constant (exp1
, &index
);
15470 tree low_bound
= array_ref_low_bound (exp
);
15471 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
15473 /* Optimize the special-case of a zero lower bound.
15475 We convert the low_bound to sizetype to avoid some problems
15476 with constant folding. (E.g. suppose the lower bound is 1,
15477 and its mode is QI. Without the conversion,l (ARRAY
15478 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15479 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15480 if (! integer_zerop (low_bound
))
15481 index
= size_diffop_loc (loc
, index
,
15482 fold_convert_loc (loc
, sizetype
, low_bound
));
15488 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
15489 && TREE_CODE (string
) == STRING_CST
15490 && TREE_CODE (index
) == INTEGER_CST
15491 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
15492 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
15494 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
15495 return build_int_cst_type (TREE_TYPE (exp
),
15496 (TREE_STRING_POINTER (string
)
15497 [TREE_INT_CST_LOW (index
)]));
15502 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15503 an integer constant, real, or fixed-point constant.
15505 TYPE is the type of the result. */
15508 fold_negate_const (tree arg0
, tree type
)
15510 tree t
= NULL_TREE
;
15512 switch (TREE_CODE (arg0
))
15517 wide_int val
= wi::neg (arg0
, &overflow
);
15518 t
= force_fit_type (type
, val
, 1,
15519 (overflow
| TREE_OVERFLOW (arg0
))
15520 && !TYPE_UNSIGNED (type
));
15525 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15530 FIXED_VALUE_TYPE f
;
15531 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
15532 &(TREE_FIXED_CST (arg0
)), NULL
,
15533 TYPE_SATURATING (type
));
15534 t
= build_fixed (type
, f
);
15535 /* Propagate overflow flags. */
15536 if (overflow_p
| TREE_OVERFLOW (arg0
))
15537 TREE_OVERFLOW (t
) = 1;
15542 gcc_unreachable ();
15548 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15549 an integer constant or real constant.
15551 TYPE is the type of the result. */
15554 fold_abs_const (tree arg0
, tree type
)
15556 tree t
= NULL_TREE
;
15558 switch (TREE_CODE (arg0
))
15562 /* If the value is unsigned or non-negative, then the absolute value
15563 is the same as the ordinary value. */
15564 if (!wi::neg_p (arg0
, TYPE_SIGN (type
)))
15567 /* If the value is negative, then the absolute value is
15572 wide_int val
= wi::neg (arg0
, &overflow
);
15573 t
= force_fit_type (type
, val
, -1,
15574 overflow
| TREE_OVERFLOW (arg0
));
15580 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
15581 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15587 gcc_unreachable ();
15593 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15594 constant. TYPE is the type of the result. */
15597 fold_not_const (const_tree arg0
, tree type
)
15599 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
15601 return force_fit_type (type
, wi::bit_not (arg0
), 0, TREE_OVERFLOW (arg0
));
15604 /* Given CODE, a relational operator, the target type, TYPE and two
15605 constant operands OP0 and OP1, return the result of the
15606 relational operation. If the result is not a compile time
15607 constant, then return NULL_TREE. */
15610 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
15612 int result
, invert
;
15614 /* From here on, the only cases we handle are when the result is
15615 known to be a constant. */
15617 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
15619 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
15620 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
15622 /* Handle the cases where either operand is a NaN. */
15623 if (real_isnan (c0
) || real_isnan (c1
))
15633 case UNORDERED_EXPR
:
15647 if (flag_trapping_math
)
15653 gcc_unreachable ();
15656 return constant_boolean_node (result
, type
);
15659 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
15662 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
15664 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
15665 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
15666 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
15669 /* Handle equality/inequality of complex constants. */
15670 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
15672 tree rcond
= fold_relational_const (code
, type
,
15673 TREE_REALPART (op0
),
15674 TREE_REALPART (op1
));
15675 tree icond
= fold_relational_const (code
, type
,
15676 TREE_IMAGPART (op0
),
15677 TREE_IMAGPART (op1
));
15678 if (code
== EQ_EXPR
)
15679 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
15680 else if (code
== NE_EXPR
)
15681 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
15686 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
15688 unsigned count
= VECTOR_CST_NELTS (op0
);
15689 tree
*elts
= XALLOCAVEC (tree
, count
);
15690 gcc_assert (VECTOR_CST_NELTS (op1
) == count
15691 && TYPE_VECTOR_SUBPARTS (type
) == count
);
15693 for (unsigned i
= 0; i
< count
; i
++)
15695 tree elem_type
= TREE_TYPE (type
);
15696 tree elem0
= VECTOR_CST_ELT (op0
, i
);
15697 tree elem1
= VECTOR_CST_ELT (op1
, i
);
15699 tree tem
= fold_relational_const (code
, elem_type
,
15702 if (tem
== NULL_TREE
)
15705 elts
[i
] = build_int_cst (elem_type
, integer_zerop (tem
) ? 0 : -1);
15708 return build_vector (type
, elts
);
15711 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15713 To compute GT, swap the arguments and do LT.
15714 To compute GE, do LT and invert the result.
15715 To compute LE, swap the arguments, do LT and invert the result.
15716 To compute NE, do EQ and invert the result.
15718 Therefore, the code below must handle only EQ and LT. */
15720 if (code
== LE_EXPR
|| code
== GT_EXPR
)
15725 code
= swap_tree_comparison (code
);
15728 /* Note that it is safe to invert for real values here because we
15729 have already handled the one case that it matters. */
15732 if (code
== NE_EXPR
|| code
== GE_EXPR
)
15735 code
= invert_tree_comparison (code
, false);
15738 /* Compute a result for LT or EQ if args permit;
15739 Otherwise return T. */
15740 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
15742 if (code
== EQ_EXPR
)
15743 result
= tree_int_cst_equal (op0
, op1
);
15745 result
= tree_int_cst_lt (op0
, op1
);
15752 return constant_boolean_node (result
, type
);
15755 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15756 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15760 fold_build_cleanup_point_expr (tree type
, tree expr
)
15762 /* If the expression does not have side effects then we don't have to wrap
15763 it with a cleanup point expression. */
15764 if (!TREE_SIDE_EFFECTS (expr
))
15767 /* If the expression is a return, check to see if the expression inside the
15768 return has no side effects or the right hand side of the modify expression
15769 inside the return. If either don't have side effects set we don't need to
15770 wrap the expression in a cleanup point expression. Note we don't check the
15771 left hand side of the modify because it should always be a return decl. */
15772 if (TREE_CODE (expr
) == RETURN_EXPR
)
15774 tree op
= TREE_OPERAND (expr
, 0);
15775 if (!op
|| !TREE_SIDE_EFFECTS (op
))
15777 op
= TREE_OPERAND (op
, 1);
15778 if (!TREE_SIDE_EFFECTS (op
))
15782 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
15785 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15786 of an indirection through OP0, or NULL_TREE if no simplification is
15790 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
15796 subtype
= TREE_TYPE (sub
);
15797 if (!POINTER_TYPE_P (subtype
))
15800 if (TREE_CODE (sub
) == ADDR_EXPR
)
15802 tree op
= TREE_OPERAND (sub
, 0);
15803 tree optype
= TREE_TYPE (op
);
15804 /* *&CONST_DECL -> to the value of the const decl. */
15805 if (TREE_CODE (op
) == CONST_DECL
)
15806 return DECL_INITIAL (op
);
15807 /* *&p => p; make sure to handle *&"str"[cst] here. */
15808 if (type
== optype
)
15810 tree fop
= fold_read_from_constant_string (op
);
15816 /* *(foo *)&fooarray => fooarray[0] */
15817 else if (TREE_CODE (optype
) == ARRAY_TYPE
15818 && type
== TREE_TYPE (optype
)
15819 && (!in_gimple_form
15820 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15822 tree type_domain
= TYPE_DOMAIN (optype
);
15823 tree min_val
= size_zero_node
;
15824 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15825 min_val
= TYPE_MIN_VALUE (type_domain
);
15827 && TREE_CODE (min_val
) != INTEGER_CST
)
15829 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
15830 NULL_TREE
, NULL_TREE
);
15832 /* *(foo *)&complexfoo => __real__ complexfoo */
15833 else if (TREE_CODE (optype
) == COMPLEX_TYPE
15834 && type
== TREE_TYPE (optype
))
15835 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
15836 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15837 else if (TREE_CODE (optype
) == VECTOR_TYPE
15838 && type
== TREE_TYPE (optype
))
15840 tree part_width
= TYPE_SIZE (type
);
15841 tree index
= bitsize_int (0);
15842 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
15846 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
15847 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
15849 tree op00
= TREE_OPERAND (sub
, 0);
15850 tree op01
= TREE_OPERAND (sub
, 1);
15853 if (TREE_CODE (op00
) == ADDR_EXPR
)
15856 op00
= TREE_OPERAND (op00
, 0);
15857 op00type
= TREE_TYPE (op00
);
15859 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15860 if (TREE_CODE (op00type
) == VECTOR_TYPE
15861 && type
== TREE_TYPE (op00type
))
15863 HOST_WIDE_INT offset
= tree_to_shwi (op01
);
15864 tree part_width
= TYPE_SIZE (type
);
15865 unsigned HOST_WIDE_INT part_widthi
= tree_to_shwi (part_width
)/BITS_PER_UNIT
;
15866 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
15867 tree index
= bitsize_int (indexi
);
15869 if (offset
/ part_widthi
< TYPE_VECTOR_SUBPARTS (op00type
))
15870 return fold_build3_loc (loc
,
15871 BIT_FIELD_REF
, type
, op00
,
15872 part_width
, index
);
15875 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15876 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
15877 && type
== TREE_TYPE (op00type
))
15879 tree size
= TYPE_SIZE_UNIT (type
);
15880 if (tree_int_cst_equal (size
, op01
))
15881 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
15883 /* ((foo *)&fooarray)[1] => fooarray[1] */
15884 else if (TREE_CODE (op00type
) == ARRAY_TYPE
15885 && type
== TREE_TYPE (op00type
))
15887 tree type_domain
= TYPE_DOMAIN (op00type
);
15888 tree min_val
= size_zero_node
;
15889 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15890 min_val
= TYPE_MIN_VALUE (type_domain
);
15891 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
15892 TYPE_SIZE_UNIT (type
));
15893 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
15894 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
15895 NULL_TREE
, NULL_TREE
);
15900 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15901 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
15902 && type
== TREE_TYPE (TREE_TYPE (subtype
))
15903 && (!in_gimple_form
15904 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15907 tree min_val
= size_zero_node
;
15908 sub
= build_fold_indirect_ref_loc (loc
, sub
);
15909 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
15910 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15911 min_val
= TYPE_MIN_VALUE (type_domain
);
15913 && TREE_CODE (min_val
) != INTEGER_CST
)
15915 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
15922 /* Builds an expression for an indirection through T, simplifying some
15926 build_fold_indirect_ref_loc (location_t loc
, tree t
)
15928 tree type
= TREE_TYPE (TREE_TYPE (t
));
15929 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
15934 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
15937 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15940 fold_indirect_ref_loc (location_t loc
, tree t
)
15942 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
15950 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15951 whose result is ignored. The type of the returned tree need not be
15952 the same as the original expression. */
15955 fold_ignored_result (tree t
)
15957 if (!TREE_SIDE_EFFECTS (t
))
15958 return integer_zero_node
;
15961 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
15964 t
= TREE_OPERAND (t
, 0);
15968 case tcc_comparison
:
15969 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15970 t
= TREE_OPERAND (t
, 0);
15971 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
15972 t
= TREE_OPERAND (t
, 1);
15977 case tcc_expression
:
15978 switch (TREE_CODE (t
))
15980 case COMPOUND_EXPR
:
15981 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15983 t
= TREE_OPERAND (t
, 0);
15987 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
15988 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
15990 t
= TREE_OPERAND (t
, 0);
16003 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16006 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
16008 tree div
= NULL_TREE
;
16013 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16014 have to do anything. Only do this when we are not given a const,
16015 because in that case, this check is more expensive than just
16017 if (TREE_CODE (value
) != INTEGER_CST
)
16019 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16021 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16025 /* If divisor is a power of two, simplify this to bit manipulation. */
16026 if (divisor
== (divisor
& -divisor
))
16028 if (TREE_CODE (value
) == INTEGER_CST
)
16030 wide_int val
= value
;
16033 if ((val
& (divisor
- 1)) == 0)
16036 overflow_p
= TREE_OVERFLOW (value
);
16037 val
+= divisor
- 1;
16038 val
&= - (int) divisor
;
16042 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
16048 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
16049 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
16050 t
= build_int_cst (TREE_TYPE (value
), - (int) divisor
);
16051 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16057 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16058 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
16059 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16065 /* Likewise, but round down. */
16068 round_down_loc (location_t loc
, tree value
, int divisor
)
16070 tree div
= NULL_TREE
;
16072 gcc_assert (divisor
> 0);
16076 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16077 have to do anything. Only do this when we are not given a const,
16078 because in that case, this check is more expensive than just
16080 if (TREE_CODE (value
) != INTEGER_CST
)
16082 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16084 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16088 /* If divisor is a power of two, simplify this to bit manipulation. */
16089 if (divisor
== (divisor
& -divisor
))
16093 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16094 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16099 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16100 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
16101 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16107 /* Returns the pointer to the base of the object addressed by EXP and
16108 extracts the information about the offset of the access, storing it
16109 to PBITPOS and POFFSET. */
16112 split_address_to_core_and_offset (tree exp
,
16113 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
16117 int unsignedp
, volatilep
;
16118 HOST_WIDE_INT bitsize
;
16119 location_t loc
= EXPR_LOCATION (exp
);
16121 if (TREE_CODE (exp
) == ADDR_EXPR
)
16123 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
16124 poffset
, &mode
, &unsignedp
, &volatilep
,
16126 core
= build_fold_addr_expr_loc (loc
, core
);
16132 *poffset
= NULL_TREE
;
16138 /* Returns true if addresses of E1 and E2 differ by a constant, false
16139 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16142 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
16145 HOST_WIDE_INT bitpos1
, bitpos2
;
16146 tree toffset1
, toffset2
, tdiff
, type
;
16148 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
16149 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
16151 if (bitpos1
% BITS_PER_UNIT
!= 0
16152 || bitpos2
% BITS_PER_UNIT
!= 0
16153 || !operand_equal_p (core1
, core2
, 0))
16156 if (toffset1
&& toffset2
)
16158 type
= TREE_TYPE (toffset1
);
16159 if (type
!= TREE_TYPE (toffset2
))
16160 toffset2
= fold_convert (type
, toffset2
);
16162 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
16163 if (!cst_and_fits_in_hwi (tdiff
))
16166 *diff
= int_cst_value (tdiff
);
16168 else if (toffset1
|| toffset2
)
16170 /* If only one of the offsets is non-constant, the difference cannot
16177 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
16181 /* Simplify the floating point expression EXP when the sign of the
16182 result is not significant. Return NULL_TREE if no simplification
16186 fold_strip_sign_ops (tree exp
)
16189 location_t loc
= EXPR_LOCATION (exp
);
16191 switch (TREE_CODE (exp
))
16195 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16196 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
16200 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp
)))
16202 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16203 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16204 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
16205 return fold_build2_loc (loc
, TREE_CODE (exp
), TREE_TYPE (exp
),
16206 arg0
? arg0
: TREE_OPERAND (exp
, 0),
16207 arg1
? arg1
: TREE_OPERAND (exp
, 1));
16210 case COMPOUND_EXPR
:
16211 arg0
= TREE_OPERAND (exp
, 0);
16212 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16214 return fold_build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (exp
), arg0
, arg1
);
16218 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16219 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 2));
16221 return fold_build3_loc (loc
,
16222 COND_EXPR
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
16223 arg0
? arg0
: TREE_OPERAND (exp
, 1),
16224 arg1
? arg1
: TREE_OPERAND (exp
, 2));
16229 const enum built_in_function fcode
= builtin_mathfn_code (exp
);
16232 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
16233 /* Strip copysign function call, return the 1st argument. */
16234 arg0
= CALL_EXPR_ARG (exp
, 0);
16235 arg1
= CALL_EXPR_ARG (exp
, 1);
16236 return omit_one_operand_loc (loc
, TREE_TYPE (exp
), arg0
, arg1
);
16239 /* Strip sign ops from the argument of "odd" math functions. */
16240 if (negate_mathfn_p (fcode
))
16242 arg0
= fold_strip_sign_ops (CALL_EXPR_ARG (exp
, 0));
16244 return build_call_expr_loc (loc
, get_callee_fndecl (exp
), 1, arg0
);
16257 /* Return OFF converted to a pointer offset type suitable as offset for
16258 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16260 convert_to_ptrofftype_loc (location_t loc
, tree off
)
16262 return fold_convert_loc (loc
, sizetype
, off
);
16265 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16267 fold_build_pointer_plus_loc (location_t loc
, tree ptr
, tree off
)
16269 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
16270 ptr
, convert_to_ptrofftype_loc (loc
, off
));
16273 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16275 fold_build_pointer_plus_hwi_loc (location_t loc
, tree ptr
, HOST_WIDE_INT off
)
16277 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
16278 ptr
, size_int (off
));