/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
55 #include "fold-const.h"
56 #include "stor-layout.h"
58 #include "tree-iterator.h"
62 #include "hard-reg-set.h"
64 #include "statistics.h"
65 #include "insn-config.h"
75 #include "diagnostic-core.h"
77 #include "langhooks.h"
80 #include "basic-block.h"
81 #include "tree-ssa-alias.h"
82 #include "internal-fn.h"
84 #include "gimple-expr.h"
89 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
92 #include "plugin-api.h"
95 #include "generic-match.h"
98 /* Nonzero if we are folding constants inside an initializer; zero
100 int folding_initializer
= 0;
102 /* The following constants represent a bit based encoding of GCC's
103 comparison operators. This encoding simplifies transformations
104 on relational comparison operators, such as AND and OR. */
105 enum comparison_code
{
124 static bool negate_mathfn_p (enum built_in_function
);
125 static bool negate_expr_p (tree
);
126 static tree
negate_expr (tree
);
127 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
128 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
129 static enum comparison_code
comparison_to_compcode (enum tree_code
);
130 static enum tree_code
compcode_to_comparison (enum comparison_code
);
131 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
132 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
133 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
134 static tree
distribute_bit_expr (location_t
, enum tree_code
, tree
, tree
, tree
);
135 static tree
make_bit_field_ref (location_t
, tree
, tree
,
136 HOST_WIDE_INT
, HOST_WIDE_INT
, int);
137 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
139 static tree
decode_field_reference (location_t
, tree
, HOST_WIDE_INT
*,
141 machine_mode
*, int *, int *,
143 static int simple_operand_p (const_tree
);
144 static bool simple_operand_p_2 (tree
);
145 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
146 static tree
range_predecessor (tree
);
147 static tree
range_successor (tree
);
148 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
149 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
150 static tree
unextend (tree
, int, int, tree
);
151 static tree
optimize_minmax_comparison (location_t
, enum tree_code
,
153 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
154 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
155 static tree
fold_binary_op_with_conditional_arg (location_t
,
156 enum tree_code
, tree
,
159 static tree
fold_mathfn_compare (location_t
,
160 enum built_in_function
, enum tree_code
,
162 static tree
fold_inf_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
163 static tree
fold_div_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
164 static bool reorder_operands_p (const_tree
, const_tree
);
165 static tree
fold_negate_const (tree
, tree
);
166 static tree
fold_not_const (const_tree
, tree
);
167 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
168 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
169 static tree
fold_view_convert_expr (tree
, tree
);
170 static bool vec_cst_ctor_to_array (tree
, tree
*);
173 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
174 Otherwise, return LOC. */
177 expr_location_or (tree t
, location_t loc
)
179 location_t tloc
= EXPR_LOCATION (t
);
180 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
183 /* Similar to protected_set_expr_location, but never modify x in place,
184 if location can and needs to be set, unshare it. */
187 protected_set_expr_location_unshare (tree x
, location_t loc
)
189 if (CAN_HAVE_LOCATION_P (x
)
190 && EXPR_LOCATION (x
) != loc
191 && !(TREE_CODE (x
) == SAVE_EXPR
192 || TREE_CODE (x
) == TARGET_EXPR
193 || TREE_CODE (x
) == BIND_EXPR
))
196 SET_EXPR_LOCATION (x
, loc
);
201 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
202 division and returns the quotient. Otherwise returns
206 div_if_zero_remainder (const_tree arg1
, const_tree arg2
)
210 if (wi::multiple_of_p (wi::to_widest (arg1
), wi::to_widest (arg2
),
212 return wide_int_to_tree (TREE_TYPE (arg1
), quo
);
217 /* This is nonzero if we should defer warnings about undefined
218 overflow. This facility exists because these warnings are a
219 special case. The code to estimate loop iterations does not want
220 to issue any warnings, since it works with expressions which do not
221 occur in user code. Various bits of cleanup code call fold(), but
222 only use the result if it has certain characteristics (e.g., is a
223 constant); that code only wants to issue a warning if the result is
226 static int fold_deferring_overflow_warnings
;
228 /* If a warning about undefined overflow is deferred, this is the
229 warning. Note that this may cause us to turn two warnings into
230 one, but that is fine since it is sufficient to only give one
231 warning per expression. */
233 static const char* fold_deferred_overflow_warning
;
235 /* If a warning about undefined overflow is deferred, this is the
236 level at which the warning should be emitted. */
238 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
240 /* Start deferring overflow warnings. We could use a stack here to
241 permit nested calls, but at present it is not necessary. */
244 fold_defer_overflow_warnings (void)
246 ++fold_deferring_overflow_warnings
;
249 /* Stop deferring overflow warnings. If there is a pending warning,
250 and ISSUE is true, then issue the warning if appropriate. STMT is
251 the statement with which the warning should be associated (used for
252 location information); STMT may be NULL. CODE is the level of the
253 warning--a warn_strict_overflow_code value. This function will use
254 the smaller of CODE and the deferred code when deciding whether to
255 issue the warning. CODE may be zero to mean to always use the
259 fold_undefer_overflow_warnings (bool issue
, const_gimple stmt
, int code
)
264 gcc_assert (fold_deferring_overflow_warnings
> 0);
265 --fold_deferring_overflow_warnings
;
266 if (fold_deferring_overflow_warnings
> 0)
268 if (fold_deferred_overflow_warning
!= NULL
270 && code
< (int) fold_deferred_overflow_code
)
271 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
275 warnmsg
= fold_deferred_overflow_warning
;
276 fold_deferred_overflow_warning
= NULL
;
278 if (!issue
|| warnmsg
== NULL
)
281 if (gimple_no_warning_p (stmt
))
284 /* Use the smallest code level when deciding to issue the
286 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
287 code
= fold_deferred_overflow_code
;
289 if (!issue_strict_overflow_warning (code
))
293 locus
= input_location
;
295 locus
= gimple_location (stmt
);
296 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
299 /* Stop deferring overflow warnings, ignoring any deferred
303 fold_undefer_and_ignore_overflow_warnings (void)
305 fold_undefer_overflow_warnings (false, NULL
, 0);
308 /* Whether we are deferring overflow warnings. */
311 fold_deferring_overflow_warnings_p (void)
313 return fold_deferring_overflow_warnings
> 0;
316 /* This is called when we fold something based on the fact that signed
317 overflow is undefined. */
320 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
322 if (fold_deferring_overflow_warnings
> 0)
324 if (fold_deferred_overflow_warning
== NULL
325 || wc
< fold_deferred_overflow_code
)
327 fold_deferred_overflow_warning
= gmsgid
;
328 fold_deferred_overflow_code
= wc
;
331 else if (issue_strict_overflow_warning (wc
))
332 warning (OPT_Wstrict_overflow
, gmsgid
);
335 /* Return true if the built-in mathematical function specified by CODE
336 is odd, i.e. -f(x) == f(-x). */
339 negate_mathfn_p (enum built_in_function code
)
343 CASE_FLT_FN (BUILT_IN_ASIN
):
344 CASE_FLT_FN (BUILT_IN_ASINH
):
345 CASE_FLT_FN (BUILT_IN_ATAN
):
346 CASE_FLT_FN (BUILT_IN_ATANH
):
347 CASE_FLT_FN (BUILT_IN_CASIN
):
348 CASE_FLT_FN (BUILT_IN_CASINH
):
349 CASE_FLT_FN (BUILT_IN_CATAN
):
350 CASE_FLT_FN (BUILT_IN_CATANH
):
351 CASE_FLT_FN (BUILT_IN_CBRT
):
352 CASE_FLT_FN (BUILT_IN_CPROJ
):
353 CASE_FLT_FN (BUILT_IN_CSIN
):
354 CASE_FLT_FN (BUILT_IN_CSINH
):
355 CASE_FLT_FN (BUILT_IN_CTAN
):
356 CASE_FLT_FN (BUILT_IN_CTANH
):
357 CASE_FLT_FN (BUILT_IN_ERF
):
358 CASE_FLT_FN (BUILT_IN_LLROUND
):
359 CASE_FLT_FN (BUILT_IN_LROUND
):
360 CASE_FLT_FN (BUILT_IN_ROUND
):
361 CASE_FLT_FN (BUILT_IN_SIN
):
362 CASE_FLT_FN (BUILT_IN_SINH
):
363 CASE_FLT_FN (BUILT_IN_TAN
):
364 CASE_FLT_FN (BUILT_IN_TANH
):
365 CASE_FLT_FN (BUILT_IN_TRUNC
):
368 CASE_FLT_FN (BUILT_IN_LLRINT
):
369 CASE_FLT_FN (BUILT_IN_LRINT
):
370 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
371 CASE_FLT_FN (BUILT_IN_RINT
):
372 return !flag_rounding_math
;
380 /* Check whether we may negate an integer constant T without causing
384 may_negate_without_overflow_p (const_tree t
)
388 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
390 type
= TREE_TYPE (t
);
391 if (TYPE_UNSIGNED (type
))
394 return !wi::only_sign_bit_p (t
);
397 /* Determine whether an expression T can be cheaply negated using
398 the function negate_expr without introducing undefined overflow. */
401 negate_expr_p (tree t
)
408 type
= TREE_TYPE (t
);
411 switch (TREE_CODE (t
))
414 if (INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_WRAPS (type
))
417 /* Check that -CST will not overflow type. */
418 return may_negate_without_overflow_p (t
);
420 return (INTEGRAL_TYPE_P (type
)
421 && TYPE_OVERFLOW_WRAPS (type
));
427 return !TYPE_OVERFLOW_SANITIZED (type
);
430 /* We want to canonicalize to positive real constants. Pretend
431 that only negative ones can be easily negated. */
432 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
435 return negate_expr_p (TREE_REALPART (t
))
436 && negate_expr_p (TREE_IMAGPART (t
));
440 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
443 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
445 for (i
= 0; i
< count
; i
++)
446 if (!negate_expr_p (VECTOR_CST_ELT (t
, i
)))
453 return negate_expr_p (TREE_OPERAND (t
, 0))
454 && negate_expr_p (TREE_OPERAND (t
, 1));
457 return negate_expr_p (TREE_OPERAND (t
, 0));
460 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
461 || HONOR_SIGNED_ZEROS (element_mode (type
)))
463 /* -(A + B) -> (-B) - A. */
464 if (negate_expr_p (TREE_OPERAND (t
, 1))
465 && reorder_operands_p (TREE_OPERAND (t
, 0),
466 TREE_OPERAND (t
, 1)))
468 /* -(A + B) -> (-A) - B. */
469 return negate_expr_p (TREE_OPERAND (t
, 0));
472 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
473 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
474 && !HONOR_SIGNED_ZEROS (element_mode (type
))
475 && reorder_operands_p (TREE_OPERAND (t
, 0),
476 TREE_OPERAND (t
, 1));
479 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
485 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t
))))
486 return negate_expr_p (TREE_OPERAND (t
, 1))
487 || negate_expr_p (TREE_OPERAND (t
, 0));
493 /* In general we can't negate A / B, because if A is INT_MIN and
494 B is 1, we may turn this into INT_MIN / -1 which is undefined
495 and actually traps on some architectures. But if overflow is
496 undefined, we can negate, because - (INT_MIN / 1) is an
498 if (INTEGRAL_TYPE_P (TREE_TYPE (t
)))
500 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
502 /* If overflow is undefined then we have to be careful because
503 we ask whether it's ok to associate the negate with the
504 division which is not ok for example for
505 -((a - b) / c) where (-(a - b)) / c may invoke undefined
506 overflow because of negating INT_MIN. So do not use
507 negate_expr_p here but open-code the two important cases. */
508 if (TREE_CODE (TREE_OPERAND (t
, 0)) == NEGATE_EXPR
509 || (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
510 && may_negate_without_overflow_p (TREE_OPERAND (t
, 0))))
513 else if (negate_expr_p (TREE_OPERAND (t
, 0)))
515 return negate_expr_p (TREE_OPERAND (t
, 1));
518 /* Negate -((double)float) as (double)(-float). */
519 if (TREE_CODE (type
) == REAL_TYPE
)
521 tree tem
= strip_float_extensions (t
);
523 return negate_expr_p (tem
);
528 /* Negate -f(x) as f(-x). */
529 if (negate_mathfn_p (builtin_mathfn_code (t
)))
530 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
534 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
535 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
537 tree op1
= TREE_OPERAND (t
, 1);
538 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
549 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
550 simplification is possible.
551 If negate_expr_p would return true for T, NULL_TREE will never be
555 fold_negate_expr (location_t loc
, tree t
)
557 tree type
= TREE_TYPE (t
);
560 switch (TREE_CODE (t
))
562 /* Convert - (~A) to A + 1. */
564 if (INTEGRAL_TYPE_P (type
))
565 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
566 build_one_cst (type
));
570 tem
= fold_negate_const (t
, type
);
571 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
572 || (ANY_INTEGRAL_TYPE_P (type
)
573 && !TYPE_OVERFLOW_TRAPS (type
)
574 && TYPE_OVERFLOW_WRAPS (type
))
575 || (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
580 tem
= fold_negate_const (t
, type
);
584 tem
= fold_negate_const (t
, type
);
589 tree rpart
= fold_negate_expr (loc
, TREE_REALPART (t
));
590 tree ipart
= fold_negate_expr (loc
, TREE_IMAGPART (t
));
592 return build_complex (type
, rpart
, ipart
);
598 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
599 tree
*elts
= XALLOCAVEC (tree
, count
);
601 for (i
= 0; i
< count
; i
++)
603 elts
[i
] = fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
604 if (elts
[i
] == NULL_TREE
)
608 return build_vector (type
, elts
);
612 if (negate_expr_p (t
))
613 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
614 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
615 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
619 if (negate_expr_p (t
))
620 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
621 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
625 if (!TYPE_OVERFLOW_SANITIZED (type
))
626 return TREE_OPERAND (t
, 0);
630 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
631 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
633 /* -(A + B) -> (-B) - A. */
634 if (negate_expr_p (TREE_OPERAND (t
, 1))
635 && reorder_operands_p (TREE_OPERAND (t
, 0),
636 TREE_OPERAND (t
, 1)))
638 tem
= negate_expr (TREE_OPERAND (t
, 1));
639 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
640 tem
, TREE_OPERAND (t
, 0));
643 /* -(A + B) -> (-A) - B. */
644 if (negate_expr_p (TREE_OPERAND (t
, 0)))
646 tem
= negate_expr (TREE_OPERAND (t
, 0));
647 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
648 tem
, TREE_OPERAND (t
, 1));
654 /* - (A - B) -> B - A */
655 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
656 && !HONOR_SIGNED_ZEROS (element_mode (type
))
657 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
658 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
659 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
663 if (TYPE_UNSIGNED (type
))
669 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
)))
671 tem
= TREE_OPERAND (t
, 1);
672 if (negate_expr_p (tem
))
673 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
674 TREE_OPERAND (t
, 0), negate_expr (tem
));
675 tem
= TREE_OPERAND (t
, 0);
676 if (negate_expr_p (tem
))
677 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
678 negate_expr (tem
), TREE_OPERAND (t
, 1));
685 /* In general we can't negate A / B, because if A is INT_MIN and
686 B is 1, we may turn this into INT_MIN / -1 which is undefined
687 and actually traps on some architectures. But if overflow is
688 undefined, we can negate, because - (INT_MIN / 1) is an
690 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
692 const char * const warnmsg
= G_("assuming signed overflow does not "
693 "occur when negating a division");
694 tem
= TREE_OPERAND (t
, 1);
695 if (negate_expr_p (tem
))
697 if (INTEGRAL_TYPE_P (type
)
698 && (TREE_CODE (tem
) != INTEGER_CST
699 || integer_onep (tem
)))
700 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
701 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
702 TREE_OPERAND (t
, 0), negate_expr (tem
));
704 /* If overflow is undefined then we have to be careful because
705 we ask whether it's ok to associate the negate with the
706 division which is not ok for example for
707 -((a - b) / c) where (-(a - b)) / c may invoke undefined
708 overflow because of negating INT_MIN. So do not use
709 negate_expr_p here but open-code the two important cases. */
710 tem
= TREE_OPERAND (t
, 0);
711 if ((INTEGRAL_TYPE_P (type
)
712 && (TREE_CODE (tem
) == NEGATE_EXPR
713 || (TREE_CODE (tem
) == INTEGER_CST
714 && may_negate_without_overflow_p (tem
))))
715 || !INTEGRAL_TYPE_P (type
))
716 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
717 negate_expr (tem
), TREE_OPERAND (t
, 1));
722 /* Convert -((double)float) into (double)(-float). */
723 if (TREE_CODE (type
) == REAL_TYPE
)
725 tem
= strip_float_extensions (t
);
726 if (tem
!= t
&& negate_expr_p (tem
))
727 return fold_convert_loc (loc
, type
, negate_expr (tem
));
732 /* Negate -f(x) as f(-x). */
733 if (negate_mathfn_p (builtin_mathfn_code (t
))
734 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
738 fndecl
= get_callee_fndecl (t
);
739 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
740 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
745 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
746 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
748 tree op1
= TREE_OPERAND (t
, 1);
749 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
751 tree ntype
= TYPE_UNSIGNED (type
)
752 ? signed_type_for (type
)
753 : unsigned_type_for (type
);
754 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
755 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
756 return fold_convert_loc (loc
, type
, temp
);
768 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
769 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
781 loc
= EXPR_LOCATION (t
);
782 type
= TREE_TYPE (t
);
785 tem
= fold_negate_expr (loc
, t
);
787 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
788 return fold_convert_loc (loc
, type
, tem
);
791 /* Split a tree IN into a constant, literal and variable parts that could be
792 combined with CODE to make IN. "constant" means an expression with
793 TREE_CONSTANT but that isn't an actual constant. CODE must be a
794 commutative arithmetic operation. Store the constant part into *CONP,
795 the literal in *LITP and return the variable part. If a part isn't
796 present, set it to null. If the tree does not decompose in this way,
797 return the entire tree as the variable part and the other parts as null.
799 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
800 case, we negate an operand that was subtracted. Except if it is a
801 literal for which we use *MINUS_LITP instead.
803 If NEGATE_P is true, we are negating all of IN, again except a literal
804 for which we use *MINUS_LITP instead.
806 If IN is itself a literal or constant, return it as appropriate.
808 Note that we do not guarantee that any of the three values will be the
809 same type as IN, but they will have the same signedness and mode. */
812 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
813 tree
*minus_litp
, int negate_p
)
821 /* Strip any conversions that don't change the machine mode or signedness. */
822 STRIP_SIGN_NOPS (in
);
824 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
825 || TREE_CODE (in
) == FIXED_CST
)
827 else if (TREE_CODE (in
) == code
828 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
829 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
830 /* We can associate addition and subtraction together (even
831 though the C standard doesn't say so) for integers because
832 the value is not affected. For reals, the value might be
833 affected, so we can't. */
834 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
835 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
837 tree op0
= TREE_OPERAND (in
, 0);
838 tree op1
= TREE_OPERAND (in
, 1);
839 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
840 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
842 /* First see if either of the operands is a literal, then a constant. */
843 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
844 || TREE_CODE (op0
) == FIXED_CST
)
845 *litp
= op0
, op0
= 0;
846 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
847 || TREE_CODE (op1
) == FIXED_CST
)
848 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
850 if (op0
!= 0 && TREE_CONSTANT (op0
))
851 *conp
= op0
, op0
= 0;
852 else if (op1
!= 0 && TREE_CONSTANT (op1
))
853 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
855 /* If we haven't dealt with either operand, this is not a case we can
856 decompose. Otherwise, VAR is either of the ones remaining, if any. */
857 if (op0
!= 0 && op1
!= 0)
862 var
= op1
, neg_var_p
= neg1_p
;
864 /* Now do any needed negations. */
866 *minus_litp
= *litp
, *litp
= 0;
868 *conp
= negate_expr (*conp
);
870 var
= negate_expr (var
);
872 else if (TREE_CODE (in
) == BIT_NOT_EXPR
873 && code
== PLUS_EXPR
)
875 /* -X - 1 is folded to ~X, undo that here. */
876 *minus_litp
= build_one_cst (TREE_TYPE (in
));
877 var
= negate_expr (TREE_OPERAND (in
, 0));
879 else if (TREE_CONSTANT (in
))
887 *minus_litp
= *litp
, *litp
= 0;
888 else if (*minus_litp
)
889 *litp
= *minus_litp
, *minus_litp
= 0;
890 *conp
= negate_expr (*conp
);
891 var
= negate_expr (var
);
897 /* Re-associate trees split by the above function. T1 and T2 are
898 either expressions to associate or null. Return the new
899 expression, if any. LOC is the location of the new expression. If
900 we build an operation, do it in TYPE and with CODE. */
903 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
910 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
911 try to fold this since we will have infinite recursion. But do
912 deal with any NEGATE_EXPRs. */
913 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
914 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
916 if (code
== PLUS_EXPR
)
918 if (TREE_CODE (t1
) == NEGATE_EXPR
)
919 return build2_loc (loc
, MINUS_EXPR
, type
,
920 fold_convert_loc (loc
, type
, t2
),
921 fold_convert_loc (loc
, type
,
922 TREE_OPERAND (t1
, 0)));
923 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
924 return build2_loc (loc
, MINUS_EXPR
, type
,
925 fold_convert_loc (loc
, type
, t1
),
926 fold_convert_loc (loc
, type
,
927 TREE_OPERAND (t2
, 0)));
928 else if (integer_zerop (t2
))
929 return fold_convert_loc (loc
, type
, t1
);
931 else if (code
== MINUS_EXPR
)
933 if (integer_zerop (t2
))
934 return fold_convert_loc (loc
, type
, t1
);
937 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
938 fold_convert_loc (loc
, type
, t2
));
941 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
942 fold_convert_loc (loc
, type
, t2
));
945 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
946 for use in int_const_binop, size_binop and size_diffop. */
949 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
951 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
953 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
968 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
969 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
970 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
974 /* Combine two integer constants ARG1 and ARG2 under operation CODE
975 to produce a new constant. Return NULL_TREE if we don't know how
976 to evaluate CODE at compile-time. */
979 int_const_binop_1 (enum tree_code code
, const_tree arg1
, const_tree parg2
,
984 tree type
= TREE_TYPE (arg1
);
985 signop sign
= TYPE_SIGN (type
);
986 bool overflow
= false;
988 wide_int arg2
= wide_int::from (parg2
, TYPE_PRECISION (type
),
989 TYPE_SIGN (TREE_TYPE (parg2
)));
994 res
= wi::bit_or (arg1
, arg2
);
998 res
= wi::bit_xor (arg1
, arg2
);
1002 res
= wi::bit_and (arg1
, arg2
);
1007 if (wi::neg_p (arg2
))
1010 if (code
== RSHIFT_EXPR
)
1016 if (code
== RSHIFT_EXPR
)
1017 /* It's unclear from the C standard whether shifts can overflow.
1018 The following code ignores overflow; perhaps a C standard
1019 interpretation ruling is needed. */
1020 res
= wi::rshift (arg1
, arg2
, sign
);
1022 res
= wi::lshift (arg1
, arg2
);
1027 if (wi::neg_p (arg2
))
1030 if (code
== RROTATE_EXPR
)
1031 code
= LROTATE_EXPR
;
1033 code
= RROTATE_EXPR
;
1036 if (code
== RROTATE_EXPR
)
1037 res
= wi::rrotate (arg1
, arg2
);
1039 res
= wi::lrotate (arg1
, arg2
);
1043 res
= wi::add (arg1
, arg2
, sign
, &overflow
);
1047 res
= wi::sub (arg1
, arg2
, sign
, &overflow
);
1051 res
= wi::mul (arg1
, arg2
, sign
, &overflow
);
1054 case MULT_HIGHPART_EXPR
:
1055 res
= wi::mul_high (arg1
, arg2
, sign
);
1058 case TRUNC_DIV_EXPR
:
1059 case EXACT_DIV_EXPR
:
1062 res
= wi::div_trunc (arg1
, arg2
, sign
, &overflow
);
1065 case FLOOR_DIV_EXPR
:
1068 res
= wi::div_floor (arg1
, arg2
, sign
, &overflow
);
1074 res
= wi::div_ceil (arg1
, arg2
, sign
, &overflow
);
1077 case ROUND_DIV_EXPR
:
1080 res
= wi::div_round (arg1
, arg2
, sign
, &overflow
);
1083 case TRUNC_MOD_EXPR
:
1086 res
= wi::mod_trunc (arg1
, arg2
, sign
, &overflow
);
1089 case FLOOR_MOD_EXPR
:
1092 res
= wi::mod_floor (arg1
, arg2
, sign
, &overflow
);
1098 res
= wi::mod_ceil (arg1
, arg2
, sign
, &overflow
);
1101 case ROUND_MOD_EXPR
:
1104 res
= wi::mod_round (arg1
, arg2
, sign
, &overflow
);
1108 res
= wi::min (arg1
, arg2
, sign
);
1112 res
= wi::max (arg1
, arg2
, sign
);
1119 t
= force_fit_type (type
, res
, overflowable
,
1120 (((sign
== SIGNED
|| overflowable
== -1)
1122 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (parg2
)));
1128 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
1130 return int_const_binop_1 (code
, arg1
, arg2
, 1);
1133 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1134 constant. We assume ARG1 and ARG2 have the same data type, or at least
1135 are the same kind of constant and the same machine mode. Return zero if
1136 combining the constants is not allowed in the current operating mode. */
1139 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1141 /* Sanity check for the recursive cases. */
1148 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg2
) == INTEGER_CST
)
1150 if (code
== POINTER_PLUS_EXPR
)
1151 return int_const_binop (PLUS_EXPR
,
1152 arg1
, fold_convert (TREE_TYPE (arg1
), arg2
));
1154 return int_const_binop (code
, arg1
, arg2
);
1157 if (TREE_CODE (arg1
) == REAL_CST
&& TREE_CODE (arg2
) == REAL_CST
)
1162 REAL_VALUE_TYPE value
;
1163 REAL_VALUE_TYPE result
;
1167 /* The following codes are handled by real_arithmetic. */
1182 d1
= TREE_REAL_CST (arg1
);
1183 d2
= TREE_REAL_CST (arg2
);
1185 type
= TREE_TYPE (arg1
);
1186 mode
= TYPE_MODE (type
);
1188 /* Don't perform operation if we honor signaling NaNs and
1189 either operand is a NaN. */
1190 if (HONOR_SNANS (mode
)
1191 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1194 /* Don't perform operation if it would raise a division
1195 by zero exception. */
1196 if (code
== RDIV_EXPR
1197 && REAL_VALUES_EQUAL (d2
, dconst0
)
1198 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1201 /* If either operand is a NaN, just return it. Otherwise, set up
1202 for floating-point trap; we return an overflow. */
1203 if (REAL_VALUE_ISNAN (d1
))
1205 else if (REAL_VALUE_ISNAN (d2
))
1208 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1209 real_convert (&result
, mode
, &value
);
1211 /* Don't constant fold this floating point operation if
1212 the result has overflowed and flag_trapping_math. */
1213 if (flag_trapping_math
1214 && MODE_HAS_INFINITIES (mode
)
1215 && REAL_VALUE_ISINF (result
)
1216 && !REAL_VALUE_ISINF (d1
)
1217 && !REAL_VALUE_ISINF (d2
))
1220 /* Don't constant fold this floating point operation if the
1221 result may dependent upon the run-time rounding mode and
1222 flag_rounding_math is set, or if GCC's software emulation
1223 is unable to accurately represent the result. */
1224 if ((flag_rounding_math
1225 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1226 && (inexact
|| !real_identical (&result
, &value
)))
1229 t
= build_real (type
, result
);
1231 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1235 if (TREE_CODE (arg1
) == FIXED_CST
)
1237 FIXED_VALUE_TYPE f1
;
1238 FIXED_VALUE_TYPE f2
;
1239 FIXED_VALUE_TYPE result
;
1244 /* The following codes are handled by fixed_arithmetic. */
1250 case TRUNC_DIV_EXPR
:
1251 if (TREE_CODE (arg2
) != FIXED_CST
)
1253 f2
= TREE_FIXED_CST (arg2
);
1259 if (TREE_CODE (arg2
) != INTEGER_CST
)
1262 f2
.data
.high
= w2
.elt (1);
1263 f2
.data
.low
= w2
.elt (0);
1272 f1
= TREE_FIXED_CST (arg1
);
1273 type
= TREE_TYPE (arg1
);
1274 sat_p
= TYPE_SATURATING (type
);
1275 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1276 t
= build_fixed (type
, result
);
1277 /* Propagate overflow flags. */
1278 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1279 TREE_OVERFLOW (t
) = 1;
1283 if (TREE_CODE (arg1
) == COMPLEX_CST
&& TREE_CODE (arg2
) == COMPLEX_CST
)
1285 tree type
= TREE_TYPE (arg1
);
1286 tree r1
= TREE_REALPART (arg1
);
1287 tree i1
= TREE_IMAGPART (arg1
);
1288 tree r2
= TREE_REALPART (arg2
);
1289 tree i2
= TREE_IMAGPART (arg2
);
1296 real
= const_binop (code
, r1
, r2
);
1297 imag
= const_binop (code
, i1
, i2
);
1301 if (COMPLEX_FLOAT_TYPE_P (type
))
1302 return do_mpc_arg2 (arg1
, arg2
, type
,
1303 /* do_nonfinite= */ folding_initializer
,
1306 real
= const_binop (MINUS_EXPR
,
1307 const_binop (MULT_EXPR
, r1
, r2
),
1308 const_binop (MULT_EXPR
, i1
, i2
));
1309 imag
= const_binop (PLUS_EXPR
,
1310 const_binop (MULT_EXPR
, r1
, i2
),
1311 const_binop (MULT_EXPR
, i1
, r2
));
1315 if (COMPLEX_FLOAT_TYPE_P (type
))
1316 return do_mpc_arg2 (arg1
, arg2
, type
,
1317 /* do_nonfinite= */ folding_initializer
,
1320 case TRUNC_DIV_EXPR
:
1322 case FLOOR_DIV_EXPR
:
1323 case ROUND_DIV_EXPR
:
1324 if (flag_complex_method
== 0)
1326 /* Keep this algorithm in sync with
1327 tree-complex.c:expand_complex_div_straight().
1329 Expand complex division to scalars, straightforward algorithm.
1330 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1334 = const_binop (PLUS_EXPR
,
1335 const_binop (MULT_EXPR
, r2
, r2
),
1336 const_binop (MULT_EXPR
, i2
, i2
));
1338 = const_binop (PLUS_EXPR
,
1339 const_binop (MULT_EXPR
, r1
, r2
),
1340 const_binop (MULT_EXPR
, i1
, i2
));
1342 = const_binop (MINUS_EXPR
,
1343 const_binop (MULT_EXPR
, i1
, r2
),
1344 const_binop (MULT_EXPR
, r1
, i2
));
1346 real
= const_binop (code
, t1
, magsquared
);
1347 imag
= const_binop (code
, t2
, magsquared
);
1351 /* Keep this algorithm in sync with
1352 tree-complex.c:expand_complex_div_wide().
1354 Expand complex division to scalars, modified algorithm to minimize
1355 overflow with wide input ranges. */
1356 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1357 fold_abs_const (r2
, TREE_TYPE (type
)),
1358 fold_abs_const (i2
, TREE_TYPE (type
)));
1360 if (integer_nonzerop (compare
))
1362 /* In the TRUE branch, we compute
1364 div = (br * ratio) + bi;
1365 tr = (ar * ratio) + ai;
1366 ti = (ai * ratio) - ar;
1369 tree ratio
= const_binop (code
, r2
, i2
);
1370 tree div
= const_binop (PLUS_EXPR
, i2
,
1371 const_binop (MULT_EXPR
, r2
, ratio
));
1372 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1373 real
= const_binop (PLUS_EXPR
, real
, i1
);
1374 real
= const_binop (code
, real
, div
);
1376 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1377 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1378 imag
= const_binop (code
, imag
, div
);
1382 /* In the FALSE branch, we compute
1384 divisor = (d * ratio) + c;
1385 tr = (b * ratio) + a;
1386 ti = b - (a * ratio);
1389 tree ratio
= const_binop (code
, i2
, r2
);
1390 tree div
= const_binop (PLUS_EXPR
, r2
,
1391 const_binop (MULT_EXPR
, i2
, ratio
));
1393 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1394 real
= const_binop (PLUS_EXPR
, real
, r1
);
1395 real
= const_binop (code
, real
, div
);
1397 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1398 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1399 imag
= const_binop (code
, imag
, div
);
1409 return build_complex (type
, real
, imag
);
1412 if (TREE_CODE (arg1
) == VECTOR_CST
1413 && TREE_CODE (arg2
) == VECTOR_CST
)
1415 tree type
= TREE_TYPE (arg1
);
1416 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1417 tree
*elts
= XALLOCAVEC (tree
, count
);
1419 for (i
= 0; i
< count
; i
++)
1421 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1422 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1424 elts
[i
] = const_binop (code
, elem1
, elem2
);
1426 /* It is possible that const_binop cannot handle the given
1427 code and returns NULL_TREE.  */
1428 if (elts
[i
] == NULL_TREE
)
1432 return build_vector (type
, elts
);
1435 /* Shifts allow a scalar offset for a vector. */
1436 if (TREE_CODE (arg1
) == VECTOR_CST
1437 && TREE_CODE (arg2
) == INTEGER_CST
)
1439 tree type
= TREE_TYPE (arg1
);
1440 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1441 tree
*elts
= XALLOCAVEC (tree
, count
);
1443 for (i
= 0; i
< count
; i
++)
1445 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1447 elts
[i
] = const_binop (code
, elem1
, arg2
);
1449 /* It is possible that const_binop cannot handle the given
1450 code and return NULL_TREE. */
1451 if (elts
[i
] == NULL_TREE
)
1455 return build_vector (type
, elts
);
1460 /* Overload that adds a TYPE parameter to be able to dispatch
1461 to fold_relational_const. */
/* Overload of const_binop that also takes the result TYPE, so cases
   that need the type (comparisons, COMPLEX_EXPR, vector pack/widen
   codes) can be handled here before delegating to the two-operand
   worker.  tcc_comparison codes go to fold_relational_const;
   COMPLEX_EXPR with two REAL_CST or two INTEGER_CST operands builds a
   complex constant; VEC_PACK_* and VEC_WIDEN_MULT_* fold element-wise
   via fold_convert_const/const_binop; plain tcc_binary codes fall
   through to const_binop (code, arg1, arg2).
   NOTE(review): this extract appears lossy (braces, switch header and
   some case labels are elided); only comments were added here and no
   code token was altered.  */
1464 const_binop (enum tree_code code
, tree type
, tree arg1
, tree arg2
)
1466 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1467 return fold_relational_const (code
, type
, arg1
, arg2
);
1469 /* ??? Until we make the const_binop worker take the type of the
1470 result as argument put those cases that need it here. */
1474 if ((TREE_CODE (arg1
) == REAL_CST
1475 && TREE_CODE (arg2
) == REAL_CST
)
1476 || (TREE_CODE (arg1
) == INTEGER_CST
1477 && TREE_CODE (arg2
) == INTEGER_CST
))
1478 return build_complex (type
, arg1
, arg2
);
/* Pack two half-width vectors into one, converting each element with
   NOP_EXPR (truncation) or FIX_TRUNC_EXPR as appropriate.  */
1481 case VEC_PACK_TRUNC_EXPR
:
1482 case VEC_PACK_FIX_TRUNC_EXPR
:
1484 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1487 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
/ 2
1488 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
/ 2);
1489 if (TREE_CODE (arg1
) != VECTOR_CST
1490 || TREE_CODE (arg2
) != VECTOR_CST
)
1493 elts
= XALLOCAVEC (tree
, nelts
);
1494 if (!vec_cst_ctor_to_array (arg1
, elts
)
1495 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
/ 2))
1498 for (i
= 0; i
< nelts
; i
++)
1500 elts
[i
] = fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
1501 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
1502 TREE_TYPE (type
), elts
[i
]);
1503 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1507 return build_vector (type
, elts
);
/* Widening multiplies: select the lo/hi/even/odd input lanes via
   SCALE and OFS (endian-dependent for LO/HI), widen each pair with
   NOP_EXPR, then multiply.  */
1510 case VEC_WIDEN_MULT_LO_EXPR
:
1511 case VEC_WIDEN_MULT_HI_EXPR
:
1512 case VEC_WIDEN_MULT_EVEN_EXPR
:
1513 case VEC_WIDEN_MULT_ODD_EXPR
:
1515 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
);
1516 unsigned int out
, ofs
, scale
;
1519 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
* 2
1520 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
)) == nelts
* 2);
1521 if (TREE_CODE (arg1
) != VECTOR_CST
|| TREE_CODE (arg2
) != VECTOR_CST
)
1524 elts
= XALLOCAVEC (tree
, nelts
* 4);
1525 if (!vec_cst_ctor_to_array (arg1
, elts
)
1526 || !vec_cst_ctor_to_array (arg2
, elts
+ nelts
* 2))
1529 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
1530 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? nelts
: 0;
1531 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
1532 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : nelts
;
1533 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
1535 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1538 for (out
= 0; out
< nelts
; out
++)
1540 unsigned int in1
= (out
<< scale
) + ofs
;
1541 unsigned int in2
= in1
+ nelts
* 2;
1544 t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in1
]);
1545 t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in2
]);
1547 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
1549 elts
[out
] = const_binop (MULT_EXPR
, t1
, t2
);
1550 if (elts
[out
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[out
]))
1554 return build_vector (type
, elts
);
1560 if (TREE_CODE_CLASS (code
) != tcc_binary
)
1563 /* Make sure type and arg0 have the same saturating flag. */
1564 gcc_checking_assert (TYPE_SATURATING (type
)
1565 == TYPE_SATURATING (TREE_TYPE (arg1
)));
1567 return const_binop (code
, arg1
, arg2
);
1570 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1571 Return zero if computing the constants is not possible. */
/* Compute CODE ARG0 with result type TYPE where ARG0 is a constant.
   Returns the folded constant, or (presumably) NULL_TREE when the
   operation cannot be folded — TODO confirm, the fallthrough returns
   are elided in this extract.  Handles conversions, negation, ABS,
   complex conjugate-style cases, BIT_NOT (scalar and element-wise on
   VECTOR_CSTs), TRUTH_NOT, REALPART/IMAGPART, VEC_UNPACK_* and the
   REDUC_{MIN,MAX,PLUS} reductions.
   NOTE(review): extract appears lossy (switch header, some case
   labels and braces elided); comments only, no code token altered.  */
1574 const_unop (enum tree_code code
, tree type
, tree arg0
)
1580 case FIX_TRUNC_EXPR
:
1581 case FIXED_CONVERT_EXPR
:
1582 return fold_convert_const (code
, type
, arg0
);
1584 case ADDR_SPACE_CONVERT_EXPR
:
/* A zero pointer converts to zero in any address space.  */
1585 if (integer_zerop (arg0
))
1586 return fold_convert_const (code
, type
, arg0
);
1589 case VIEW_CONVERT_EXPR
:
1590 return fold_view_convert_expr (type
, arg0
);
1594 /* Can't call fold_negate_const directly here as that doesn't
1595 handle all cases and we might not be able to negate some
1597 tree tem
= fold_negate_expr (UNKNOWN_LOCATION
, arg0
);
1598 if (tem
&& CONSTANT_CLASS_P (tem
))
1604 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
1605 return fold_abs_const (arg0
, type
);
/* Complex conjugate: negate the imaginary part only.  */
1609 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1611 tree ipart
= fold_negate_const (TREE_IMAGPART (arg0
),
1613 return build_complex (type
, TREE_REALPART (arg0
), ipart
);
1618 if (TREE_CODE (arg0
) == INTEGER_CST
)
1619 return fold_not_const (arg0
, type
);
1620 /* Perform BIT_NOT_EXPR on each element individually. */
1621 else if (TREE_CODE (arg0
) == VECTOR_CST
)
1625 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
1627 elements
= XALLOCAVEC (tree
, count
);
1628 for (i
= 0; i
< count
; i
++)
1630 elem
= VECTOR_CST_ELT (arg0
, i
);
1631 elem
= const_unop (BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
1632 if (elem
== NULL_TREE
)
1637 return build_vector (type
, elements
);
1641 case TRUTH_NOT_EXPR
:
1642 if (TREE_CODE (arg0
) == INTEGER_CST
)
1643 return constant_boolean_node (integer_zerop (arg0
), type
);
1647 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1648 return fold_convert (type
, TREE_REALPART (arg0
));
1652 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1653 return fold_convert (type
, TREE_IMAGPART (arg0
));
/* Unpack half the lanes of ARG0 into a full vector of TYPE, widening
   with NOP_EXPR or FLOAT_EXPR; the half chosen depends on LO/HI and
   byte order.  */
1656 case VEC_UNPACK_LO_EXPR
:
1657 case VEC_UNPACK_HI_EXPR
:
1658 case VEC_UNPACK_FLOAT_LO_EXPR
:
1659 case VEC_UNPACK_FLOAT_HI_EXPR
:
1661 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
1663 enum tree_code subcode
;
1665 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2);
1666 if (TREE_CODE (arg0
) != VECTOR_CST
)
1669 elts
= XALLOCAVEC (tree
, nelts
* 2);
1670 if (!vec_cst_ctor_to_array (arg0
, elts
))
1673 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
1674 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
1677 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
1680 subcode
= FLOAT_EXPR
;
1682 for (i
= 0; i
< nelts
; i
++)
1684 elts
[i
] = fold_convert_const (subcode
, TREE_TYPE (type
), elts
[i
]);
1685 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
1689 return build_vector (type
, elts
);
/* Horizontal reductions: fold all lanes into elts[0] with the
   corresponding scalar binary code.  */
1692 case REDUC_MIN_EXPR
:
1693 case REDUC_MAX_EXPR
:
1694 case REDUC_PLUS_EXPR
:
1696 unsigned int nelts
, i
;
1698 enum tree_code subcode
;
1700 if (TREE_CODE (arg0
) != VECTOR_CST
)
1702 nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
));
1704 elts
= XALLOCAVEC (tree
, nelts
);
1705 if (!vec_cst_ctor_to_array (arg0
, elts
))
1710 case REDUC_MIN_EXPR
: subcode
= MIN_EXPR
; break;
1711 case REDUC_MAX_EXPR
: subcode
= MAX_EXPR
; break;
1712 case REDUC_PLUS_EXPR
: subcode
= PLUS_EXPR
; break;
1713 default: gcc_unreachable ();
1716 for (i
= 1; i
< nelts
; i
++)
1718 elts
[0] = const_binop (subcode
, elts
[0], elts
[i
]);
1719 if (elts
[0] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[0]))
1733 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1734 indicates which particular sizetype to create. */
/* Build a sign-extended INTEGER_CST of value NUMBER whose type is the
   sizetype selected by KIND (indexes the global sizetype_tab).  */
1737 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1739 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1742 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1743 is a tree code. The type of the result is taken from the operands.
1744 Both must be equivalent integer types, ala int_binop_types_match_p.
1745 If the operands are constant, so is the result. */
/* Combine ARG0 and ARG1 with binary CODE; result type is taken from
   ARG0 (operands must satisfy int_binop_types_match_p).  Constant
   operands fold to a constant via int_const_binop_1 with identity
   fast paths (x+0, 0+x, x-0, 1*x); otherwise builds the expression
   with fold_build2_loc.  error_mark_node propagates unchanged.
   NOTE(review): the fast-path return statements are elided in this
   extract; comments only.  */
1748 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1750 tree type
= TREE_TYPE (arg0
);
1752 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1753 return error_mark_node
;
1755 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1758 /* Handle the special case of two integer constants faster. */
1759 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1761 /* And some specific cases even faster than that. */
1762 if (code
== PLUS_EXPR
)
1764 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1766 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1769 else if (code
== MINUS_EXPR
)
1771 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1774 else if (code
== MULT_EXPR
)
1776 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1780 /* Handle general case of two integer constants. For sizetype
1781 constant calculations we always want to know about overflow,
1782 even in the unsigned case. */
1783 return int_const_binop_1 (code
, arg0
, arg1
, -1);
1786 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1789 /* Given two values, either both of sizetype or both of bitsizetype,
1790 compute the difference between the two values. Return the value
1791 in signed type corresponding to the type of the operands. */
/* Compute ARG0 - ARG1 (both sizetype or both bitsizetype) and return
   the difference in the corresponding signed type CTYPE.  For
   non-constant operands, convert both to CTYPE first and subtract;
   for constants, subtract the larger-minus-smaller (which cannot
   overflow) and negate when ARG1 > ARG0.  */
1794 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1796 tree type
= TREE_TYPE (arg0
);
1799 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1802 /* If the type is already signed, just do the simple thing. */
1803 if (!TYPE_UNSIGNED (type
))
1804 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
/* Pick the signed counterpart of TYPE.  */
1806 if (type
== sizetype
)
1808 else if (type
== bitsizetype
)
1809 ctype
= sbitsizetype
;
1811 ctype
= signed_type_for (type
);
1813 /* If either operand is not a constant, do the conversions to the signed
1814 type and subtract. The hardware will do the right thing with any
1815 overflow in the subtraction. */
1816 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1817 return size_binop_loc (loc
, MINUS_EXPR
,
1818 fold_convert_loc (loc
, ctype
, arg0
),
1819 fold_convert_loc (loc
, ctype
, arg1
));
1821 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1822 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1823 overflow) and negate (which can't either). Special-case a result
1824 of zero while we're here. */
1825 if (tree_int_cst_equal (arg0
, arg1
))
1826 return build_int_cst (ctype
, 0);
1827 else if (tree_int_cst_lt (arg1
, arg0
))
1828 return fold_convert_loc (loc
, ctype
,
1829 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1831 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1832 fold_convert_loc (loc
, ctype
,
1833 size_binop_loc (loc
,
1838 /* A subroutine of fold_convert_const handling conversions of an
1839 INTEGER_CST to another integer type. */
/* Convert INTEGER_CST ARG1 to integer type TYPE, sign- or
   zero-extending per ARG1's own signedness (pointers extend
   unsigned), and propagate ARG1's overflow flag.  */
1842 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1844 /* Given an integer constant, make new constant with new type,
1845 appropriately sign-extended or truncated. Use widest_int
1846 so that any extension is done according to ARG1's type. */
1847 return force_fit_type (type
, wi::to_widest (arg1
),
1848 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1849 TREE_OVERFLOW (arg1
));
1852 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1853 to an integer type. */
/* Convert REAL_CST ARG1 to integer type TYPE using rounding CODE
   (e.g. FIX_TRUNC_EXPR).  NaN maps to zero, out-of-range values
   saturate at TYPE's min/max; overflow (including ARG1's flag) is
   recorded on the result via force_fit_type.  */
1856 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
1858 bool overflow
= false;
1861 /* The following code implements the floating point to integer
1862 conversion rules required by the Java Language Specification,
1863 that IEEE NaNs are mapped to zero and values that overflow
1864 the target precision saturate, i.e. values greater than
1865 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1866 are mapped to INT_MIN. These semantics are allowed by the
1867 C and C++ standards that simply state that the behavior of
1868 FP-to-integer conversion is unspecified upon overflow. */
1872 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1876 case FIX_TRUNC_EXPR
:
1877 real_trunc (&r
, VOIDmode
, &x
);
1884 /* If R is NaN, return zero and show we have an overflow. */
1885 if (REAL_VALUE_ISNAN (r
))
1888 val
= wi::zero (TYPE_PRECISION (type
));
1891 /* See if R is less than the lower bound or greater than the
1896 tree lt
= TYPE_MIN_VALUE (type
);
1897 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1898 if (REAL_VALUES_LESS (r
, l
))
1907 tree ut
= TYPE_MAX_VALUE (type
);
1910 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1911 if (REAL_VALUES_LESS (u
, r
))
/* In range: do the real-to-integer conversion.  */
1920 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
1922 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1926 /* A subroutine of fold_convert_const handling conversions of a
1927 FIXED_CST to an integer type. */
/* Convert FIXED_CST ARG1 to integer type TYPE: shift the raw
   double_int right by the mode's fractional bits, rounding negative
   values toward zero (add one when fractional bits were dropped),
   then fit into TYPE with overflow propagation.  */
1930 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1933 double_int temp
, temp_trunc
;
1936 /* Right shift FIXED_CST to temp by fbit. */
1937 temp
= TREE_FIXED_CST (arg1
).data
;
1938 mode
= TREE_FIXED_CST (arg1
).mode
;
1939 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
1941 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
1942 HOST_BITS_PER_DOUBLE_INT
,
1943 SIGNED_FIXED_POINT_MODE_P (mode
));
1945 /* Left shift temp to temp_trunc by fbit. */
1946 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
1947 HOST_BITS_PER_DOUBLE_INT
,
1948 SIGNED_FIXED_POINT_MODE_P (mode
));
/* fbit covers the whole double_int: value truncates to zero.  */
1952 temp
= double_int_zero
;
1953 temp_trunc
= double_int_zero
;
1956 /* If FIXED_CST is negative, we need to round the value toward 0.
1957 By checking if the fractional bits are not zero to add 1 to temp. */
1958 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1959 && temp_trunc
.is_negative ()
1960 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
1961 temp
+= double_int_one
;
1963 /* Given a fixed-point constant, make new constant with new type,
1964 appropriately sign-extended or truncated. */
1965 t
= force_fit_type (type
, temp
, -1,
1966 (temp
.is_negative ()
1967 && (TYPE_UNSIGNED (type
)
1968 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1969 | TREE_OVERFLOW (arg1
));
1974 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1975 to another floating point type. */
/* Convert REAL_CST ARG1 to floating type TYPE via real_convert,
   setting TREE_OVERFLOW when the target mode cannot represent an
   infinity or NaN that the source value requires, or when the
   conversion itself overflowed to infinity.  */
1978 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1980 REAL_VALUE_TYPE value
;
1983 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1984 t
= build_real (type
, value
);
1986 /* If converting an infinity or NAN to a representation that doesn't
1987 have one, set the overflow bit so that we can produce some kind of
1988 error message at the appropriate point if necessary. It's not the
1989 most user-friendly message, but it's better than nothing. */
1990 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
1991 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
1992 TREE_OVERFLOW (t
) = 1;
1993 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
1994 && !MODE_HAS_NANS (TYPE_MODE (type
)))
1995 TREE_OVERFLOW (t
) = 1;
1996 /* Regular overflow, conversion produced an infinity in a mode that
1997 can't represent them. */
1998 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
1999 && REAL_VALUE_ISINF (value
)
2000 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
2001 TREE_OVERFLOW (t
) = 1;
/* Otherwise just carry over the source's overflow flag.  */
2003 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2007 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2008 to a floating point type. */
/* Convert FIXED_CST ARG1 to floating type TYPE via
   real_convert_from_fixed, propagating ARG1's overflow flag.  */
2011 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
2013 REAL_VALUE_TYPE value
;
2016 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
2017 t
= build_real (type
, value
);
2019 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2023 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2024 to another fixed-point type. */
/* Convert FIXED_CST ARG1 to fixed-point type TYPE (honoring TYPE's
   saturation), setting TREE_OVERFLOW if the conversion or the source
   had overflowed.  */
2027 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
2029 FIXED_VALUE_TYPE value
;
2033 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
2034 TYPE_SATURATING (type
));
2035 t
= build_fixed (type
, value
);
2037 /* Propagate overflow flags. */
2038 if (overflow_p
| TREE_OVERFLOW (arg1
))
2039 TREE_OVERFLOW (t
) = 1;
2043 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2044 to a fixed-point type. */
/* Convert INTEGER_CST ARG1 to fixed-point type TYPE.  The (at most
   two-element) INTEGER_CST is first packed into a double_int DI,
   sign-extending the high half when only one element is present.  */
2047 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
2049 FIXED_VALUE_TYPE value
;
2054 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
2056 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
2057 if (TREE_INT_CST_NUNITS (arg1
) == 1)
2058 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? (HOST_WIDE_INT
) -1 : 0;
2060 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
2062 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
), di
,
2063 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
2064 TYPE_SATURATING (type
));
2065 t
= build_fixed (type
, value
);
2067 /* Propagate overflow flags. */
2068 if (overflow_p
| TREE_OVERFLOW (arg1
))
2069 TREE_OVERFLOW (t
) = 1;
2073 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2074 to a fixed-point type. */
/* Convert REAL_CST ARG1 to fixed-point type TYPE (honoring TYPE's
   saturation), setting TREE_OVERFLOW if the conversion or the source
   had overflowed.  */
2077 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
2079 FIXED_VALUE_TYPE value
;
2083 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
2084 &TREE_REAL_CST (arg1
),
2085 TYPE_SATURATING (type
));
2086 t
= build_fixed (type
, value
);
2088 /* Propagate overflow flags. */
2089 if (overflow_p
| TREE_OVERFLOW (arg1
))
2090 TREE_OVERFLOW (t
) = 1;
2094 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2095 type TYPE. If no simplification can be done return NULL_TREE. */
/* Fold the type-conversion CODE of constant ARG1 to TYPE, dispatching
   on (target type kind, source constant kind) to the specialized
   helpers above.  Returns the folded constant, or (presumably)
   NULL_TREE when nothing applies — TODO confirm, the fallthrough
   return is elided in this extract.  */
2098 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
2100 if (TREE_TYPE (arg1
) == type
)
/* Target: pointer, integral or offset type.  */
2103 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
2104 || TREE_CODE (type
) == OFFSET_TYPE
)
2106 if (TREE_CODE (arg1
) == INTEGER_CST
)
2107 return fold_convert_const_int_from_int (type
, arg1
);
2108 else if (TREE_CODE (arg1
) == REAL_CST
)
2109 return fold_convert_const_int_from_real (code
, type
, arg1
);
2110 else if (TREE_CODE (arg1
) == FIXED_CST
)
2111 return fold_convert_const_int_from_fixed (type
, arg1
);
/* Target: floating-point type.  */
2113 else if (TREE_CODE (type
) == REAL_TYPE
)
2115 if (TREE_CODE (arg1
) == INTEGER_CST
)
2116 return build_real_from_int_cst (type
, arg1
);
2117 else if (TREE_CODE (arg1
) == REAL_CST
)
2118 return fold_convert_const_real_from_real (type
, arg1
);
2119 else if (TREE_CODE (arg1
) == FIXED_CST
)
2120 return fold_convert_const_real_from_fixed (type
, arg1
);
/* Target: fixed-point type.  */
2122 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
2124 if (TREE_CODE (arg1
) == FIXED_CST
)
2125 return fold_convert_const_fixed_from_fixed (type
, arg1
);
2126 else if (TREE_CODE (arg1
) == INTEGER_CST
)
2127 return fold_convert_const_fixed_from_int (type
, arg1
);
2128 else if (TREE_CODE (arg1
) == REAL_CST
)
2129 return fold_convert_const_fixed_from_real (type
, arg1
);
2134 /* Construct a vector of zero elements of vector type TYPE. */
/* Construct a VECTOR_CST of vector type TYPE whose elements are all
   zero (zero converted to the element type).  */
2137 build_zero_vector (tree type
)
2141 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2142 return build_vector_from_val (type
, t
);
2145 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
/* Return true if ARG (of type ORIG) can be converted to TYPE with a
   plain NOP_EXPR: error marks are rejected, identical main variants
   accepted, integral/pointer targets accept integral/pointer/offset
   sources or same-size vectors, and otherwise the type codes must
   match.  NOTE(review): the switch's case labels are partially
   elided in this extract; comments only.  */
2148 fold_convertible_p (const_tree type
, const_tree arg
)
2150 tree orig
= TREE_TYPE (arg
);
2155 if (TREE_CODE (arg
) == ERROR_MARK
2156 || TREE_CODE (type
) == ERROR_MARK
2157 || TREE_CODE (orig
) == ERROR_MARK
)
2160 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2163 switch (TREE_CODE (type
))
2165 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2166 case POINTER_TYPE
: case REFERENCE_TYPE
:
2168 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2169 || TREE_CODE (orig
) == OFFSET_TYPE
)
2171 return (TREE_CODE (orig
) == VECTOR_TYPE
2172 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2175 case FIXED_POINT_TYPE
:
2179 return TREE_CODE (type
) == TREE_CODE (orig
);
2186 /* Convert expression ARG to type TYPE. Used by the middle-end for
2187 simple conversions in preference to calling the front-end's convert. */
/* Convert expression ARG to TYPE at location LOC, the middle-end's
   lightweight alternative to the front-end convert.  Dispatches on
   the target type code: constants are folded via fold_convert_const,
   pointer address-space changes use ADDR_SPACE_CONVERT_EXPR, complex
   sources are narrowed through REALPART_EXPR, complex targets are
   widened with a zero imaginary part, vectors use VIEW_CONVERT_EXPR,
   and void drops the value via fold_ignored_result.
   NOTE(review): several case labels, braces and fallthrough lines
   are elided in this extract; comments only, no code token altered.  */
2190 fold_convert_loc (location_t loc
, tree type
, tree arg
)
2192 tree orig
= TREE_TYPE (arg
);
2198 if (TREE_CODE (arg
) == ERROR_MARK
2199 || TREE_CODE (type
) == ERROR_MARK
2200 || TREE_CODE (orig
) == ERROR_MARK
)
2201 return error_mark_node
;
2203 switch (TREE_CODE (type
))
2206 case REFERENCE_TYPE
:
2207 /* Handle conversions between pointers to different address spaces. */
2208 if (POINTER_TYPE_P (orig
)
2209 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
2210 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
2211 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
2214 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2216 if (TREE_CODE (arg
) == INTEGER_CST
)
2218 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2219 if (tem
!= NULL_TREE
)
2222 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2223 || TREE_CODE (orig
) == OFFSET_TYPE
)
2224 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2225 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2226 return fold_convert_loc (loc
, type
,
2227 fold_build1_loc (loc
, REALPART_EXPR
,
2228 TREE_TYPE (orig
), arg
));
2229 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2230 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2231 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
/* Target: floating-point (REAL_TYPE).  Try constant folds first.  */
2234 if (TREE_CODE (arg
) == INTEGER_CST
)
2236 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2237 if (tem
!= NULL_TREE
)
2240 else if (TREE_CODE (arg
) == REAL_CST
)
2242 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2243 if (tem
!= NULL_TREE
)
2246 else if (TREE_CODE (arg
) == FIXED_CST
)
2248 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2249 if (tem
!= NULL_TREE
)
2253 switch (TREE_CODE (orig
))
2256 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2257 case POINTER_TYPE
: case REFERENCE_TYPE
:
2258 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2261 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2263 case FIXED_POINT_TYPE
:
2264 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
/* Complex source: convert its real part.  */
2267 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2268 return fold_convert_loc (loc
, type
, tem
);
2274 case FIXED_POINT_TYPE
:
2275 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2276 || TREE_CODE (arg
) == REAL_CST
)
2278 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2279 if (tem
!= NULL_TREE
)
2280 goto fold_convert_exit
;
2283 switch (TREE_CODE (orig
))
2285 case FIXED_POINT_TYPE
:
2290 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2293 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2294 return fold_convert_loc (loc
, type
, tem
);
/* Target: COMPLEX_TYPE.  Scalar sources get a zero imaginary part.  */
2301 switch (TREE_CODE (orig
))
2304 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2305 case POINTER_TYPE
: case REFERENCE_TYPE
:
2307 case FIXED_POINT_TYPE
:
2308 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2309 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2310 fold_convert_loc (loc
, TREE_TYPE (type
),
2311 integer_zero_node
));
2316 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2318 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2319 TREE_OPERAND (arg
, 0));
2320 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2321 TREE_OPERAND (arg
, 1));
2322 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
/* General complex-to-complex: split, convert parts, rebuild.
   save_expr guards against evaluating ARG twice.  */
2325 arg
= save_expr (arg
);
2326 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2327 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2328 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2329 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2330 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
/* Target: VECTOR_TYPE.  Zero folds to a zero vector; otherwise a
   same-size VIEW_CONVERT_EXPR.  */
2338 if (integer_zerop (arg
))
2339 return build_zero_vector (type
);
2340 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2341 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2342 || TREE_CODE (orig
) == VECTOR_TYPE
);
2343 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
/* Target: void — discard the value but keep side effects.  */
2346 tem
= fold_ignored_result (arg
);
2347 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2350 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2351 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2355 protected_set_expr_location_unshare (tem
, loc
);
2359 /* Return false if expr can be assumed not to be an lvalue, true
/* Return true if X might be an lvalue, i.e. its tree code is one the
   NON_LVALUE_EXPR wrapper is needed for.  Unknown front-end codes
   (>= NUM_TREE_CODES) conservatively count as lvalues.
   NOTE(review): most case labels of the switch are elided in this
   extract; comments only.  */
2363 maybe_lvalue_p (const_tree x
)
2365 /* We only need to wrap lvalue tree codes. */
2366 switch (TREE_CODE (x
))
2379 case ARRAY_RANGE_REF
:
2385 case PREINCREMENT_EXPR
:
2386 case PREDECREMENT_EXPR
:
2388 case TRY_CATCH_EXPR
:
2389 case WITH_CLEANUP_EXPR
:
2398 /* Assume the worst for front-end tree codes. */
2399 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2407 /* Return an expr equal to X but certainly not valid as an lvalue. */
/* Return an expression equal to X but guaranteed not to be usable as
   an lvalue: wrap in NON_LVALUE_EXPR only when X might be one.  */
2410 non_lvalue_loc (location_t loc
, tree x
)
2412 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2417 if (! maybe_lvalue_p (x
))
2419 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2422 /* When pedantic, return an expr equal to X but certainly not valid as a
2423 pedantic lvalue. Otherwise, return X. */
/* Historical pedantic variant: nowadays just re-sets X's location to
   LOC (unsharing if needed) and returns it.  */
2426 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2428 return protected_set_expr_location_unshare (x
, loc
);
2431 /* Given a tree comparison code, return the code that is the logical inverse.
2432 It is generally not safe to do this for floating-point comparisons, except
2433 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2434 ERROR_MARK in this case. */
/* Return the logical inverse of comparison CODE.  With HONOR_NANS and
   trapping math, inverting anything other than EQ/NE/ORDERED/UNORDERED
   is unsafe (the elided branch here presumably returns ERROR_MARK per
   the file's header comment — TODO confirm); otherwise ordered codes
   invert to their UN* counterparts when NaNs are honored.
   NOTE(review): the switch header and several case labels are elided
   in this extract; comments only.  */
2437 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2439 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2440 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2450 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2452 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2454 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2456 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2470 return UNORDERED_EXPR
;
2471 case UNORDERED_EXPR
:
2472 return ORDERED_EXPR
;
2478 /* Similar, but return the comparison that results if the operands are
2479 swapped. This is safe for floating-point. */
/* Return the comparison resulting from swapping CODE's operands;
   safe for floating point per the comment above.
   NOTE(review): the body of this function is almost entirely elided
   in this extract (only one case label survives); left untouched.  */
2482 swap_tree_comparison (enum tree_code code
)
2489 case UNORDERED_EXPR
:
2515 /* Convert a comparison tree code from an enum tree_code representation
2516 into a compcode bit-based encoding. This function is the inverse of
2517 compcode_to_comparison. */
/* Map tree comparison CODE to its COMPCODE_* bit-based encoding
   (inverse of compcode_to_comparison).
   NOTE(review): the switch header and the ordered-code cases are
   elided in this extract; comments only.  */
2519 static enum comparison_code
2520 comparison_to_compcode (enum tree_code code
)
2537 return COMPCODE_ORD
;
2538 case UNORDERED_EXPR
:
2539 return COMPCODE_UNORD
;
2541 return COMPCODE_UNLT
;
2543 return COMPCODE_UNEQ
;
2545 return COMPCODE_UNLE
;
2547 return COMPCODE_UNGT
;
2549 return COMPCODE_LTGT
;
2551 return COMPCODE_UNGE
;
2557 /* Convert a compcode bit-based encoding of a comparison operator back
2558 to GCC's enum tree_code representation. This function is the
2559 inverse of comparison_to_compcode. */
/* Map COMPCODE_* bit encoding CODE back to the tree comparison code
   (inverse of comparison_to_compcode).
   NOTE(review): most cases are elided in this extract; comments
   only.  */
2561 static enum tree_code
2562 compcode_to_comparison (enum comparison_code code
)
2579 return ORDERED_EXPR
;
2580 case COMPCODE_UNORD
:
2581 return UNORDERED_EXPR
;
2599 /* Return a tree for the comparison which is the combination of
2600 doing the AND or OR (depending on CODE) of the two operations LCODE
2601 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2602 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2603 if this makes the transformation invalid. */
/* Fold the AND/OR (per CODE) of comparisons LCODE and RCODE applied
   to the identical operands LL_ARG and LR_ARG, using the COMPCODE_*
   bit encoding so the combination is plain bitwise &/|.  Without
   NaNs the unordered bits are dropped; with trapping math the result
   is rejected (returns NULL_TREE, elided here) if it would change
   which inputs trap.  TRUE/FALSE results become boolean constants;
   otherwise the combined compcode is mapped back to a comparison.
   NOTE(review): the switch header, some braces and early returns are
   elided in this extract; comments only, no code token altered.  */
2606 combine_comparisons (location_t loc
,
2607 enum tree_code code
, enum tree_code lcode
,
2608 enum tree_code rcode
, tree truth_type
,
2609 tree ll_arg
, tree lr_arg
)
2611 bool honor_nans
= HONOR_NANS (ll_arg
);
2612 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2613 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2618 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2619 compcode
= lcompcode
& rcompcode
;
2622 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2623 compcode
= lcompcode
| rcompcode
;
2632 /* Eliminate unordered comparisons, as well as LTGT and ORD
2633 which are not used unless the mode has NaNs. */
2634 compcode
&= ~COMPCODE_UNORD
;
2635 if (compcode
== COMPCODE_LTGT
)
2636 compcode
= COMPCODE_NE
;
2637 else if (compcode
== COMPCODE_ORD
)
2638 compcode
= COMPCODE_TRUE
;
2640 else if (flag_trapping_math
)
2642 /* Check that the original operation and the optimized ones will trap
2643 under the same condition. */
2644 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2645 && (lcompcode
!= COMPCODE_EQ
)
2646 && (lcompcode
!= COMPCODE_ORD
);
2647 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2648 && (rcompcode
!= COMPCODE_EQ
)
2649 && (rcompcode
!= COMPCODE_ORD
);
2650 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2651 && (compcode
!= COMPCODE_EQ
)
2652 && (compcode
!= COMPCODE_ORD
);
2654 /* In a short-circuited boolean expression the LHS might be
2655 such that the RHS, if evaluated, will never trap. For
2656 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2657 if neither x nor y is NaN. (This is a mixed blessing: for
2658 example, the expression above will never trap, hence
2659 optimizing it to x < y would be invalid). */
2660 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2661 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2664 /* If the comparison was short-circuited, and only the RHS
2665 trapped, we may now generate a spurious trap. */
2667 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2670 /* If we changed the conditions that cause a trap, we lose. */
2671 if ((ltrap
|| rtrap
) != trap
)
2675 if (compcode
== COMPCODE_TRUE
)
2676 return constant_boolean_node (true, truth_type
);
2677 else if (compcode
== COMPCODE_FALSE
)
2678 return constant_boolean_node (false, truth_type
);
2681 enum tree_code tcode
;
2683 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2684 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
/* NOTE(review): garbled extract -- statements are split across lines,
   original line numbers are fused into the text, and many structural
   lines (braces, `case' labels, `break;'/`return' statements) are
   missing.  Annotations below describe only the visible logic.  */
2688 /* Return nonzero if two operands (typically of the same tree node)
2689 are necessarily equal. If either argument has side-effects this
2690 function returns zero. FLAGS modifies behavior as follows:
2692 If OEP_ONLY_CONST is set, only return nonzero for constants.
2693 This function tests whether the operands are indistinguishable;
2694 it does not test whether they are equal using C's == operation.
2695 The distinction is important for IEEE floating point, because
2696 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2697 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2699 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2700 even though it may hold multiple values during a function.
2701 This is because a GCC tree node guarantees that nothing else is
2702 executed between the evaluation of its "operands" (which may often
2703 be evaluated in arbitrary order). Hence if the operands themselves
2704 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2705 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2706 unset means assuming isochronic (or instantaneous) tree equivalence.
2707 Unless comparing arbitrary expression trees, such as from different
2708 statements, this flag can usually be left unset.
2710 If OEP_PURE_SAME is set, then pure functions with identical arguments
2711 are considered the same. It is used when the caller has other ways
2712 to ensure that global memory is unchanged in between. */
2715 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
/* Error nodes never compare equal to anything.  */
2717 /* If either is ERROR_MARK, they aren't equal. */
2718 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2719 || TREE_TYPE (arg0
) == error_mark_node
2720 || TREE_TYPE (arg1
) == error_mark_node
)
2723 /* Similar, if either does not have a type (like a released SSA name),
2724 they aren't equal. */
2725 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
/* Integer constants compare by value, regardless of type precision.  */
2728 /* Check equality of integer constants before bailing out due to
2729 precision differences. */
2730 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2731 return tree_int_cst_equal (arg0
, arg1
);
2733 /* If both types don't have the same signedness, then we can't consider
2734 them equal. We must check this before the STRIP_NOPS calls
2735 because they may change the signedness of the arguments. As pointers
2736 strictly don't have a signedness, require either two pointers or
2737 two non-pointers as well. */
2738 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2739 || POINTER_TYPE_P (TREE_TYPE (arg0
)) != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2742 /* We cannot consider pointers to different address space equal. */
2743 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && POINTER_TYPE_P (TREE_TYPE (arg1
))
2744 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2745 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2748 /* If both types don't have the same precision, then it is not safe
2750 if (element_precision (TREE_TYPE (arg0
))
2751 != element_precision (TREE_TYPE (arg1
)))
/* Mismatched comparison codes may still match after swapping one
   comparison's operands (e.g. a < b vs. b > a).  */
2757 /* In case both args are comparisons but with different comparison
2758 code, try to swap the comparison operands of one arg to produce
2759 a match and compare that variant. */
2760 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2761 && COMPARISON_CLASS_P (arg0
)
2762 && COMPARISON_CLASS_P (arg1
))
2764 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2766 if (TREE_CODE (arg0
) == swap_code
)
2767 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2768 TREE_OPERAND (arg1
, 1), flags
)
2769 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2770 TREE_OPERAND (arg1
, 0), flags
);
2773 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2774 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2775 && !(CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
)))
2778 /* This is needed for conversions and for COMPONENT_REF.
2779 Might as well play it safe and always test this. */
2780 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2781 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2782 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2785 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2786 We don't care about side effects in that case because the SAVE_EXPR
2787 takes care of that for us. In all other cases, two expressions are
2788 equal if they have no side effects. If we have two identical
2789 expressions with side effects that should be treated the same due
2790 to the only side effects being identical SAVE_EXPR's, that will
2791 be detected in the recursive calls below.
2792 If we are taking an invariant address of two identical objects
2793 they are necessarily equal as well. */
2794 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2795 && (TREE_CODE (arg0
) == SAVE_EXPR
2796 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2797 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
/* Constant cases: each constant kind gets its own equality notion.
   (Several `case'/`break' lines of this switch were lost in
   extraction -- TODO confirm against the original file.)  */
2800 /* Next handle constant cases, those for which we can return 1 even
2801 if ONLY_CONST is set. */
2802 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2803 switch (TREE_CODE (arg0
))
2806 return tree_int_cst_equal (arg0
, arg1
);
2809 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2810 TREE_FIXED_CST (arg1
));
/* Reals compare bit-identically; +0.0 and -0.0 are only merged when
   signed zeros need not be honored.  */
2813 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2814 TREE_REAL_CST (arg1
)))
2818 if (!HONOR_SIGNED_ZEROS (arg0
))
2820 /* If we do not distinguish between signed and unsigned zero,
2821 consider them equal. */
2822 if (real_zerop (arg0
) && real_zerop (arg1
))
/* Vector constants: same element count and element-wise equality.  */
2831 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2834 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2836 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2837 VECTOR_CST_ELT (arg1
, i
), flags
))
2844 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2846 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2850 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2851 && ! memcmp (TREE_STRING_POINTER (arg0
),
2852 TREE_STRING_POINTER (arg1
),
2853 TREE_STRING_LENGTH (arg0
)));
/* ADDR_EXPR of constants: compare addressed objects, propagating the
   address-of flags.  */
2856 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2857 TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
)
2858 ? OEP_CONSTANT_ADDRESS_OF
| OEP_ADDRESS_OF
: 0);
2863 if (flags
& OEP_ONLY_CONST
)
2866 /* Define macros to test an operand from arg0 and arg1 for equality and a
2867 variant that allows null and views null as being different from any
2868 non-null value. In the latter case, if either is null, the both
2869 must be; otherwise, do the normal comparison. */
2870 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2871 TREE_OPERAND (arg1, N), flags)
2873 #define OP_SAME_WITH_NULL(N) \
2874 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2875 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
/* Structural comparison by tree-code class.  */
2877 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2880 /* Two conversions are equal only if signedness and modes match. */
2881 switch (TREE_CODE (arg0
))
2884 case FIX_TRUNC_EXPR
:
2885 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2886 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2896 case tcc_comparison
:
2898 if (OP_SAME (0) && OP_SAME (1))
2901 /* For commutative ops, allow the other order. */
2902 return (commutative_tree_code (TREE_CODE (arg0
))
2903 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2904 TREE_OPERAND (arg1
, 1), flags
)
2905 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2906 TREE_OPERAND (arg1
, 0), flags
));
2909 /* If either of the pointer (or reference) expressions we are
2910 dereferencing contain a side effect, these cannot be equal,
2911 but their addresses can be. */
2912 if ((flags
& OEP_CONSTANT_ADDRESS_OF
) == 0
2913 && (TREE_SIDE_EFFECTS (arg0
)
2914 || TREE_SIDE_EFFECTS (arg1
)))
2917 switch (TREE_CODE (arg0
))
/* Presumably the INDIRECT_REF case: alignment must also match unless
   only the address is being compared -- TODO confirm.  */
2920 if (!(flags
& OEP_ADDRESS_OF
)
2921 && (TYPE_ALIGN (TREE_TYPE (arg0
))
2922 != TYPE_ALIGN (TREE_TYPE (arg1
))))
/* Once past the outermost reference, address-of no longer applies to
   inner operands.  */
2924 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2931 case TARGET_MEM_REF
:
2933 /* Require equal access sizes, and similar pointer types.
2934 We can have incomplete types for array references of
2935 variable-sized arrays from the Fortran frontend
2936 though. Also verify the types are compatible. */
2937 if (!((TYPE_SIZE (TREE_TYPE (arg0
)) == TYPE_SIZE (TREE_TYPE (arg1
))
2938 || (TYPE_SIZE (TREE_TYPE (arg0
))
2939 && TYPE_SIZE (TREE_TYPE (arg1
))
2940 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2941 TYPE_SIZE (TREE_TYPE (arg1
)), flags
)))
2942 && types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
))
2943 && ((flags
& OEP_ADDRESS_OF
)
2944 || (alias_ptr_types_compatible_p
2945 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
2946 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
2947 && (MR_DEPENDENCE_CLIQUE (arg0
)
2948 == MR_DEPENDENCE_CLIQUE (arg1
))
2949 && (MR_DEPENDENCE_BASE (arg0
)
2950 == MR_DEPENDENCE_BASE (arg1
))
2951 && (TYPE_ALIGN (TREE_TYPE (arg0
))
2952 == TYPE_ALIGN (TREE_TYPE (arg1
)))))))
2954 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2955 return (OP_SAME (0) && OP_SAME (1)
2956 /* TARGET_MEM_REF require equal extra operands. */
2957 && (TREE_CODE (arg0
) != TARGET_MEM_REF
2958 || (OP_SAME_WITH_NULL (2)
2959 && OP_SAME_WITH_NULL (3)
2960 && OP_SAME_WITH_NULL (4))));
2963 case ARRAY_RANGE_REF
:
2964 /* Operands 2 and 3 may be null.
2965 Compare the array index by value if it is constant first as we
2966 may have different types but same value here. */
2969 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2970 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
2971 TREE_OPERAND (arg1
, 1))
2973 && OP_SAME_WITH_NULL (2)
2974 && OP_SAME_WITH_NULL (3));
2977 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2978 may be NULL when we're called to compare MEM_EXPRs. */
2979 if (!OP_SAME_WITH_NULL (0)
2982 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2983 return OP_SAME_WITH_NULL (2);
2988 flags
&= ~(OEP_CONSTANT_ADDRESS_OF
|OEP_ADDRESS_OF
);
2989 return OP_SAME (1) && OP_SAME (2);
2995 case tcc_expression
:
2996 switch (TREE_CODE (arg0
))
/* Presumably the ADDR_EXPR case: compare addressed operands with
   OEP_ADDRESS_OF added -- TODO confirm.  */
2999 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3000 TREE_OPERAND (arg1
, 0),
3001 flags
| OEP_ADDRESS_OF
);
3003 case TRUTH_NOT_EXPR
:
3006 case TRUTH_ANDIF_EXPR
:
3007 case TRUTH_ORIF_EXPR
:
3008 return OP_SAME (0) && OP_SAME (1);
3011 case WIDEN_MULT_PLUS_EXPR
:
3012 case WIDEN_MULT_MINUS_EXPR
:
3015 /* The multiplcation operands are commutative. */
3018 case TRUTH_AND_EXPR
:
3020 case TRUTH_XOR_EXPR
:
3021 if (OP_SAME (0) && OP_SAME (1))
3024 /* Otherwise take into account this is a commutative operation. */
3025 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3026 TREE_OPERAND (arg1
, 1), flags
)
3027 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3028 TREE_OPERAND (arg1
, 0), flags
));
3033 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
/* Call expressions: internal vs. normal calls, then callee, then all
   arguments must match; flags gate pure/const side-effect handling.  */
3040 switch (TREE_CODE (arg0
))
3043 if ((CALL_EXPR_FN (arg0
) == NULL_TREE
)
3044 != (CALL_EXPR_FN (arg1
) == NULL_TREE
))
3045 /* If not both CALL_EXPRs are either internal or normal function
3046 functions, then they are not equal. */
3048 else if (CALL_EXPR_FN (arg0
) == NULL_TREE
)
3050 /* If the CALL_EXPRs call different internal functions, then they
3052 if (CALL_EXPR_IFN (arg0
) != CALL_EXPR_IFN (arg1
))
3057 /* If the CALL_EXPRs call different functions, then they are not
3059 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3065 unsigned int cef
= call_expr_flags (arg0
);
3066 if (flags
& OEP_PURE_SAME
)
3067 cef
&= ECF_CONST
| ECF_PURE
;
3074 /* Now see if all the arguments are the same. */
3076 const_call_expr_arg_iterator iter0
, iter1
;
3078 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3079 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3081 a0
= next_const_call_expr_arg (&iter0
),
3082 a1
= next_const_call_expr_arg (&iter1
))
3083 if (! operand_equal_p (a0
, a1
, flags
))
3086 /* If we get here and both argument lists are exhausted
3087 then the CALL_EXPRs are equal. */
3088 return ! (a0
|| a1
);
3094 case tcc_declaration
:
3095 /* Consider __builtin_sqrt equal to sqrt. */
3096 return (TREE_CODE (arg0
) == FUNCTION_DECL
3097 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
3098 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3099 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
3106 #undef OP_SAME_WITH_NULL
/* NOTE(review): garbled extract -- statements split across lines and
   some structural lines elided.  Annotations describe visible logic.  */
3109 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3110 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3112 When in doubt, return 0. */
3115 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
3117 int unsignedp1
, unsignedpo
;
3118 tree primarg0
, primarg1
, primother
;
3119 unsigned int correct_width
;
/* Trivially equal operands need no further work.  */
3121 if (operand_equal_p (arg0
, arg1
, 0))
/* Only integral types can have been produced by shorten_compare.  */
3124 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
3125 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
3128 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3129 and see if the inner values are the same. This removes any
3130 signedness comparison, which doesn't matter here. */
3131 primarg0
= arg0
, primarg1
= arg1
;
3132 STRIP_NOPS (primarg0
);
3133 STRIP_NOPS (primarg1
);
3134 if (operand_equal_p (primarg0
, primarg1
, 0))
3137 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3138 actual comparison operand, ARG0.
3140 First throw away any conversions to wider types
3141 already present in the operands. */
3143 primarg1
= get_narrower (arg1
, &unsignedp1
);
3144 primother
= get_narrower (other
, &unsignedpo
);
/* Both narrowed operands must share signedness and be strictly
   narrower than ARG1's precision for the shortening to have applied.  */
3146 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
3147 if (unsignedp1
== unsignedpo
3148 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
3149 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
3151 tree type
= TREE_TYPE (arg0
);
3153 /* Make sure shorter operand is extended the right way
3154 to match the longer operand. */
3155 primarg1
= fold_convert (signed_or_unsigned_type_for
3156 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
/* If re-widening the shortened operand reproduces ARG0, they match.  */
3158 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
/* NOTE(review): garbled extract -- statements split across lines and
   some structural lines (switch header for tclass, returns) elided.  */
3165 /* See if ARG is an expression that is either a comparison or is performing
3166 arithmetic on comparisons. The comparisons must only be comparing
3167 two different values, which will be stored in *CVAL1 and *CVAL2; if
3168 they are nonzero it means that some operands have already been found.
3169 No variables may be used anywhere else in the expression except in the
3170 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3171 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3173 If this is true, return 1. Otherwise, return zero. */
3176 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
3178 enum tree_code code
= TREE_CODE (arg
);
3179 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
/* Reclassify a few tcc_expression codes so the switch below can treat
   them as unary/binary arithmetic on comparisons.  */
3181 /* We can handle some of the tcc_expression cases here. */
3182 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3184 else if (tclass
== tcc_expression
3185 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
3186 || code
== COMPOUND_EXPR
))
3187 tclass
= tcc_binary
;
/* A side-effect-free SAVE_EXPR can be looked through, but only before
   any comparison operand has been recorded.  */
3189 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
3190 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
3192 /* If we've already found a CVAL1 or CVAL2, this expression is
3193 two complex to handle. */
3194 if (*cval1
|| *cval2
)
/* Unary case: recurse into the single operand.  */
3204 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
/* Binary case: both operands must satisfy the property.  */
3207 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
3208 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3209 cval1
, cval2
, save_p
));
3214 case tcc_expression
:
3215 if (code
== COND_EXPR
)
3216 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
3217 cval1
, cval2
, save_p
)
3218 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
3219 cval1
, cval2
, save_p
)
3220 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
3221 cval1
, cval2
, save_p
));
3224 case tcc_comparison
:
3225 /* First see if we can handle the first operand, then the second. For
3226 the second operand, we know *CVAL1 can't be zero. It must be that
3227 one side of the comparison is each of the values; test for the
3228 case where this isn't true by failing if the two operands
3231 if (operand_equal_p (TREE_OPERAND (arg
, 0),
3232 TREE_OPERAND (arg
, 1), 0))
/* Record operand 0 as one of the two values, or match it against a
   previously recorded value.  (The `if (*cval1 == 0)' guard line
   appears to have been elided -- TODO confirm.)  */
3236 *cval1
= TREE_OPERAND (arg
, 0);
3237 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
3239 else if (*cval2
== 0)
3240 *cval2
= TREE_OPERAND (arg
, 0);
3241 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
/* Same for operand 1.  */
3246 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
3248 else if (*cval2
== 0)
3249 *cval2
= TREE_OPERAND (arg
, 1);
3250 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
/* NOTE(review): garbled extract -- statements split across lines and
   some structural lines (switch header, case labels) elided.  */
3262 /* ARG is a tree that is known to contain just arithmetic operations and
3263 comparisons. Evaluate the operations in the tree substituting NEW0 for
3264 any occurrence of OLD0 as an operand of a comparison and likewise for
3268 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
3269 tree old1
, tree new1
)
3271 tree type
= TREE_TYPE (arg
);
3272 enum tree_code code
= TREE_CODE (arg
);
3273 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
/* Same reclassification trick as in twoval_comparison_p.  */
3275 /* We can handle some of the tcc_expression cases here. */
3276 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3278 else if (tclass
== tcc_expression
3279 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
3280 tclass
= tcc_binary
;
/* Unary: rebuild with the substituted operand.  */
3285 return fold_build1_loc (loc
, code
, type
,
3286 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3287 old0
, new0
, old1
, new1
));
/* Binary: rebuild with both operands substituted.  */
3290 return fold_build2_loc (loc
, code
, type
,
3291 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3292 old0
, new0
, old1
, new1
),
3293 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3294 old0
, new0
, old1
, new1
));
3296 case tcc_expression
:
/* Presumably the SAVE_EXPR and COMPOUND_EXPR cases recurse into one
   operand each; the COND_EXPR case rebuilds all three -- the case
   labels were lost in extraction, TODO confirm.  */
3300 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
3304 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
3308 return fold_build3_loc (loc
, code
, type
,
3309 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3310 old0
, new0
, old1
, new1
),
3311 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3312 old0
, new0
, old1
, new1
),
3313 eval_subst (loc
, TREE_OPERAND (arg
, 2),
3314 old0
, new0
, old1
, new1
));
3318 /* Fall through - ??? */
3320 case tcc_comparison
:
3322 tree arg0
= TREE_OPERAND (arg
, 0);
3323 tree arg1
= TREE_OPERAND (arg
, 1);
3325 /* We need to check both for exact equality and tree equality. The
3326 former will be true if the operand has a side-effect. In that
3327 case, we know the operand occurred exactly once. */
/* Replace each comparison operand that matches OLD0/OLD1 with
   NEW0/NEW1 respectively.  (The assignment lines `arg0 = new0;' etc.
   appear to have been elided.)  */
3329 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3331 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3334 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3336 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
3339 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
/* NOTE(review): garbled extract -- statements split across lines.  */
3347 /* Return a tree for the case when the result of an expression is RESULT
3348 converted to TYPE and OMITTED was previously an operand of the expression
3349 but is now not needed (e.g., we folded OMITTED * 0).
3351 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3352 the conversion of RESULT to TYPE. */
3355 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
3357 tree t
= fold_convert_loc (loc
, type
, result
);
3359 /* If the resulting operand is an empty statement, just return the omitted
3360 statement casted to void. */
3361 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
3362 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3363 fold_ignored_result (omitted
));
/* Side-effecting omitted operand must still be evaluated, sequenced
   before the result via a COMPOUND_EXPR.  */
3365 if (TREE_SIDE_EFFECTS (omitted
))
3366 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3367 fold_ignored_result (omitted
), t
);
/* No side effects: just the converted result, stripped of lvalue-ness.  */
3369 return non_lvalue_loc (loc
, t
);
/* NOTE(review): garbled extract -- statements split across lines.  */
3372 /* Return a tree for the case when the result of an expression is RESULT
3373 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3374 of the expression but are now not needed.
3376 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3377 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3378 evaluated before OMITTED2. Otherwise, if neither has side effects,
3379 just do the conversion of RESULT to TYPE. */
3382 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3383 tree omitted1
, tree omitted2
)
3385 tree t
= fold_convert_loc (loc
, type
, result
);
/* Wrap OMITTED2 first, then OMITTED1 outermost, so that OMITTED1 is
   evaluated before OMITTED2 at runtime.  */
3387 if (TREE_SIDE_EFFECTS (omitted2
))
3388 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3389 if (TREE_SIDE_EFFECTS (omitted1
))
3390 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
/* Only strip lvalue-ness when no COMPOUND_EXPR wrapper was added.  */
3392 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
/* NOTE(review): garbled extract -- statements split across lines and
   several structural lines (the main `switch (code)' header, some case
   labels, `return NULL_TREE's) elided.  */
3396 /* Return a simplified tree node for the truth-negation of ARG. This
3397 never alters ARG itself. We assume that ARG is an operation that
3398 returns a truth value (0 or 1).
3400 FIXME: one would think we would fold the result, but it causes
3401 problems with the dominator optimizer. */
3404 fold_truth_not_expr (location_t loc
, tree arg
)
3406 tree type
= TREE_TYPE (arg
);
3407 enum tree_code code
= TREE_CODE (arg
);
3408 location_t loc1
, loc2
;
3410 /* If this is a comparison, we can simply invert it, except for
3411 floating-point non-equality comparisons, in which case we just
3412 enclose a TRUTH_NOT_EXPR around what we have. */
3414 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3416 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
/* With -ftrapping-math, inverting an FP ordering comparison could
   change its trap behavior, so bail except for (UN)ORDERED/EQ/NE.  */
3417 if (FLOAT_TYPE_P (op_type
)
3418 && flag_trapping_math
3419 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3420 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3423 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
3424 if (code
== ERROR_MARK
)
3427 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3428 TREE_OPERAND (arg
, 1));
/* Presumably the INTEGER_CST case of the elided switch: negate a
   constant truth value.  */
3434 return constant_boolean_node (integer_zerop (arg
), type
);
/* De Morgan: !(a && b) => !a || !b.  */
3436 case TRUTH_AND_EXPR
:
3437 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3438 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3439 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3440 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3441 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
/* De Morgan: !(a || b) => !a && !b.  */
3444 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3445 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3446 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3447 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3448 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3450 case TRUTH_XOR_EXPR
:
3451 /* Here we can invert either operand. We invert the first operand
3452 unless the second operand is a TRUTH_NOT_EXPR in which case our
3453 result is the XOR of the first operand with the inside of the
3454 negation of the second operand. */
3456 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3457 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3458 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3460 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3461 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3462 TREE_OPERAND (arg
, 1));
/* Short-circuit De Morgan: !(a &&* b) => !a ||* !b and vice versa.  */
3464 case TRUTH_ANDIF_EXPR
:
3465 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3466 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3467 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3468 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3469 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3471 case TRUTH_ORIF_EXPR
:
3472 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3473 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3474 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3475 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3476 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
/* Double negation cancels.  */
3478 case TRUTH_NOT_EXPR
:
3479 return TREE_OPERAND (arg
, 0);
/* COND_EXPR: invert both arms, leaving void (throw) arms untouched.  */
3483 tree arg1
= TREE_OPERAND (arg
, 1);
3484 tree arg2
= TREE_OPERAND (arg
, 2);
3486 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3487 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3489 /* A COND_EXPR may have a throw as one operand, which
3490 then has void type. Just leave void operands
3492 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3493 VOID_TYPE_P (TREE_TYPE (arg1
))
3494 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3495 VOID_TYPE_P (TREE_TYPE (arg2
))
3496 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
/* COMPOUND_EXPR: negate only the value-producing second operand.  */
3500 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3501 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3502 TREE_OPERAND (arg
, 0),
3503 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3505 case NON_LVALUE_EXPR
:
3506 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3507 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
/* Presumably the NOP_EXPR/CONVERT_EXPR case: a conversion to boolean
   can only be negated by wrapping, otherwise push through.  */
3510 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3511 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3513 /* ... fall through ... */
3516 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3517 return build1_loc (loc
, TREE_CODE (arg
), type
,
3518 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
/* Presumably BIT_AND_EXPR with 1: (x & 1) negated is (x == 0).  */
3521 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3523 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
/* SAVE_EXPR (presumably): wrap in an explicit TRUTH_NOT_EXPR.  */
3526 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3528 case CLEANUP_POINT_EXPR
:
3529 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3530 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3531 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
/* NOTE(review): garbled extract -- the fold_unary_loc call is truncated;
   the code argument (TRUTH_NOT_EXPR vs. BIT_NOT_EXPR for vectors, per
   the VECTOR_TYPE_P test) was elided -- TODO confirm.  */
3538 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3539 assume that ARG is an operation that returns a truth value (0 or 1
3540 for scalars, 0 or -1 for vectors). Return the folded expression if
3541 folding is successful. Otherwise, return NULL_TREE. */
3544 fold_invert_truthvalue (location_t loc
, tree arg
)
3546 tree type
= TREE_TYPE (arg
);
3547 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
/* NOTE(review): garbled extract -- the fold_build1_loc call is
   truncated; as above, the selected code for vectors vs. scalars was
   elided -- TODO confirm.  */
3553 /* Return a simplified tree node for the truth-negation of ARG. This
3554 never alters ARG itself. We assume that ARG is an operation that
3555 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3558 invert_truthvalue_loc (location_t loc
, tree arg
)
/* Error nodes propagate unchanged (the `return arg;' line appears to
   have been elided).  */
3560 if (TREE_CODE (arg
) == ERROR_MARK
)
3563 tree type
= TREE_TYPE (arg
);
3564 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
/* NOTE(review): garbled extract -- statements split across lines; the
   local declarations (`common', `left', `right') and the final `else
   return 0;' appear to have been elided.  */
3570 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3571 operands are another bit-wise operation with a common input. If so,
3572 distribute the bit operations to save an operation and possibly two if
3573 constants are involved. For example, convert
3574 (A | B) & (A | C) into A | (B & C)
3575 Further simplification will occur if B and C are constants.
3577 If this optimization cannot be done, 0 will be returned. */
3580 distribute_bit_expr (location_t loc
, enum tree_code code
, tree type
,
3581 tree arg0
, tree arg1
)
/* Both operands must be the same bitwise op (AND or IOR) and distinct
   from the outer CODE for the distribution to apply.  */
3586 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3587 || TREE_CODE (arg0
) == code
3588 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3589 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
/* Try all four pairings to locate the shared operand COMMON and the
   two remaining operands LEFT and RIGHT.  */
3592 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3594 common
= TREE_OPERAND (arg0
, 0);
3595 left
= TREE_OPERAND (arg0
, 1);
3596 right
= TREE_OPERAND (arg1
, 1);
3598 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3600 common
= TREE_OPERAND (arg0
, 0);
3601 left
= TREE_OPERAND (arg0
, 1);
3602 right
= TREE_OPERAND (arg1
, 0);
3604 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3606 common
= TREE_OPERAND (arg0
, 1);
3607 left
= TREE_OPERAND (arg0
, 0);
3608 right
= TREE_OPERAND (arg1
, 1);
3610 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3612 common
= TREE_OPERAND (arg0
, 1);
3613 left
= TREE_OPERAND (arg0
, 0);
3614 right
= TREE_OPERAND (arg1
, 0);
/* Rebuild as INNER_OP (common, CODE (left, right)) in TYPE.  */
3619 common
= fold_convert_loc (loc
, type
, common
);
3620 left
= fold_convert_loc (loc
, type
, left
);
3621 right
= fold_convert_loc (loc
, type
, right
);
3622 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, common
,
3623 fold_build2_loc (loc
, code
, type
, left
, right
));
/* NOTE(review): garbled extract -- statements split across lines; some
   guard lines (e.g. the `mul0 == mul1' test at original line 3636 and
   the `if (!mul0)/(!mul1)' guards around the reciprocal computations)
   appear to have been elided.  */
3626 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3627 with code CODE. This optimization is unsafe. */
3629 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3630 tree arg0
, tree arg1
)
3632 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3633 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
/* Shared divisor: combine the dividends first, divide (or multiply by
   the reciprocal) once.  */
3635 /* (A / C) +- (B / C) -> (A +- B) / C. */
3637 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3638 TREE_OPERAND (arg1
, 1), 0))
3639 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3640 fold_build2_loc (loc
, code
, type
,
3641 TREE_OPERAND (arg0
, 0),
3642 TREE_OPERAND (arg1
, 0)),
3643 TREE_OPERAND (arg0
, 1));
/* Shared dividend, constant divisors: fold the two reciprocals into a
   single constant multiplier at compile time.  */
3645 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3646 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3647 TREE_OPERAND (arg1
, 0), 0)
3648 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3649 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3651 REAL_VALUE_TYPE r0
, r1
;
3652 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3653 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3655 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3657 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3658 real_arithmetic (&r0
, code
, &r0
, &r1
);
3659 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3660 TREE_OPERAND (arg0
, 0),
3661 build_real (type
, r0
));
/* NOTE(review): garbled extract -- statements split across lines; the
   declaration of `bftype' assignment and some guard lines elided.  */
3667 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3668 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3671 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3672 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
, int unsignedp
)
3674 tree result
, bftype
;
/* If the whole object is being read, a plain conversion suffices
   (no bit-field extraction needed).  The bitpos == 0 guard appears
   to have been elided -- TODO confirm.  */
3678 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3679 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3680 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3681 && tree_fits_shwi_p (size
)
3682 && tree_to_shwi (size
) == bitsize
)
3683 return fold_convert_loc (loc
, type
, inner
);
/* Pick an integer type matching the field's exact width/signedness.  */
3687 if (TYPE_PRECISION (bftype
) != bitsize
3688 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3689 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3691 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3692 size_int (bitsize
), bitsize_int (bitpos
));
/* Convert back to the requested TYPE if it differs.  */
3695 result
= fold_convert_loc (loc
, type
, result
);
3700 /* Optimize a bit-field compare.
3702 There are two cases: First is a compare against a constant and the
3703 second is a comparison of two items where the fields are at the same
3704 bit position relative to the start of a chunk (byte, halfword, word)
3705 large enough to contain it. In these cases we can avoid the shift
3706 implicit in bitfield extractions.
3708 For constants, we emit a compare of the shifted constant with the
3709 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3710 compared. For two fields at the same position, we do the ANDs with the
3711 similar mask and compare the result of the ANDs.
3713 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3714 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3715 are the left and right operands of the comparison, respectively.
3717 If the optimization described above can be done, we return the resulting
3718 tree. Otherwise we return zero. */
3721 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3722 tree compare_type
, tree lhs
, tree rhs
)
3724 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3725 tree type
= TREE_TYPE (lhs
);
3727 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3728 machine_mode lmode
, rmode
, nmode
;
3729 int lunsignedp
, runsignedp
;
3730 int lvolatilep
= 0, rvolatilep
= 0;
3731 tree linner
, rinner
= NULL_TREE
;
3735 /* Get all the information about the extractions being done. If the bit size
3736 if the same as the size of the underlying object, we aren't doing an
3737 extraction at all and so can do nothing. We also don't want to
3738 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3739 then will no longer be able to replace it. */
3740 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3741 &lunsignedp
, &lvolatilep
, false);
3742 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3743 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
|| lvolatilep
)
3748 /* If this is not a constant, we can only do something if bit positions,
3749 sizes, and signedness are the same. */
3750 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3751 &runsignedp
, &rvolatilep
, false);
3753 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3754 || lunsignedp
!= runsignedp
|| offset
!= 0
3755 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
|| rvolatilep
)
3759 /* See if we can find a mode to refer to this field. We should be able to,
3760 but fail if we can't. */
3761 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3762 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3763 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3764 TYPE_ALIGN (TREE_TYPE (rinner
))),
3766 if (nmode
== VOIDmode
)
3769 /* Set signed and unsigned types of the precision of this mode for the
3771 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3773 /* Compute the bit position and size for the new reference and our offset
3774 within it. If the new reference is the same size as the original, we
3775 won't optimize anything, so return zero. */
3776 nbitsize
= GET_MODE_BITSIZE (nmode
);
3777 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3779 if (nbitsize
== lbitsize
)
3782 if (BYTES_BIG_ENDIAN
)
3783 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3785 /* Make the mask to be used against the extracted field. */
3786 mask
= build_int_cst_type (unsigned_type
, -1);
3787 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3788 mask
= const_binop (RSHIFT_EXPR
, mask
,
3789 size_int (nbitsize
- lbitsize
- lbitpos
));
3792 /* If not comparing with constant, just rework the comparison
3794 return fold_build2_loc (loc
, code
, compare_type
,
3795 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3796 make_bit_field_ref (loc
, linner
,
3801 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3802 make_bit_field_ref (loc
, rinner
,
3808 /* Otherwise, we are handling the constant case. See if the constant is too
3809 big for the field. Warn and return a tree of for 0 (false) if so. We do
3810 this not only for its own sake, but to avoid having to test for this
3811 error case below. If we didn't, we might generate wrong code.
3813 For unsigned fields, the constant shifted right by the field length should
3814 be all zero. For signed fields, the high-order bits should agree with
3819 if (wi::lrshift (rhs
, lbitsize
) != 0)
3821 warning (0, "comparison is always %d due to width of bit-field",
3823 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3828 wide_int tem
= wi::arshift (rhs
, lbitsize
- 1);
3829 if (tem
!= 0 && tem
!= -1)
3831 warning (0, "comparison is always %d due to width of bit-field",
3833 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3837 /* Single-bit compares should always be against zero. */
3838 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3840 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3841 rhs
= build_int_cst (type
, 0);
3844 /* Make a new bitfield reference, shift the constant over the
3845 appropriate number of bits and mask it with the computed mask
3846 (in case this was a signed field). If we changed it, make a new one. */
3847 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3849 rhs
= const_binop (BIT_AND_EXPR
,
3850 const_binop (LSHIFT_EXPR
,
3851 fold_convert_loc (loc
, unsigned_type
, rhs
),
3852 size_int (lbitpos
)),
3855 lhs
= build2_loc (loc
, code
, compare_type
,
3856 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3860 /* Subroutine for fold_truth_andor_1: decode a field reference.
3862 If EXP is a comparison reference, we return the innermost reference.
3864 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3865 set to the starting bit number.
3867 If the innermost field can be completely contained in a mode-sized
3868 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3870 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3871 otherwise it is not changed.
3873 *PUNSIGNEDP is set to the signedness of the field.
3875 *PMASK is set to the mask used. This is either contained in a
3876 BIT_AND_EXPR or derived from the width of the field.
3878 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3880 Return 0 if this is not a component reference or is one that we can't
3881 do anything with. */
3884 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3885 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
3886 int *punsignedp
, int *pvolatilep
,
3887 tree
*pmask
, tree
*pand_mask
)
3889 tree outer_type
= 0;
3891 tree mask
, inner
, offset
;
3893 unsigned int precision
;
3895 /* All the optimizations using this function assume integer fields.
3896 There are problems with FP fields since the type_for_size call
3897 below can fail for, e.g., XFmode. */
3898 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3901 /* We are interested in the bare arrangement of bits, so strip everything
3902 that doesn't affect the machine mode. However, record the type of the
3903 outermost expression if it may matter below. */
3904 if (CONVERT_EXPR_P (exp
)
3905 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3906 outer_type
= TREE_TYPE (exp
);
3909 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3911 and_mask
= TREE_OPERAND (exp
, 1);
3912 exp
= TREE_OPERAND (exp
, 0);
3913 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3914 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3918 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3919 punsignedp
, pvolatilep
, false);
3920 if ((inner
== exp
&& and_mask
== 0)
3921 || *pbitsize
< 0 || offset
!= 0
3922 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3925 /* If the number of bits in the reference is the same as the bitsize of
3926 the outer type, then the outer type gives the signedness. Otherwise
3927 (in case of a small bitfield) the signedness is unchanged. */
3928 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3929 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3931 /* Compute the mask to access the bitfield. */
3932 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3933 precision
= TYPE_PRECISION (unsigned_type
);
3935 mask
= build_int_cst_type (unsigned_type
, -1);
3937 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3938 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3940 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3942 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3943 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3946 *pand_mask
= and_mask
;
3950 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3951 bit positions and MASK is SIGNED. */
3954 all_ones_mask_p (const_tree mask
, unsigned int size
)
3956 tree type
= TREE_TYPE (mask
);
3957 unsigned int precision
= TYPE_PRECISION (type
);
3959 /* If this function returns true when the type of the mask is
3960 UNSIGNED, then there will be errors. In particular see
3961 gcc.c-torture/execute/990326-1.c. There does not appear to be
3962 any documentation paper trail as to why this is so. But the pre
3963 wide-int worked with that restriction and it has been preserved
3965 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
3968 return wi::mask (size
, false, precision
) == mask
;
3971 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3972 represents the sign bit of EXP's type. If EXP represents a sign
3973 or zero extension, also test VAL against the unextended type.
3974 The return value is the (sub)expression whose sign bit is VAL,
3975 or NULL_TREE otherwise. */
3978 sign_bit_p (tree exp
, const_tree val
)
3983 /* Tree EXP must have an integral type. */
3984 t
= TREE_TYPE (exp
);
3985 if (! INTEGRAL_TYPE_P (t
))
3988 /* Tree VAL must be an integer constant. */
3989 if (TREE_CODE (val
) != INTEGER_CST
3990 || TREE_OVERFLOW (val
))
3993 width
= TYPE_PRECISION (t
);
3994 if (wi::only_sign_bit_p (val
, width
))
3997 /* Handle extension from a narrower type. */
3998 if (TREE_CODE (exp
) == NOP_EXPR
3999 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
4000 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
4005 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4006 to be evaluated unconditionally. */
4009 simple_operand_p (const_tree exp
)
4011 /* Strip any conversions that don't change the machine mode. */
4014 return (CONSTANT_CLASS_P (exp
)
4015 || TREE_CODE (exp
) == SSA_NAME
4017 && ! TREE_ADDRESSABLE (exp
)
4018 && ! TREE_THIS_VOLATILE (exp
)
4019 && ! DECL_NONLOCAL (exp
)
4020 /* Don't regard global variables as simple. They may be
4021 allocated in ways unknown to the compiler (shared memory,
4022 #pragma weak, etc). */
4023 && ! TREE_PUBLIC (exp
)
4024 && ! DECL_EXTERNAL (exp
)
4025 /* Weakrefs are not safe to be read, since they can be NULL.
4026 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4027 have DECL_WEAK flag set. */
4028 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
4029 /* Loading a static variable is unduly expensive, but global
4030 registers aren't expensive. */
4031 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
4034 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4035 to be evaluated unconditionally.
4036 I addition to simple_operand_p, we assume that comparisons, conversions,
4037 and logic-not operations are simple, if their operands are simple, too. */
4040 simple_operand_p_2 (tree exp
)
4042 enum tree_code code
;
4044 if (TREE_SIDE_EFFECTS (exp
)
4045 || tree_could_trap_p (exp
))
4048 while (CONVERT_EXPR_P (exp
))
4049 exp
= TREE_OPERAND (exp
, 0);
4051 code
= TREE_CODE (exp
);
4053 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4054 return (simple_operand_p (TREE_OPERAND (exp
, 0))
4055 && simple_operand_p (TREE_OPERAND (exp
, 1)));
4057 if (code
== TRUTH_NOT_EXPR
)
4058 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
4060 return simple_operand_p (exp
);
4064 /* The following functions are subroutines to fold_range_test and allow it to
4065 try to change a logical combination of comparisons into a range test.
4068 X == 2 || X == 3 || X == 4 || X == 5
4072 (unsigned) (X - 2) <= 3
4074 We describe each set of comparisons as being either inside or outside
4075 a range, using a variable named like IN_P, and then describe the
4076 range with a lower and upper bound. If one of the bounds is omitted,
4077 it represents either the highest or lowest value of the type.
4079 In the comments below, we represent a range by two numbers in brackets
4080 preceded by a "+" to designate being inside that range, or a "-" to
4081 designate being outside that range, so the condition can be inverted by
4082 flipping the prefix. An omitted bound is represented by a "-". For
4083 example, "- [-, 10]" means being outside the range starting at the lowest
4084 possible value and ending at 10, in other words, being greater than 10.
4085 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4088 We set up things so that the missing bounds are handled in a consistent
4089 manner so neither a missing bound nor "true" and "false" need to be
4090 handled using a special case. */
4092 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4093 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4094 and UPPER1_P are nonzero if the respective argument is an upper bound
4095 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4096 must be specified for a comparison. ARG1 will be converted to ARG0's
4097 type if both are specified. */
4100 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
4101 tree arg1
, int upper1_p
)
4107 /* If neither arg represents infinity, do the normal operation.
4108 Else, if not a comparison, return infinity. Else handle the special
4109 comparison rules. Note that most of the cases below won't occur, but
4110 are handled for consistency. */
4112 if (arg0
!= 0 && arg1
!= 0)
4114 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
4115 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
4117 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
4120 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4123 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4124 for neither. In real maths, we cannot assume open ended ranges are
4125 the same. But, this is computer arithmetic, where numbers are finite.
4126 We can therefore make the transformation of any unbounded range with
4127 the value Z, Z being greater than any representable number. This permits
4128 us to treat unbounded ranges as equal. */
4129 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
4130 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
4134 result
= sgn0
== sgn1
;
4137 result
= sgn0
!= sgn1
;
4140 result
= sgn0
< sgn1
;
4143 result
= sgn0
<= sgn1
;
4146 result
= sgn0
> sgn1
;
4149 result
= sgn0
>= sgn1
;
4155 return constant_boolean_node (result
, type
);
4158 /* Helper routine for make_range. Perform one step for it, return
4159 new expression if the loop should continue or NULL_TREE if it should
4163 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
4164 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
4165 bool *strict_overflow_p
)
4167 tree arg0_type
= TREE_TYPE (arg0
);
4168 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
4169 int in_p
= *p_in_p
, n_in_p
;
4173 case TRUTH_NOT_EXPR
:
4174 /* We can only do something if the range is testing for zero. */
4175 if (low
== NULL_TREE
|| high
== NULL_TREE
4176 || ! integer_zerop (low
) || ! integer_zerop (high
))
4181 case EQ_EXPR
: case NE_EXPR
:
4182 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4183 /* We can only do something if the range is testing for zero
4184 and if the second operand is an integer constant. Note that
4185 saying something is "in" the range we make is done by
4186 complementing IN_P since it will set in the initial case of
4187 being not equal to zero; "out" is leaving it alone. */
4188 if (low
== NULL_TREE
|| high
== NULL_TREE
4189 || ! integer_zerop (low
) || ! integer_zerop (high
)
4190 || TREE_CODE (arg1
) != INTEGER_CST
)
4195 case NE_EXPR
: /* - [c, c] */
4198 case EQ_EXPR
: /* + [c, c] */
4199 in_p
= ! in_p
, low
= high
= arg1
;
4201 case GT_EXPR
: /* - [-, c] */
4202 low
= 0, high
= arg1
;
4204 case GE_EXPR
: /* + [c, -] */
4205 in_p
= ! in_p
, low
= arg1
, high
= 0;
4207 case LT_EXPR
: /* - [c, -] */
4208 low
= arg1
, high
= 0;
4210 case LE_EXPR
: /* + [-, c] */
4211 in_p
= ! in_p
, low
= 0, high
= arg1
;
4217 /* If this is an unsigned comparison, we also know that EXP is
4218 greater than or equal to zero. We base the range tests we make
4219 on that fact, so we record it here so we can parse existing
4220 range tests. We test arg0_type since often the return type
4221 of, e.g. EQ_EXPR, is boolean. */
4222 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4224 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4226 build_int_cst (arg0_type
, 0),
4230 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
4232 /* If the high bound is missing, but we have a nonzero low
4233 bound, reverse the range so it goes from zero to the low bound
4235 if (high
== 0 && low
&& ! integer_zerop (low
))
4238 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
4239 build_int_cst (TREE_TYPE (low
), 1), 0);
4240 low
= build_int_cst (arg0_type
, 0);
4250 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4251 low and high are non-NULL, then normalize will DTRT. */
4252 if (!TYPE_UNSIGNED (arg0_type
)
4253 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4255 if (low
== NULL_TREE
)
4256 low
= TYPE_MIN_VALUE (arg0_type
);
4257 if (high
== NULL_TREE
)
4258 high
= TYPE_MAX_VALUE (arg0_type
);
4261 /* (-x) IN [a,b] -> x in [-b, -a] */
4262 n_low
= range_binop (MINUS_EXPR
, exp_type
,
4263 build_int_cst (exp_type
, 0),
4265 n_high
= range_binop (MINUS_EXPR
, exp_type
,
4266 build_int_cst (exp_type
, 0),
4268 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
4274 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
4275 build_int_cst (exp_type
, 1));
4279 if (TREE_CODE (arg1
) != INTEGER_CST
)
4282 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4283 move a constant to the other side. */
4284 if (!TYPE_UNSIGNED (arg0_type
)
4285 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4288 /* If EXP is signed, any overflow in the computation is undefined,
4289 so we don't worry about it so long as our computations on
4290 the bounds don't overflow. For unsigned, overflow is defined
4291 and this is exactly the right thing. */
4292 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4293 arg0_type
, low
, 0, arg1
, 0);
4294 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4295 arg0_type
, high
, 1, arg1
, 0);
4296 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4297 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4300 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4301 *strict_overflow_p
= true;
4304 /* Check for an unsigned range which has wrapped around the maximum
4305 value thus making n_high < n_low, and normalize it. */
4306 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4308 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4309 build_int_cst (TREE_TYPE (n_high
), 1), 0);
4310 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4311 build_int_cst (TREE_TYPE (n_low
), 1), 0);
4313 /* If the range is of the form +/- [ x+1, x ], we won't
4314 be able to normalize it. But then, it represents the
4315 whole range or the empty set, so make it
4317 if (tree_int_cst_equal (n_low
, low
)
4318 && tree_int_cst_equal (n_high
, high
))
4324 low
= n_low
, high
= n_high
;
4332 case NON_LVALUE_EXPR
:
4333 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4336 if (! INTEGRAL_TYPE_P (arg0_type
)
4337 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4338 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4341 n_low
= low
, n_high
= high
;
4344 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
4347 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
4349 /* If we're converting arg0 from an unsigned type, to exp,
4350 a signed type, we will be doing the comparison as unsigned.
4351 The tests above have already verified that LOW and HIGH
4354 So we have to ensure that we will handle large unsigned
4355 values the same way that the current signed bounds treat
4358 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4362 /* For fixed-point modes, we need to pass the saturating flag
4363 as the 2nd parameter. */
4364 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
4366 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
4367 TYPE_SATURATING (arg0_type
));
4370 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4372 /* A range without an upper bound is, naturally, unbounded.
4373 Since convert would have cropped a very large value, use
4374 the max value for the destination type. */
4376 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4377 : TYPE_MAX_VALUE (arg0_type
);
4379 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4380 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4381 fold_convert_loc (loc
, arg0_type
,
4383 build_int_cst (arg0_type
, 1));
4385 /* If the low bound is specified, "and" the range with the
4386 range for which the original unsigned value will be
4390 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4391 1, fold_convert_loc (loc
, arg0_type
,
4396 in_p
= (n_in_p
== in_p
);
4400 /* Otherwise, "or" the range with the range of the input
4401 that will be interpreted as negative. */
4402 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4403 1, fold_convert_loc (loc
, arg0_type
,
4408 in_p
= (in_p
!= n_in_p
);
4422 /* Given EXP, a logical expression, set the range it is testing into
4423 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4424 actually being tested. *PLOW and *PHIGH will be made of the same
4425 type as the returned expression. If EXP is not a comparison, we
4426 will most likely not be returning a useful value and range. Set
4427 *STRICT_OVERFLOW_P to true if the return value is only valid
4428 because signed overflow is undefined; otherwise, do not change
4429 *STRICT_OVERFLOW_P. */
4432 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4433 bool *strict_overflow_p
)
4435 enum tree_code code
;
4436 tree arg0
, arg1
= NULL_TREE
;
4437 tree exp_type
, nexp
;
4440 location_t loc
= EXPR_LOCATION (exp
);
4442 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4443 and see if we can refine the range. Some of the cases below may not
4444 happen, but it doesn't seem worth worrying about this. We "continue"
4445 the outer loop when we've changed something; otherwise we "break"
4446 the switch, which will "break" the while. */
4449 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4453 code
= TREE_CODE (exp
);
4454 exp_type
= TREE_TYPE (exp
);
4457 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4459 if (TREE_OPERAND_LENGTH (exp
) > 0)
4460 arg0
= TREE_OPERAND (exp
, 0);
4461 if (TREE_CODE_CLASS (code
) == tcc_binary
4462 || TREE_CODE_CLASS (code
) == tcc_comparison
4463 || (TREE_CODE_CLASS (code
) == tcc_expression
4464 && TREE_OPERAND_LENGTH (exp
) > 1))
4465 arg1
= TREE_OPERAND (exp
, 1);
4467 if (arg0
== NULL_TREE
)
4470 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4471 &high
, &in_p
, strict_overflow_p
);
4472 if (nexp
== NULL_TREE
)
4477 /* If EXP is a constant, we can evaluate whether this is true or false. */
4478 if (TREE_CODE (exp
) == INTEGER_CST
)
4480 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4482 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4488 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4492 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4493 type, TYPE, return an expression to test if EXP is in (or out of, depending
4494 on IN_P) the range. Return 0 if the test couldn't be created. */
4497 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4498 tree low
, tree high
)
4500 tree etype
= TREE_TYPE (exp
), value
;
4502 #ifdef HAVE_canonicalize_funcptr_for_compare
4503 /* Disable this optimization for function pointer expressions
4504 on targets that require function pointer canonicalization. */
4505 if (HAVE_canonicalize_funcptr_for_compare
4506 && TREE_CODE (etype
) == POINTER_TYPE
4507 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4513 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4515 return invert_truthvalue_loc (loc
, value
);
4520 if (low
== 0 && high
== 0)
4521 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
4524 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4525 fold_convert_loc (loc
, etype
, high
));
4528 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4529 fold_convert_loc (loc
, etype
, low
));
4531 if (operand_equal_p (low
, high
, 0))
4532 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4533 fold_convert_loc (loc
, etype
, low
));
4535 if (integer_zerop (low
))
4537 if (! TYPE_UNSIGNED (etype
))
4539 etype
= unsigned_type_for (etype
);
4540 high
= fold_convert_loc (loc
, etype
, high
);
4541 exp
= fold_convert_loc (loc
, etype
, exp
);
4543 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4546 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4547 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4549 int prec
= TYPE_PRECISION (etype
);
4551 if (wi::mask (prec
- 1, false, prec
) == high
)
4553 if (TYPE_UNSIGNED (etype
))
4555 tree signed_etype
= signed_type_for (etype
);
4556 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4558 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4560 etype
= signed_etype
;
4561 exp
= fold_convert_loc (loc
, etype
, exp
);
4563 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4564 build_int_cst (etype
, 0));
4568 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4569 This requires wrap-around arithmetics for the type of the expression.
4570 First make sure that arithmetics in this type is valid, then make sure
4571 that it wraps around. */
4572 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4573 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4574 TYPE_UNSIGNED (etype
));
4576 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4578 tree utype
, minv
, maxv
;
4580 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4581 for the type in question, as we rely on this here. */
4582 utype
= unsigned_type_for (etype
);
4583 maxv
= fold_convert_loc (loc
, utype
, TYPE_MAX_VALUE (etype
));
4584 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4585 build_int_cst (TREE_TYPE (maxv
), 1), 1);
4586 minv
= fold_convert_loc (loc
, utype
, TYPE_MIN_VALUE (etype
));
4588 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4595 high
= fold_convert_loc (loc
, etype
, high
);
4596 low
= fold_convert_loc (loc
, etype
, low
);
4597 exp
= fold_convert_loc (loc
, etype
, exp
);
4599 value
= const_binop (MINUS_EXPR
, high
, low
);
4602 if (POINTER_TYPE_P (etype
))
4604 if (value
!= 0 && !TREE_OVERFLOW (value
))
4606 low
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (low
), low
);
4607 return build_range_check (loc
, type
,
4608 fold_build_pointer_plus_loc (loc
, exp
, low
),
4609 1, build_int_cst (etype
, 0), value
);
4614 if (value
!= 0 && !TREE_OVERFLOW (value
))
4615 return build_range_check (loc
, type
,
4616 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4617 1, build_int_cst (etype
, 0), value
);
4622 /* Return the predecessor of VAL in its type, handling the infinite case. */
4625 range_predecessor (tree val
)
4627 tree type
= TREE_TYPE (val
);
4629 if (INTEGRAL_TYPE_P (type
)
4630 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4633 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
4634 build_int_cst (TREE_TYPE (val
), 1), 0);
4637 /* Return the successor of VAL in its type, handling the infinite case. */
4640 range_successor (tree val
)
4642 tree type
= TREE_TYPE (val
);
4644 if (INTEGRAL_TYPE_P (type
)
4645 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4648 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
4649 build_int_cst (TREE_TYPE (val
), 1), 0);
4652 /* Given two ranges, see if we can merge them into one. Return 1 if we
4653 can, 0 if we can't. Set the output range into the specified parameters. */
4656 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4657 tree high0
, int in1_p
, tree low1
, tree high1
)
4665 int lowequal
= ((low0
== 0 && low1
== 0)
4666 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4667 low0
, 0, low1
, 0)));
4668 int highequal
= ((high0
== 0 && high1
== 0)
4669 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4670 high0
, 1, high1
, 1)));
4672 /* Make range 0 be the range that starts first, or ends last if they
4673 start at the same value. Swap them if it isn't. */
4674 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4677 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4678 high1
, 1, high0
, 1))))
4680 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4681 tem
= low0
, low0
= low1
, low1
= tem
;
4682 tem
= high0
, high0
= high1
, high1
= tem
;
4685 /* Now flag two cases, whether the ranges are disjoint or whether the
4686 second range is totally subsumed in the first. Note that the tests
4687 below are simplified by the ones above. */
4688 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4689 high0
, 1, low1
, 0));
4690 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4691 high1
, 1, high0
, 1));
4693 /* We now have four cases, depending on whether we are including or
4694 excluding the two ranges. */
4697 /* If they don't overlap, the result is false. If the second range
4698 is a subset it is the result. Otherwise, the range is from the start
4699 of the second to the end of the first. */
4701 in_p
= 0, low
= high
= 0;
4703 in_p
= 1, low
= low1
, high
= high1
;
4705 in_p
= 1, low
= low1
, high
= high0
;
4708 else if (in0_p
&& ! in1_p
)
4710 /* If they don't overlap, the result is the first range. If they are
4711 equal, the result is false. If the second range is a subset of the
4712 first, and the ranges begin at the same place, we go from just after
4713 the end of the second range to the end of the first. If the second
4714 range is not a subset of the first, or if it is a subset and both
4715 ranges end at the same place, the range starts at the start of the
4716 first range and ends just before the second range.
4717 Otherwise, we can't describe this as a single range. */
4719 in_p
= 1, low
= low0
, high
= high0
;
4720 else if (lowequal
&& highequal
)
4721 in_p
= 0, low
= high
= 0;
4722 else if (subset
&& lowequal
)
4724 low
= range_successor (high1
);
4729 /* We are in the weird situation where high0 > high1 but
4730 high1 has no successor. Punt. */
4734 else if (! subset
|| highequal
)
4737 high
= range_predecessor (low1
);
4741 /* low0 < low1 but low1 has no predecessor. Punt. */
4749 else if (! in0_p
&& in1_p
)
4751 /* If they don't overlap, the result is the second range. If the second
4752 is a subset of the first, the result is false. Otherwise,
4753 the range starts just after the first range and ends at the
4754 end of the second. */
4756 in_p
= 1, low
= low1
, high
= high1
;
4757 else if (subset
|| highequal
)
4758 in_p
= 0, low
= high
= 0;
4761 low
= range_successor (high0
);
4766 /* high1 > high0 but high0 has no successor. Punt. */
4774 /* The case where we are excluding both ranges. Here the complex case
4775 is if they don't overlap. In that case, the only time we have a
4776 range is if they are adjacent. If the second is a subset of the
4777 first, the result is the first. Otherwise, the range to exclude
4778 starts at the beginning of the first range and ends at the end of the
4782 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4783 range_successor (high0
),
4785 in_p
= 0, low
= low0
, high
= high1
;
4788 /* Canonicalize - [min, x] into - [-, x]. */
4789 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4790 switch (TREE_CODE (TREE_TYPE (low0
)))
4793 if (TYPE_PRECISION (TREE_TYPE (low0
))
4794 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4798 if (tree_int_cst_equal (low0
,
4799 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4803 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4804 && integer_zerop (low0
))
4811 /* Canonicalize - [x, max] into - [x, -]. */
4812 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4813 switch (TREE_CODE (TREE_TYPE (high1
)))
4816 if (TYPE_PRECISION (TREE_TYPE (high1
))
4817 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4821 if (tree_int_cst_equal (high1
,
4822 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4826 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4827 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4829 build_int_cst (TREE_TYPE (high1
), 1),
4837 /* The ranges might be also adjacent between the maximum and
4838 minimum values of the given type. For
4839 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4840 return + [x + 1, y - 1]. */
4841 if (low0
== 0 && high1
== 0)
4843 low
= range_successor (high0
);
4844 high
= range_predecessor (low1
);
4845 if (low
== 0 || high
== 0)
4855 in_p
= 0, low
= low0
, high
= high0
;
4857 in_p
= 0, low
= low0
, high
= high1
;
4860 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4865 /* Subroutine of fold, looking inside expressions of the form
4866 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4867 of the COND_EXPR. This function is being used also to optimize
4868 A op B ? C : A, by reversing the comparison first.
4870 Return a folded expression whose code is not a COND_EXPR
4871 anymore, or NULL_TREE if no folding opportunity is found. */
4874 fold_cond_expr_with_comparison (location_t loc
, tree type
,
4875 tree arg0
, tree arg1
, tree arg2
)
4877 enum tree_code comp_code
= TREE_CODE (arg0
);
4878 tree arg00
= TREE_OPERAND (arg0
, 0);
4879 tree arg01
= TREE_OPERAND (arg0
, 1);
4880 tree arg1_type
= TREE_TYPE (arg1
);
4886 /* If we have A op 0 ? A : -A, consider applying the following
4889 A == 0? A : -A same as -A
4890 A != 0? A : -A same as A
4891 A >= 0? A : -A same as abs (A)
4892 A > 0? A : -A same as abs (A)
4893 A <= 0? A : -A same as -abs (A)
4894 A < 0? A : -A same as -abs (A)
4896 None of these transformations work for modes with signed
4897 zeros. If A is +/-0, the first two transformations will
4898 change the sign of the result (from +0 to -0, or vice
4899 versa). The last four will fix the sign of the result,
4900 even though the original expressions could be positive or
4901 negative, depending on the sign of A.
4903 Note that all these transformations are correct if A is
4904 NaN, since the two alternatives (A and -A) are also NaNs. */
4905 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4906 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4907 ? real_zerop (arg01
)
4908 : integer_zerop (arg01
))
4909 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4910 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4911 /* In the case that A is of the form X-Y, '-A' (arg2) may
4912 have already been folded to Y-X, check for that. */
4913 || (TREE_CODE (arg1
) == MINUS_EXPR
4914 && TREE_CODE (arg2
) == MINUS_EXPR
4915 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4916 TREE_OPERAND (arg2
, 1), 0)
4917 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4918 TREE_OPERAND (arg2
, 0), 0))))
4923 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
4924 return pedantic_non_lvalue_loc (loc
,
4925 fold_convert_loc (loc
, type
,
4926 negate_expr (tem
)));
4929 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4932 if (flag_trapping_math
)
4937 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4938 arg1
= fold_convert_loc (loc
, signed_type_for
4939 (TREE_TYPE (arg1
)), arg1
);
4940 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4941 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4944 if (flag_trapping_math
)
4948 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4949 arg1
= fold_convert_loc (loc
, signed_type_for
4950 (TREE_TYPE (arg1
)), arg1
);
4951 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4952 return negate_expr (fold_convert_loc (loc
, type
, tem
));
4954 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4958 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4959 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4960 both transformations are correct when A is NaN: A != 0
4961 is then true, and A == 0 is false. */
4963 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4964 && integer_zerop (arg01
) && integer_zerop (arg2
))
4966 if (comp_code
== NE_EXPR
)
4967 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4968 else if (comp_code
== EQ_EXPR
)
4969 return build_zero_cst (type
);
4972 /* Try some transformations of A op B ? A : B.
4974 A == B? A : B same as B
4975 A != B? A : B same as A
4976 A >= B? A : B same as max (A, B)
4977 A > B? A : B same as max (B, A)
4978 A <= B? A : B same as min (A, B)
4979 A < B? A : B same as min (B, A)
4981 As above, these transformations don't work in the presence
4982 of signed zeros. For example, if A and B are zeros of
4983 opposite sign, the first two transformations will change
4984 the sign of the result. In the last four, the original
4985 expressions give different results for (A=+0, B=-0) and
4986 (A=-0, B=+0), but the transformed expressions do not.
4988 The first two transformations are correct if either A or B
4989 is a NaN. In the first transformation, the condition will
4990 be false, and B will indeed be chosen. In the case of the
4991 second transformation, the condition A != B will be true,
4992 and A will be chosen.
4994 The conversions to max() and min() are not correct if B is
4995 a number and A is not. The conditions in the original
4996 expressions will be false, so all four give B. The min()
4997 and max() versions would give a NaN instead. */
4998 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
4999 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
5000 /* Avoid these transformations if the COND_EXPR may be used
5001 as an lvalue in the C++ front-end. PR c++/19199. */
5003 || VECTOR_TYPE_P (type
)
5004 || (! lang_GNU_CXX ()
5005 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
5006 || ! maybe_lvalue_p (arg1
)
5007 || ! maybe_lvalue_p (arg2
)))
5009 tree comp_op0
= arg00
;
5010 tree comp_op1
= arg01
;
5011 tree comp_type
= TREE_TYPE (comp_op0
);
5013 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5014 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
5024 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
5026 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
5031 /* In C++ a ?: expression can be an lvalue, so put the
5032 operand which will be used if they are equal first
5033 so that we can convert this back to the
5034 corresponding COND_EXPR. */
5035 if (!HONOR_NANS (arg1
))
5037 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5038 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5039 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
5040 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
5041 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
5042 comp_op1
, comp_op0
);
5043 return pedantic_non_lvalue_loc (loc
,
5044 fold_convert_loc (loc
, type
, tem
));
5051 if (!HONOR_NANS (arg1
))
5053 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5054 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5055 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
5056 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
5057 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
5058 comp_op1
, comp_op0
);
5059 return pedantic_non_lvalue_loc (loc
,
5060 fold_convert_loc (loc
, type
, tem
));
5064 if (!HONOR_NANS (arg1
))
5065 return pedantic_non_lvalue_loc (loc
,
5066 fold_convert_loc (loc
, type
, arg2
));
5069 if (!HONOR_NANS (arg1
))
5070 return pedantic_non_lvalue_loc (loc
,
5071 fold_convert_loc (loc
, type
, arg1
));
5074 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5079 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5080 we might still be able to simplify this. For example,
5081 if C1 is one less or one more than C2, this might have started
5082 out as a MIN or MAX and been transformed by this function.
5083 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5085 if (INTEGRAL_TYPE_P (type
)
5086 && TREE_CODE (arg01
) == INTEGER_CST
5087 && TREE_CODE (arg2
) == INTEGER_CST
)
5091 if (TREE_CODE (arg1
) == INTEGER_CST
)
5093 /* We can replace A with C1 in this case. */
5094 arg1
= fold_convert_loc (loc
, type
, arg01
);
5095 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
5098 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5099 MIN_EXPR, to preserve the signedness of the comparison. */
5100 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5102 && operand_equal_p (arg01
,
5103 const_binop (PLUS_EXPR
, arg2
,
5104 build_int_cst (type
, 1)),
5107 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
5108 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5110 return pedantic_non_lvalue_loc (loc
,
5111 fold_convert_loc (loc
, type
, tem
));
5116 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5118 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5120 && operand_equal_p (arg01
,
5121 const_binop (MINUS_EXPR
, arg2
,
5122 build_int_cst (type
, 1)),
5125 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
5126 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5128 return pedantic_non_lvalue_loc (loc
,
5129 fold_convert_loc (loc
, type
, tem
));
5134 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5135 MAX_EXPR, to preserve the signedness of the comparison. */
5136 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
5138 && operand_equal_p (arg01
,
5139 const_binop (MINUS_EXPR
, arg2
,
5140 build_int_cst (type
, 1)),
5143 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5144 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5146 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5151 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5152 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
5154 && operand_equal_p (arg01
,
5155 const_binop (PLUS_EXPR
, arg2
,
5156 build_int_cst (type
, 1)),
5159 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
5160 fold_convert_loc (loc
, TREE_TYPE (arg00
),
5162 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
5176 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5177 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5178 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5182 /* EXP is some logical combination of boolean tests. See if we can
5183 merge it into some range test. Return the new tree if so. */
5186 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
5189 int or_op
= (code
== TRUTH_ORIF_EXPR
5190 || code
== TRUTH_OR_EXPR
);
5191 int in0_p
, in1_p
, in_p
;
5192 tree low0
, low1
, low
, high0
, high1
, high
;
5193 bool strict_overflow_p
= false;
5195 const char * const warnmsg
= G_("assuming signed overflow does not occur "
5196 "when simplifying range test");
5198 if (!INTEGRAL_TYPE_P (type
))
5201 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
5202 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
5204 /* If this is an OR operation, invert both sides; we will invert
5205 again at the end. */
5207 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
5209 /* If both expressions are the same, if we can merge the ranges, and we
5210 can build the range test, return it or it inverted. If one of the
5211 ranges is always true or always false, consider it to be the same
5212 expression as the other. */
5213 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
5214 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
5216 && 0 != (tem
= (build_range_check (loc
, type
,
5218 : rhs
!= 0 ? rhs
: integer_zero_node
,
5221 if (strict_overflow_p
)
5222 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
5223 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
5226 /* On machines where the branch cost is expensive, if this is a
5227 short-circuited branch and the underlying object on both sides
5228 is the same, make a non-short-circuit operation. */
5229 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5230 && lhs
!= 0 && rhs
!= 0
5231 && (code
== TRUTH_ANDIF_EXPR
5232 || code
== TRUTH_ORIF_EXPR
)
5233 && operand_equal_p (lhs
, rhs
, 0))
5235 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5236 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5237 which cases we can't do this. */
5238 if (simple_operand_p (lhs
))
5239 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5240 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5243 else if (!lang_hooks
.decls
.global_bindings_p ()
5244 && !CONTAINS_PLACEHOLDER_P (lhs
))
5246 tree common
= save_expr (lhs
);
5248 if (0 != (lhs
= build_range_check (loc
, type
, common
,
5249 or_op
? ! in0_p
: in0_p
,
5251 && (0 != (rhs
= build_range_check (loc
, type
, common
,
5252 or_op
? ! in1_p
: in1_p
,
5255 if (strict_overflow_p
)
5256 fold_overflow_warning (warnmsg
,
5257 WARN_STRICT_OVERFLOW_COMPARISON
);
5258 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
5259 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5268 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5269 bit value. Arrange things so the extra bits will be set to zero if and
5270 only if C is signed-extended to its full width. If MASK is nonzero,
5271 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5274 unextend (tree c
, int p
, int unsignedp
, tree mask
)
5276 tree type
= TREE_TYPE (c
);
5277 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
5280 if (p
== modesize
|| unsignedp
)
5283 /* We work by getting just the sign bit into the low-order bit, then
5284 into the high-order bit, then sign-extend. We then XOR that value
5286 temp
= build_int_cst (TREE_TYPE (c
), wi::extract_uhwi (c
, p
- 1, 1));
5288 /* We must use a signed type in order to get an arithmetic right shift.
5289 However, we must also avoid introducing accidental overflows, so that
5290 a subsequent call to integer_zerop will work. Hence we must
5291 do the type conversion here. At this point, the constant is either
5292 zero or one, and the conversion to a signed type can never overflow.
5293 We could get an overflow if this conversion is done anywhere else. */
5294 if (TYPE_UNSIGNED (type
))
5295 temp
= fold_convert (signed_type_for (type
), temp
);
5297 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
5298 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
5300 temp
= const_binop (BIT_AND_EXPR
, temp
,
5301 fold_convert (TREE_TYPE (c
), mask
));
5302 /* If necessary, convert the type back to match the type of C. */
5303 if (TYPE_UNSIGNED (type
))
5304 temp
= fold_convert (type
, temp
);
5306 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
5309 /* For an expression that has the form
5313 we can drop one of the inner expressions and simplify to
5317 LOC is the location of the resulting expression. OP is the inner
5318 logical operation; the left-hand side in the examples above, while CMPOP
5319 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5320 removing a condition that guards another, as in
5321 (A != NULL && A->...) || A == NULL
5322 which we must not transform. If RHS_ONLY is true, only eliminate the
5323 right-most operand of the inner logical operation. */
5326 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
5329 tree type
= TREE_TYPE (cmpop
);
5330 enum tree_code code
= TREE_CODE (cmpop
);
5331 enum tree_code truthop_code
= TREE_CODE (op
);
5332 tree lhs
= TREE_OPERAND (op
, 0);
5333 tree rhs
= TREE_OPERAND (op
, 1);
5334 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
5335 enum tree_code rhs_code
= TREE_CODE (rhs
);
5336 enum tree_code lhs_code
= TREE_CODE (lhs
);
5337 enum tree_code inv_code
;
5339 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
5342 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
5345 if (rhs_code
== truthop_code
)
5347 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
5348 if (newrhs
!= NULL_TREE
)
5351 rhs_code
= TREE_CODE (rhs
);
5354 if (lhs_code
== truthop_code
&& !rhs_only
)
5356 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
5357 if (newlhs
!= NULL_TREE
)
5360 lhs_code
= TREE_CODE (lhs
);
5364 inv_code
= invert_tree_comparison (code
, HONOR_NANS (type
));
5365 if (inv_code
== rhs_code
5366 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5367 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5369 if (!rhs_only
&& inv_code
== lhs_code
5370 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5371 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5373 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
5374 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5379 /* Find ways of folding logical expressions of LHS and RHS:
5380 Try to merge two comparisons to the same innermost item.
5381 Look for range tests like "ch >= '0' && ch <= '9'".
5382 Look for combinations of simple terms on machines with expensive branches
5383 and evaluate the RHS unconditionally.
5385 For example, if we have p->a == 2 && p->b == 4 and we can make an
5386 object large enough to span both A and B, we can do this with a comparison
5387 against the object ANDed with the a mask.
5389 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5390 operations to do this with one comparison.
5392 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5393 function and the one above.
5395 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5396 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5398 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5401 We return the simplified tree or 0 if no optimization is possible. */
5404 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
5407 /* If this is the "or" of two comparisons, we can do something if
5408 the comparisons are NE_EXPR. If this is the "and", we can do something
5409 if the comparisons are EQ_EXPR. I.e.,
5410 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5412 WANTED_CODE is this operation code. For single bit fields, we can
5413 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5414 comparison for one-bit fields. */
5416 enum tree_code wanted_code
;
5417 enum tree_code lcode
, rcode
;
5418 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5419 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5420 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5421 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5422 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5423 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5424 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5425 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5426 machine_mode lnmode
, rnmode
;
5427 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5428 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5429 tree l_const
, r_const
;
5430 tree lntype
, rntype
, result
;
5431 HOST_WIDE_INT first_bit
, end_bit
;
5434 /* Start by getting the comparison codes. Fail if anything is volatile.
5435 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5436 it were surrounded with a NE_EXPR. */
5438 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5441 lcode
= TREE_CODE (lhs
);
5442 rcode
= TREE_CODE (rhs
);
5444 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5446 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5447 build_int_cst (TREE_TYPE (lhs
), 0));
5451 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5453 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5454 build_int_cst (TREE_TYPE (rhs
), 0));
5458 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5459 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5462 ll_arg
= TREE_OPERAND (lhs
, 0);
5463 lr_arg
= TREE_OPERAND (lhs
, 1);
5464 rl_arg
= TREE_OPERAND (rhs
, 0);
5465 rr_arg
= TREE_OPERAND (rhs
, 1);
5467 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5468 if (simple_operand_p (ll_arg
)
5469 && simple_operand_p (lr_arg
))
5471 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5472 && operand_equal_p (lr_arg
, rr_arg
, 0))
5474 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5475 truth_type
, ll_arg
, lr_arg
);
5479 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5480 && operand_equal_p (lr_arg
, rl_arg
, 0))
5482 result
= combine_comparisons (loc
, code
, lcode
,
5483 swap_tree_comparison (rcode
),
5484 truth_type
, ll_arg
, lr_arg
);
5490 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5491 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5493 /* If the RHS can be evaluated unconditionally and its operands are
5494 simple, it wins to evaluate the RHS unconditionally on machines
5495 with expensive branches. In this case, this isn't a comparison
5496 that can be merged. */
5498 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5500 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5501 && simple_operand_p (rl_arg
)
5502 && simple_operand_p (rr_arg
))
5504 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5505 if (code
== TRUTH_OR_EXPR
5506 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5507 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5508 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5509 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5510 return build2_loc (loc
, NE_EXPR
, truth_type
,
5511 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5513 build_int_cst (TREE_TYPE (ll_arg
), 0));
5515 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5516 if (code
== TRUTH_AND_EXPR
5517 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5518 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5519 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5520 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5521 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5522 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5524 build_int_cst (TREE_TYPE (ll_arg
), 0));
5527 /* See if the comparisons can be merged. Then get all the parameters for
5530 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5531 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5535 ll_inner
= decode_field_reference (loc
, ll_arg
,
5536 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5537 &ll_unsignedp
, &volatilep
, &ll_mask
,
5539 lr_inner
= decode_field_reference (loc
, lr_arg
,
5540 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5541 &lr_unsignedp
, &volatilep
, &lr_mask
,
5543 rl_inner
= decode_field_reference (loc
, rl_arg
,
5544 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5545 &rl_unsignedp
, &volatilep
, &rl_mask
,
5547 rr_inner
= decode_field_reference (loc
, rr_arg
,
5548 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5549 &rr_unsignedp
, &volatilep
, &rr_mask
,
5552 /* It must be true that the inner operation on the lhs of each
5553 comparison must be the same if we are to be able to do anything.
5554 Then see if we have constants. If not, the same must be true for
5556 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5557 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5560 if (TREE_CODE (lr_arg
) == INTEGER_CST
5561 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5562 l_const
= lr_arg
, r_const
= rr_arg
;
5563 else if (lr_inner
== 0 || rr_inner
== 0
5564 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5567 l_const
= r_const
= 0;
5569 /* If either comparison code is not correct for our logical operation,
5570 fail. However, we can convert a one-bit comparison against zero into
5571 the opposite comparison against that bit being set in the field. */
5573 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5574 if (lcode
!= wanted_code
)
5576 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5578 /* Make the left operand unsigned, since we are only interested
5579 in the value of one bit. Otherwise we are doing the wrong
5588 /* This is analogous to the code for l_const above. */
5589 if (rcode
!= wanted_code
)
5591 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5600 /* See if we can find a mode that contains both fields being compared on
5601 the left. If we can't, fail. Otherwise, update all constants and masks
5602 to be relative to a field of that size. */
5603 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5604 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5605 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5606 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5608 if (lnmode
== VOIDmode
)
5611 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5612 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5613 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5614 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5616 if (BYTES_BIG_ENDIAN
)
5618 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5619 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5622 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5623 size_int (xll_bitpos
));
5624 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5625 size_int (xrl_bitpos
));
5629 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5630 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5631 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5632 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5633 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5636 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5638 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5643 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5644 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5645 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5646 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5647 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5650 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5652 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5656 /* If the right sides are not constant, do the same for it. Also,
5657 disallow this optimization if a size or signedness mismatch occurs
5658 between the left and right sides. */
5661 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5662 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5663 /* Make sure the two fields on the right
5664 correspond to the left without being swapped. */
5665 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5668 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5669 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5670 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5671 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5673 if (rnmode
== VOIDmode
)
5676 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5677 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5678 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5679 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5681 if (BYTES_BIG_ENDIAN
)
5683 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5684 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5687 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5689 size_int (xlr_bitpos
));
5690 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5692 size_int (xrr_bitpos
));
5694 /* Make a mask that corresponds to both fields being compared.
5695 Do this for both items being compared. If the operands are the
5696 same size and the bits being compared are in the same position
5697 then we can do this by masking both and comparing the masked
5699 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5700 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5701 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5703 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5704 ll_unsignedp
|| rl_unsignedp
);
5705 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5706 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5708 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5709 lr_unsignedp
|| rr_unsignedp
);
5710 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5711 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5713 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5716 /* There is still another way we can do something: If both pairs of
5717 fields being compared are adjacent, we may be able to make a wider
5718 field containing them both.
5720 Note that we still must mask the lhs/rhs expressions. Furthermore,
5721 the mask must be shifted to account for the shift done by
5722 make_bit_field_ref. */
5723 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5724 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5725 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5726 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5730 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5731 ll_bitsize
+ rl_bitsize
,
5732 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5733 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5734 lr_bitsize
+ rr_bitsize
,
5735 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5737 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5738 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5739 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5740 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5742 /* Convert to the smaller type before masking out unwanted bits. */
5744 if (lntype
!= rntype
)
5746 if (lnbitsize
> rnbitsize
)
5748 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5749 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5752 else if (lnbitsize
< rnbitsize
)
5754 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5755 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5760 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5761 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5763 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5764 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5766 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5772 /* Handle the case of comparisons with constants. If there is something in
5773 common between the masks, those bits of the constants must be the same.
5774 If not, the condition is always false. Test for this to avoid generating
5775 incorrect code below. */
5776 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5777 if (! integer_zerop (result
)
5778 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5779 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5781 if (wanted_code
== NE_EXPR
)
5783 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5784 return constant_boolean_node (true, truth_type
);
5788 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5789 return constant_boolean_node (false, truth_type
);
5793 /* Construct the expression we will return. First get the component
5794 reference we will make. Unless the mask is all ones the width of
5795 that field, perform the mask operation. Then compare with the
5797 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5798 ll_unsignedp
|| rl_unsignedp
);
5800 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5801 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5802 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5804 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5805 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
5808 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5812 optimize_minmax_comparison (location_t loc
, enum tree_code code
, tree type
,
5816 enum tree_code op_code
;
5819 int consts_equal
, consts_lt
;
5822 STRIP_SIGN_NOPS (arg0
);
5824 op_code
= TREE_CODE (arg0
);
5825 minmax_const
= TREE_OPERAND (arg0
, 1);
5826 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5827 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5828 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5829 inner
= TREE_OPERAND (arg0
, 0);
5831 /* If something does not permit us to optimize, return the original tree. */
5832 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5833 || TREE_CODE (comp_const
) != INTEGER_CST
5834 || TREE_OVERFLOW (comp_const
)
5835 || TREE_CODE (minmax_const
) != INTEGER_CST
5836 || TREE_OVERFLOW (minmax_const
))
5839 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5840 and GT_EXPR, doing the rest with recursive calls using logical
5844 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5847 = optimize_minmax_comparison (loc
,
5848 invert_tree_comparison (code
, false),
5851 return invert_truthvalue_loc (loc
, tem
);
5857 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5858 optimize_minmax_comparison
5859 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5860 optimize_minmax_comparison
5861 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
5864 if (op_code
== MAX_EXPR
&& consts_equal
)
5865 /* MAX (X, 0) == 0 -> X <= 0 */
5866 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5868 else if (op_code
== MAX_EXPR
&& consts_lt
)
5869 /* MAX (X, 0) == 5 -> X == 5 */
5870 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5872 else if (op_code
== MAX_EXPR
)
5873 /* MAX (X, 0) == -1 -> false */
5874 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5876 else if (consts_equal
)
5877 /* MIN (X, 0) == 0 -> X >= 0 */
5878 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5881 /* MIN (X, 0) == 5 -> false */
5882 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5885 /* MIN (X, 0) == -1 -> X == -1 */
5886 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5889 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5890 /* MAX (X, 0) > 0 -> X > 0
5891 MAX (X, 0) > 5 -> X > 5 */
5892 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5894 else if (op_code
== MAX_EXPR
)
5895 /* MAX (X, 0) > -1 -> true */
5896 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5898 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5899 /* MIN (X, 0) > 0 -> false
5900 MIN (X, 0) > 5 -> false */
5901 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5904 /* MIN (X, 0) > -1 -> X > -1 */
5905 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5912 /* T is an integer expression that is being multiplied, divided, or taken a
5913 modulus (CODE says which and what kind of divide or modulus) by a
5914 constant C. See if we can eliminate that operation by folding it with
5915 other operations already in T. WIDE_TYPE, if non-null, is a type that
5916 should be used for the computation if wider than our type.
5918 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5919 (X * 2) + (Y * 4). We must, however, be assured that either the original
5920 expression would not overflow or that overflow is undefined for the type
5921 in the language in question.
5923 If we return a non-null expression, it is an equivalent form of the
5924 original computation, but need not be in the original type.
5926 We set *STRICT_OVERFLOW_P to true if the return values depends on
5927 signed overflow being undefined. Otherwise we do not change
5928 *STRICT_OVERFLOW_P. */
5931 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5932 bool *strict_overflow_p
)
5934 /* To avoid exponential search depth, refuse to allow recursion past
5935 three levels. Beyond that (1) it's highly unlikely that we'll find
5936 something interesting and (2) we've probably processed it before
5937 when we built the inner expression. */
5946 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5953 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5954 bool *strict_overflow_p
)
5956 tree type
= TREE_TYPE (t
);
5957 enum tree_code tcode
= TREE_CODE (t
);
5958 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5959 > GET_MODE_SIZE (TYPE_MODE (type
)))
5960 ? wide_type
: type
);
5962 int same_p
= tcode
== code
;
5963 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5964 bool sub_strict_overflow_p
;
5966 /* Don't deal with constants of zero here; they confuse the code below. */
5967 if (integer_zerop (c
))
5970 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5971 op0
= TREE_OPERAND (t
, 0);
5973 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5974 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5976 /* Note that we need not handle conditional operations here since fold
5977 already handles those cases. So just do arithmetic here. */
5981 /* For a constant, we can always simplify if we are a multiply
5982 or (for divide and modulus) if it is a multiple of our constant. */
5983 if (code
== MULT_EXPR
5984 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
5985 return const_binop (code
, fold_convert (ctype
, t
),
5986 fold_convert (ctype
, c
));
5989 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5990 /* If op0 is an expression ... */
5991 if ((COMPARISON_CLASS_P (op0
)
5992 || UNARY_CLASS_P (op0
)
5993 || BINARY_CLASS_P (op0
)
5994 || VL_EXP_CLASS_P (op0
)
5995 || EXPRESSION_CLASS_P (op0
))
5996 /* ... and has wrapping overflow, and its type is smaller
5997 than ctype, then we cannot pass through as widening. */
5998 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
5999 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
)))
6000 && (TYPE_PRECISION (ctype
)
6001 > TYPE_PRECISION (TREE_TYPE (op0
))))
6002 /* ... or this is a truncation (t is narrower than op0),
6003 then we cannot pass through this narrowing. */
6004 || (TYPE_PRECISION (type
)
6005 < TYPE_PRECISION (TREE_TYPE (op0
)))
6006 /* ... or signedness changes for division or modulus,
6007 then we cannot pass through this conversion. */
6008 || (code
!= MULT_EXPR
6009 && (TYPE_UNSIGNED (ctype
)
6010 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
6011 /* ... or has undefined overflow while the converted to
6012 type has not, we cannot do the operation in the inner type
6013 as that would introduce undefined overflow. */
6014 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6015 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
)))
6016 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6019 /* Pass the constant down and see if we can make a simplification. If
6020 we can, replace this expression with the inner simplification for
6021 possible later conversion to our or some other type. */
6022 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6023 && TREE_CODE (t2
) == INTEGER_CST
6024 && !TREE_OVERFLOW (t2
)
6025 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
6027 ? ctype
: NULL_TREE
,
6028 strict_overflow_p
))))
6033 /* If widening the type changes it from signed to unsigned, then we
6034 must avoid building ABS_EXPR itself as unsigned. */
6035 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6037 tree cstype
= (*signed_type_for
) (ctype
);
6038 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6041 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6042 return fold_convert (ctype
, t1
);
6046 /* If the constant is negative, we cannot simplify this. */
6047 if (tree_int_cst_sgn (c
) == -1)
6051 /* For division and modulus, type can't be unsigned, as e.g.
6052 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6053 For signed types, even with wrapping overflow, this is fine. */
6054 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6056 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6058 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6061 case MIN_EXPR
: case MAX_EXPR
:
6062 /* If widening the type changes the signedness, then we can't perform
6063 this optimization as that changes the result. */
6064 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6067 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6068 sub_strict_overflow_p
= false;
6069 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6070 &sub_strict_overflow_p
)) != 0
6071 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6072 &sub_strict_overflow_p
)) != 0)
6074 if (tree_int_cst_sgn (c
) < 0)
6075 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6076 if (sub_strict_overflow_p
)
6077 *strict_overflow_p
= true;
6078 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6079 fold_convert (ctype
, t2
));
6083 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6084 /* If the second operand is constant, this is a multiplication
6085 or floor division, by a power of two, so we can treat it that
6086 way unless the multiplier or divisor overflows. Signed
6087 left-shift overflow is implementation-defined rather than
6088 undefined in C90, so do not convert signed left shift into
6090 if (TREE_CODE (op1
) == INTEGER_CST
6091 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6092 /* const_binop may not detect overflow correctly,
6093 so check for it explicitly here. */
6094 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
6095 && 0 != (t1
= fold_convert (ctype
,
6096 const_binop (LSHIFT_EXPR
,
6099 && !TREE_OVERFLOW (t1
))
6100 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6101 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6103 fold_convert (ctype
, op0
),
6105 c
, code
, wide_type
, strict_overflow_p
);
6108 case PLUS_EXPR
: case MINUS_EXPR
:
6109 /* See if we can eliminate the operation on both sides. If we can, we
6110 can return a new PLUS or MINUS. If we can't, the only remaining
6111 cases where we can do anything are if the second operand is a
6113 sub_strict_overflow_p
= false;
6114 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6115 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6116 if (t1
!= 0 && t2
!= 0
6117 && (code
== MULT_EXPR
6118 /* If not multiplication, we can only do this if both operands
6119 are divisible by c. */
6120 || (multiple_of_p (ctype
, op0
, c
)
6121 && multiple_of_p (ctype
, op1
, c
))))
6123 if (sub_strict_overflow_p
)
6124 *strict_overflow_p
= true;
6125 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6126 fold_convert (ctype
, t2
));
6129 /* If this was a subtraction, negate OP1 and set it to be an addition.
6130 This simplifies the logic below. */
6131 if (tcode
== MINUS_EXPR
)
6133 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
6134 /* If OP1 was not easily negatable, the constant may be OP0. */
6135 if (TREE_CODE (op0
) == INTEGER_CST
)
6137 std::swap (op0
, op1
);
6142 if (TREE_CODE (op1
) != INTEGER_CST
)
6145 /* If either OP1 or C are negative, this optimization is not safe for
6146 some of the division and remainder types while for others we need
6147 to change the code. */
6148 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
6150 if (code
== CEIL_DIV_EXPR
)
6151 code
= FLOOR_DIV_EXPR
;
6152 else if (code
== FLOOR_DIV_EXPR
)
6153 code
= CEIL_DIV_EXPR
;
6154 else if (code
!= MULT_EXPR
6155 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
6159 /* If it's a multiply or a division/modulus operation of a multiple
6160 of our constant, do the operation and verify it doesn't overflow. */
6161 if (code
== MULT_EXPR
6162 || wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6164 op1
= const_binop (code
, fold_convert (ctype
, op1
),
6165 fold_convert (ctype
, c
));
6166 /* We allow the constant to overflow with wrapping semantics. */
6168 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
6174 /* If we have an unsigned type, we cannot widen the operation since it
6175 will change the result if the original computation overflowed. */
6176 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
6179 /* If we were able to eliminate our operation from the first side,
6180 apply our operation to the second side and reform the PLUS. */
6181 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
6182 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
6184 /* The last case is if we are a multiply. In that case, we can
6185 apply the distributive law to commute the multiply and addition
6186 if the multiplication of the constants doesn't overflow
6187 and overflow is defined. With undefined overflow
6188 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6189 if (code
== MULT_EXPR
&& TYPE_OVERFLOW_WRAPS (ctype
))
6190 return fold_build2 (tcode
, ctype
,
6191 fold_build2 (code
, ctype
,
6192 fold_convert (ctype
, op0
),
6193 fold_convert (ctype
, c
)),
6199 /* We have a special case here if we are doing something like
6200 (C * 8) % 4 since we know that's zero. */
6201 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
6202 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
6203 /* If the multiplication can overflow we cannot optimize this. */
6204 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
6205 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
6206 && wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6208 *strict_overflow_p
= true;
6209 return omit_one_operand (type
, integer_zero_node
, op0
);
6212 /* ... fall through ... */
6214 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6215 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
6216 /* If we can extract our operation from the LHS, do so and return a
6217 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6218 do something only if the second operand is a constant. */
6220 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6221 strict_overflow_p
)) != 0)
6222 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6223 fold_convert (ctype
, op1
));
6224 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
6225 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
6226 strict_overflow_p
)) != 0)
6227 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6228 fold_convert (ctype
, t1
));
6229 else if (TREE_CODE (op1
) != INTEGER_CST
)
6232 /* If these are the same operation types, we can associate them
6233 assuming no overflow. */
6236 bool overflow_p
= false;
6237 bool overflow_mul_p
;
6238 signop sign
= TYPE_SIGN (ctype
);
6239 wide_int mul
= wi::mul (op1
, c
, sign
, &overflow_mul_p
);
6240 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
6242 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
6245 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6246 wide_int_to_tree (ctype
, mul
));
6249 /* If these operations "cancel" each other, we have the main
6250 optimizations of this pass, which occur when either constant is a
6251 multiple of the other, in which case we replace this with either an
6252 operation or CODE or TCODE.
6254 If we have an unsigned type, we cannot do this since it will change
6255 the result if the original computation overflowed. */
6256 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
6257 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
6258 || (tcode
== MULT_EXPR
6259 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
6260 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6261 && code
!= MULT_EXPR
)))
6263 if (wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6265 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6266 *strict_overflow_p
= true;
6267 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6268 fold_convert (ctype
,
6269 const_binop (TRUNC_DIV_EXPR
,
6272 else if (wi::multiple_of_p (c
, op1
, TYPE_SIGN (type
)))
6274 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6275 *strict_overflow_p
= true;
6276 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6277 fold_convert (ctype
,
6278 const_binop (TRUNC_DIV_EXPR
,
6291 /* Return a node which has the indicated constant VALUE (either 0 or
6292 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6293 and is of the indicated TYPE. */
6296 constant_boolean_node (bool value
, tree type
)
6298 if (type
== integer_type_node
)
6299 return value
? integer_one_node
: integer_zero_node
;
6300 else if (type
== boolean_type_node
)
6301 return value
? boolean_true_node
: boolean_false_node
;
6302 else if (TREE_CODE (type
) == VECTOR_TYPE
)
6303 return build_vector_from_val (type
,
6304 build_int_cst (TREE_TYPE (type
),
6307 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
6311 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6312 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6313 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6314 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6315 COND is the first argument to CODE; otherwise (as in the example
6316 given here), it is the second argument. TYPE is the type of the
6317 original expression. Return NULL_TREE if no simplification is
6321 fold_binary_op_with_conditional_arg (location_t loc
,
6322 enum tree_code code
,
6323 tree type
, tree op0
, tree op1
,
6324 tree cond
, tree arg
, int cond_first_p
)
6326 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
6327 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
6328 tree test
, true_value
, false_value
;
6329 tree lhs
= NULL_TREE
;
6330 tree rhs
= NULL_TREE
;
6331 enum tree_code cond_code
= COND_EXPR
;
6333 if (TREE_CODE (cond
) == COND_EXPR
6334 || TREE_CODE (cond
) == VEC_COND_EXPR
)
6336 test
= TREE_OPERAND (cond
, 0);
6337 true_value
= TREE_OPERAND (cond
, 1);
6338 false_value
= TREE_OPERAND (cond
, 2);
6339 /* If this operand throws an expression, then it does not make
6340 sense to try to perform a logical or arithmetic operation
6342 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6344 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6349 tree testtype
= TREE_TYPE (cond
);
6351 true_value
= constant_boolean_node (true, testtype
);
6352 false_value
= constant_boolean_node (false, testtype
);
6355 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
6356 cond_code
= VEC_COND_EXPR
;
6358 /* This transformation is only worthwhile if we don't have to wrap ARG
6359 in a SAVE_EXPR and the operation can be simplified without recursing
6360 on at least one of the branches once its pushed inside the COND_EXPR. */
6361 if (!TREE_CONSTANT (arg
)
6362 && (TREE_SIDE_EFFECTS (arg
)
6363 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
6364 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6367 arg
= fold_convert_loc (loc
, arg_type
, arg
);
6370 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6372 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6374 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6378 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6380 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6382 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6385 /* Check that we have simplified at least one of the branches. */
6386 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6389 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
6393 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6395 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6396 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6397 ADDEND is the same as X.
6399 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6400 and finite. The problematic cases are when X is zero, and its mode
6401 has signed zeros. In the case of rounding towards -infinity,
6402 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6403 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6406 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6408 if (!real_zerop (addend
))
6411 /* Don't allow the fold with -fsignaling-nans. */
6412 if (HONOR_SNANS (element_mode (type
)))
6415 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6416 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
6419 /* In a vector or complex, we would need to check the sign of all zeros. */
6420 if (TREE_CODE (addend
) != REAL_CST
)
6423 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6424 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6427 /* The mode has signed zeros, and we have to honor their sign.
6428 In this situation, there is only one case we can return true for.
6429 X - 0 is the same as X unless rounding towards -infinity is
6431 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
));
6434 /* Subroutine of fold() that checks comparisons of built-in math
6435 functions against real constants.
6437 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6438 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6439 is the type of the result and ARG0 and ARG1 are the operands of the
6440 comparison. ARG1 must be a TREE_REAL_CST.
6442 The function returns the constant folded tree if a simplification
6443 can be made, and NULL_TREE otherwise. */
6446 fold_mathfn_compare (location_t loc
,
6447 enum built_in_function fcode
, enum tree_code code
,
6448 tree type
, tree arg0
, tree arg1
)
6452 if (BUILTIN_SQRT_P (fcode
))
6454 tree arg
= CALL_EXPR_ARG (arg0
, 0);
6455 machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
6457 c
= TREE_REAL_CST (arg1
);
6458 if (REAL_VALUE_NEGATIVE (c
))
6460 /* sqrt(x) < y is always false, if y is negative. */
6461 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
6462 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6464 /* sqrt(x) > y is always true, if y is negative and we
6465 don't care about NaNs, i.e. negative values of x. */
6466 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
6467 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6469 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6470 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6471 build_real (TREE_TYPE (arg
), dconst0
));
6473 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
6477 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6478 real_convert (&c2
, mode
, &c2
);
6480 if (REAL_VALUE_ISINF (c2
))
6482 /* sqrt(x) > y is x == +Inf, when y is very large. */
6483 if (HONOR_INFINITIES (mode
))
6484 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg
,
6485 build_real (TREE_TYPE (arg
), c2
));
6487 /* sqrt(x) > y is always false, when y is very large
6488 and we don't care about infinities. */
6489 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6492 /* sqrt(x) > c is the same as x > c*c. */
6493 return fold_build2_loc (loc
, code
, type
, arg
,
6494 build_real (TREE_TYPE (arg
), c2
));
6496 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
6500 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6501 real_convert (&c2
, mode
, &c2
);
6503 if (REAL_VALUE_ISINF (c2
))
6505 /* sqrt(x) < y is always true, when y is a very large
6506 value and we don't care about NaNs or Infinities. */
6507 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
6508 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6510 /* sqrt(x) < y is x != +Inf when y is very large and we
6511 don't care about NaNs. */
6512 if (! HONOR_NANS (mode
))
6513 return fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6514 build_real (TREE_TYPE (arg
), c2
));
6516 /* sqrt(x) < y is x >= 0 when y is very large and we
6517 don't care about Infinities. */
6518 if (! HONOR_INFINITIES (mode
))
6519 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6520 build_real (TREE_TYPE (arg
), dconst0
));
6522 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6523 arg
= save_expr (arg
);
6524 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6525 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6526 build_real (TREE_TYPE (arg
),
6528 fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6529 build_real (TREE_TYPE (arg
),
6533 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6534 if (! HONOR_NANS (mode
))
6535 return fold_build2_loc (loc
, code
, type
, arg
,
6536 build_real (TREE_TYPE (arg
), c2
));
6538 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6539 arg
= save_expr (arg
);
6540 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6541 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6542 build_real (TREE_TYPE (arg
),
6544 fold_build2_loc (loc
, code
, type
, arg
,
6545 build_real (TREE_TYPE (arg
),
6553 /* Subroutine of fold() that optimizes comparisons against Infinities,
6554 either +Inf or -Inf.
6556 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6557 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6558 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6560 The function returns the constant folded tree if a simplification
6561 can be made, and NULL_TREE otherwise. */
6564 fold_inf_compare (location_t loc
, enum tree_code code
, tree type
,
6565 tree arg0
, tree arg1
)
6568 REAL_VALUE_TYPE max
;
6572 mode
= TYPE_MODE (TREE_TYPE (arg0
));
6574 /* For negative infinity swap the sense of the comparison. */
6575 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6577 code
= swap_tree_comparison (code
);
6582 /* x > +Inf is always false, if with ignore sNANs. */
6583 if (HONOR_SNANS (mode
))
6585 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6588 /* x <= +Inf is always true, if we don't case about NaNs. */
6589 if (! HONOR_NANS (mode
))
6590 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6592 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6593 arg0
= save_expr (arg0
);
6594 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg0
);
6598 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6599 real_maxval (&max
, neg
, mode
);
6600 return fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6601 arg0
, build_real (TREE_TYPE (arg0
), max
));
6604 /* x < +Inf is always equal to x <= DBL_MAX. */
6605 real_maxval (&max
, neg
, mode
);
6606 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6607 arg0
, build_real (TREE_TYPE (arg0
), max
));
6610 /* x != +Inf is always equal to !(x > DBL_MAX). */
6611 real_maxval (&max
, neg
, mode
);
6612 if (! HONOR_NANS (mode
))
6613 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6614 arg0
, build_real (TREE_TYPE (arg0
), max
));
6616 temp
= fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6617 arg0
, build_real (TREE_TYPE (arg0
), max
));
6618 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, temp
);
6627 /* Subroutine of fold() that optimizes comparisons of a division by
6628 a nonzero integer constant against an integer constant, i.e.
6631 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6632 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6633 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6635 The function returns the constant folded tree if a simplification
6636 can be made, and NULL_TREE otherwise. */
6639 fold_div_compare (location_t loc
,
6640 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6642 tree prod
, tmp
, hi
, lo
;
6643 tree arg00
= TREE_OPERAND (arg0
, 0);
6644 tree arg01
= TREE_OPERAND (arg0
, 1);
6645 signop sign
= TYPE_SIGN (TREE_TYPE (arg0
));
6646 bool neg_overflow
= false;
6649 /* We have to do this the hard way to detect unsigned overflow.
6650 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6651 wide_int val
= wi::mul (arg01
, arg1
, sign
, &overflow
);
6652 prod
= force_fit_type (TREE_TYPE (arg00
), val
, -1, overflow
);
6653 neg_overflow
= false;
6655 if (sign
== UNSIGNED
)
6657 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6658 build_int_cst (TREE_TYPE (arg01
), 1));
6661 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6662 val
= wi::add (prod
, tmp
, sign
, &overflow
);
6663 hi
= force_fit_type (TREE_TYPE (arg00
), val
,
6664 -1, overflow
| TREE_OVERFLOW (prod
));
6666 else if (tree_int_cst_sgn (arg01
) >= 0)
6668 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6669 build_int_cst (TREE_TYPE (arg01
), 1));
6670 switch (tree_int_cst_sgn (arg1
))
6673 neg_overflow
= true;
6674 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6679 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6684 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6694 /* A negative divisor reverses the relational operators. */
6695 code
= swap_tree_comparison (code
);
6697 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6698 build_int_cst (TREE_TYPE (arg01
), 1));
6699 switch (tree_int_cst_sgn (arg1
))
6702 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6707 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6712 neg_overflow
= true;
6713 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6725 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6726 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6727 if (TREE_OVERFLOW (hi
))
6728 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6729 if (TREE_OVERFLOW (lo
))
6730 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6731 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6734 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6735 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6736 if (TREE_OVERFLOW (hi
))
6737 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6738 if (TREE_OVERFLOW (lo
))
6739 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6740 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6743 if (TREE_OVERFLOW (lo
))
6745 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6746 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6748 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6751 if (TREE_OVERFLOW (hi
))
6753 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6754 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6756 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6759 if (TREE_OVERFLOW (hi
))
6761 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6762 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6764 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6767 if (TREE_OVERFLOW (lo
))
6769 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6770 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6772 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6782 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6783 equality/inequality test, then return a simplified form of the test
6784 using a sign testing. Otherwise return NULL. TYPE is the desired
6788 fold_single_bit_test_into_sign_test (location_t loc
,
6789 enum tree_code code
, tree arg0
, tree arg1
,
6792 /* If this is testing a single bit, we can optimize the test. */
6793 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6794 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6795 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6797 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6798 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6799 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6801 if (arg00
!= NULL_TREE
6802 /* This is only a win if casting to a signed type is cheap,
6803 i.e. when arg00's type is not a partial mode. */
6804 && TYPE_PRECISION (TREE_TYPE (arg00
))
6805 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00
))))
6807 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6808 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6810 fold_convert_loc (loc
, stype
, arg00
),
6811 build_int_cst (stype
, 0));
6818 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6819 equality/inequality test, then return a simplified form of
6820 the test using shifts and logical operations. Otherwise return
6821 NULL. TYPE is the desired result type. */
6824 fold_single_bit_test (location_t loc
, enum tree_code code
,
6825 tree arg0
, tree arg1
, tree result_type
)
6827 /* If this is testing a single bit, we can optimize the test. */
6828 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6829 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6830 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6832 tree inner
= TREE_OPERAND (arg0
, 0);
6833 tree type
= TREE_TYPE (arg0
);
6834 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6835 machine_mode operand_mode
= TYPE_MODE (type
);
6837 tree signed_type
, unsigned_type
, intermediate_type
;
6840 /* First, see if we can fold the single bit test into a sign-bit
6842 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6847 /* Otherwise we have (A & C) != 0 where C is a single bit,
6848 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6849 Similarly for (A & C) == 0. */
6851 /* If INNER is a right shift of a constant and it plus BITNUM does
6852 not overflow, adjust BITNUM and INNER. */
6853 if (TREE_CODE (inner
) == RSHIFT_EXPR
6854 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6855 && bitnum
< TYPE_PRECISION (type
)
6856 && wi::ltu_p (TREE_OPERAND (inner
, 1),
6857 TYPE_PRECISION (type
) - bitnum
))
6859 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
6860 inner
= TREE_OPERAND (inner
, 0);
6863 /* If we are going to be able to omit the AND below, we must do our
6864 operations as unsigned. If we must use the AND, we have a choice.
6865 Normally unsigned is faster, but for some machines signed is. */
6866 #ifdef LOAD_EXTEND_OP
6867 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6868 && !flag_syntax_only
) ? 0 : 1;
6873 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6874 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6875 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6876 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
6879 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6880 inner
, size_int (bitnum
));
6882 one
= build_int_cst (intermediate_type
, 1);
6884 if (code
== EQ_EXPR
)
6885 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6887 /* Put the AND last so it can combine with more things. */
6888 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6890 /* Make sure to return the proper type. */
6891 inner
= fold_convert_loc (loc
, result_type
, inner
);
6898 /* Check whether we are allowed to reorder operands arg0 and arg1,
6899 such that the evaluation of arg1 occurs before arg0. */
6902 reorder_operands_p (const_tree arg0
, const_tree arg1
)
6904 if (! flag_evaluation_order
)
6906 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6908 return ! TREE_SIDE_EFFECTS (arg0
)
6909 && ! TREE_SIDE_EFFECTS (arg1
);
6912 /* Test whether it is preferable two swap two operands, ARG0 and
6913 ARG1, for example because ARG0 is an integer constant and ARG1
6914 isn't. If REORDER is true, only recommend swapping if we can
6915 evaluate the operands in reverse order. */
6918 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
6920 if (CONSTANT_CLASS_P (arg1
))
6922 if (CONSTANT_CLASS_P (arg0
))
6928 if (TREE_CONSTANT (arg1
))
6930 if (TREE_CONSTANT (arg0
))
6933 if (reorder
&& flag_evaluation_order
6934 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6937 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6938 for commutative and comparison operators. Ensuring a canonical
6939 form allows the optimizers to find additional redundancies without
6940 having to explicitly check for both orderings. */
6941 if (TREE_CODE (arg0
) == SSA_NAME
6942 && TREE_CODE (arg1
) == SSA_NAME
6943 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6946 /* Put SSA_NAMEs last. */
6947 if (TREE_CODE (arg1
) == SSA_NAME
)
6949 if (TREE_CODE (arg0
) == SSA_NAME
)
6952 /* Put variables last. */
6961 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6962 ARG0 is extended to a wider type. */
6965 fold_widened_comparison (location_t loc
, enum tree_code code
,
6966 tree type
, tree arg0
, tree arg1
)
6968 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6970 tree shorter_type
, outer_type
;
6974 if (arg0_unw
== arg0
)
6976 shorter_type
= TREE_TYPE (arg0_unw
);
6978 #ifdef HAVE_canonicalize_funcptr_for_compare
6979 /* Disable this optimization if we're casting a function pointer
6980 type on targets that require function pointer canonicalization. */
6981 if (HAVE_canonicalize_funcptr_for_compare
6982 && TREE_CODE (shorter_type
) == POINTER_TYPE
6983 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6987 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6990 arg1_unw
= get_unwidened (arg1
, NULL_TREE
);
6992 /* If possible, express the comparison in the shorter mode. */
6993 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6994 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6995 && (TREE_TYPE (arg1_unw
) == shorter_type
6996 || ((TYPE_PRECISION (shorter_type
)
6997 >= TYPE_PRECISION (TREE_TYPE (arg1_unw
)))
6998 && (TYPE_UNSIGNED (shorter_type
)
6999 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw
))))
7000 || (TREE_CODE (arg1_unw
) == INTEGER_CST
7001 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
7002 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
7003 && int_fits_type_p (arg1_unw
, shorter_type
))))
7004 return fold_build2_loc (loc
, code
, type
, arg0_unw
,
7005 fold_convert_loc (loc
, shorter_type
, arg1_unw
));
7007 if (TREE_CODE (arg1_unw
) != INTEGER_CST
7008 || TREE_CODE (shorter_type
) != INTEGER_TYPE
7009 || !int_fits_type_p (arg1_unw
, shorter_type
))
7012 /* If we are comparing with the integer that does not fit into the range
7013 of the shorter type, the result is known. */
7014 outer_type
= TREE_TYPE (arg1_unw
);
7015 min
= lower_bound_in_type (outer_type
, shorter_type
);
7016 max
= upper_bound_in_type (outer_type
, shorter_type
);
7018 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
7020 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
7027 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
7032 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
7038 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
7040 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
7045 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
7047 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
7056 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7057 ARG0 just the signedness is changed. */
7060 fold_sign_changed_comparison (location_t loc
, enum tree_code code
, tree type
,
7061 tree arg0
, tree arg1
)
7064 tree inner_type
, outer_type
;
7066 if (!CONVERT_EXPR_P (arg0
))
7069 outer_type
= TREE_TYPE (arg0
);
7070 arg0_inner
= TREE_OPERAND (arg0
, 0);
7071 inner_type
= TREE_TYPE (arg0_inner
);
7073 #ifdef HAVE_canonicalize_funcptr_for_compare
7074 /* Disable this optimization if we're casting a function pointer
7075 type on targets that require function pointer canonicalization. */
7076 if (HAVE_canonicalize_funcptr_for_compare
7077 && TREE_CODE (inner_type
) == POINTER_TYPE
7078 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
7082 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
7085 if (TREE_CODE (arg1
) != INTEGER_CST
7086 && !(CONVERT_EXPR_P (arg1
)
7087 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
7090 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
7095 if (POINTER_TYPE_P (inner_type
) != POINTER_TYPE_P (outer_type
))
7098 if (TREE_CODE (arg1
) == INTEGER_CST
)
7099 arg1
= force_fit_type (inner_type
, wi::to_widest (arg1
), 0,
7100 TREE_OVERFLOW (arg1
));
7102 arg1
= fold_convert_loc (loc
, inner_type
, arg1
);
7104 return fold_build2_loc (loc
, code
, type
, arg0_inner
, arg1
);
7108 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7109 means A >= Y && A != MAX, but in this case we know that
7110 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7113 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
7115 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
7117 if (TREE_CODE (bound
) == LT_EXPR
)
7118 a
= TREE_OPERAND (bound
, 0);
7119 else if (TREE_CODE (bound
) == GT_EXPR
)
7120 a
= TREE_OPERAND (bound
, 1);
7124 typea
= TREE_TYPE (a
);
7125 if (!INTEGRAL_TYPE_P (typea
)
7126 && !POINTER_TYPE_P (typea
))
7129 if (TREE_CODE (ineq
) == LT_EXPR
)
7131 a1
= TREE_OPERAND (ineq
, 1);
7132 y
= TREE_OPERAND (ineq
, 0);
7134 else if (TREE_CODE (ineq
) == GT_EXPR
)
7136 a1
= TREE_OPERAND (ineq
, 0);
7137 y
= TREE_OPERAND (ineq
, 1);
7142 if (TREE_TYPE (a1
) != typea
)
7145 if (POINTER_TYPE_P (typea
))
7147 /* Convert the pointer types into integer before taking the difference. */
7148 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
7149 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
7150 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
7153 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
7155 if (!diff
|| !integer_onep (diff
))
7158 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
7161 /* Fold a sum or difference of at least one multiplication.
7162 Returns the folded tree or NULL if no simplification could be made. */
7165 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
7166 tree arg0
, tree arg1
)
7168 tree arg00
, arg01
, arg10
, arg11
;
7169 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7171 /* (A * C) +- (B * C) -> (A+-B) * C.
7172 (A * C) +- A -> A * (C+-1).
7173 We are most concerned about the case where C is a constant,
7174 but other combinations show up during loop reduction. Since
7175 it is not difficult, try all four possibilities. */
7177 if (TREE_CODE (arg0
) == MULT_EXPR
)
7179 arg00
= TREE_OPERAND (arg0
, 0);
7180 arg01
= TREE_OPERAND (arg0
, 1);
7182 else if (TREE_CODE (arg0
) == INTEGER_CST
)
7184 arg00
= build_one_cst (type
);
7189 /* We cannot generate constant 1 for fract. */
7190 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7193 arg01
= build_one_cst (type
);
7195 if (TREE_CODE (arg1
) == MULT_EXPR
)
7197 arg10
= TREE_OPERAND (arg1
, 0);
7198 arg11
= TREE_OPERAND (arg1
, 1);
7200 else if (TREE_CODE (arg1
) == INTEGER_CST
)
7202 arg10
= build_one_cst (type
);
7203 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7204 the purpose of this canonicalization. */
7205 if (wi::neg_p (arg1
, TYPE_SIGN (TREE_TYPE (arg1
)))
7206 && negate_expr_p (arg1
)
7207 && code
== PLUS_EXPR
)
7209 arg11
= negate_expr (arg1
);
7217 /* We cannot generate constant 1 for fract. */
7218 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7221 arg11
= build_one_cst (type
);
7225 if (operand_equal_p (arg01
, arg11
, 0))
7226 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7227 else if (operand_equal_p (arg00
, arg10
, 0))
7228 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7229 else if (operand_equal_p (arg00
, arg11
, 0))
7230 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7231 else if (operand_equal_p (arg01
, arg10
, 0))
7232 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7234 /* No identical multiplicands; see if we can find a common
7235 power-of-two factor in non-power-of-two multiplies. This
7236 can help in multi-dimensional array access. */
7237 else if (tree_fits_shwi_p (arg01
)
7238 && tree_fits_shwi_p (arg11
))
7240 HOST_WIDE_INT int01
, int11
, tmp
;
7243 int01
= tree_to_shwi (arg01
);
7244 int11
= tree_to_shwi (arg11
);
7246 /* Move min of absolute values to int11. */
7247 if (absu_hwi (int01
) < absu_hwi (int11
))
7249 tmp
= int01
, int01
= int11
, int11
= tmp
;
7250 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7257 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
7258 /* The remainder should not be a constant, otherwise we
7259 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7260 increased the number of multiplications necessary. */
7261 && TREE_CODE (arg10
) != INTEGER_CST
)
7263 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7264 build_int_cst (TREE_TYPE (arg00
),
7269 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7274 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7275 fold_build2_loc (loc
, code
, type
,
7276 fold_convert_loc (loc
, type
, alt0
),
7277 fold_convert_loc (loc
, type
, alt1
)),
7278 fold_convert_loc (loc
, type
, same
));
7283 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7284 specified by EXPR into the buffer PTR of length LEN bytes.
7285 Return the number of bytes placed in the buffer, or zero
7289 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7291 tree type
= TREE_TYPE (expr
);
7292 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7293 int byte
, offset
, word
, words
;
7294 unsigned char value
;
7296 if ((off
== -1 && total_bytes
> len
)
7297 || off
>= total_bytes
)
7301 words
= total_bytes
/ UNITS_PER_WORD
;
7303 for (byte
= 0; byte
< total_bytes
; byte
++)
7305 int bitpos
= byte
* BITS_PER_UNIT
;
7306 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7308 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7310 if (total_bytes
> UNITS_PER_WORD
)
7312 word
= byte
/ UNITS_PER_WORD
;
7313 if (WORDS_BIG_ENDIAN
)
7314 word
= (words
- 1) - word
;
7315 offset
= word
* UNITS_PER_WORD
;
7316 if (BYTES_BIG_ENDIAN
)
7317 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7319 offset
+= byte
% UNITS_PER_WORD
;
7322 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7324 && offset
- off
< len
)
7325 ptr
[offset
- off
] = value
;
7327 return MIN (len
, total_bytes
- off
);
7331 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7332 specified by EXPR into the buffer PTR of length LEN bytes.
7333 Return the number of bytes placed in the buffer, or zero
7337 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7339 tree type
= TREE_TYPE (expr
);
7340 machine_mode mode
= TYPE_MODE (type
);
7341 int total_bytes
= GET_MODE_SIZE (mode
);
7342 FIXED_VALUE_TYPE value
;
7343 tree i_value
, i_type
;
7345 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7348 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7350 if (NULL_TREE
== i_type
7351 || TYPE_PRECISION (i_type
) != total_bytes
)
7354 value
= TREE_FIXED_CST (expr
);
7355 i_value
= double_int_to_tree (i_type
, value
.data
);
7357 return native_encode_int (i_value
, ptr
, len
, off
);
7361 /* Subroutine of native_encode_expr. Encode the REAL_CST
7362 specified by EXPR into the buffer PTR of length LEN bytes.
7363 Return the number of bytes placed in the buffer, or zero
7367 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7369 tree type
= TREE_TYPE (expr
);
7370 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7371 int byte
, offset
, word
, words
, bitpos
;
7372 unsigned char value
;
7374 /* There are always 32 bits in each long, no matter the size of
7375 the hosts long. We handle floating point representations with
7379 if ((off
== -1 && total_bytes
> len
)
7380 || off
>= total_bytes
)
7384 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7386 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7388 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7389 bitpos
+= BITS_PER_UNIT
)
7391 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7392 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7394 if (UNITS_PER_WORD
< 4)
7396 word
= byte
/ UNITS_PER_WORD
;
7397 if (WORDS_BIG_ENDIAN
)
7398 word
= (words
- 1) - word
;
7399 offset
= word
* UNITS_PER_WORD
;
7400 if (BYTES_BIG_ENDIAN
)
7401 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7403 offset
+= byte
% UNITS_PER_WORD
;
7406 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7407 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7409 && offset
- off
< len
)
7410 ptr
[offset
- off
] = value
;
7412 return MIN (len
, total_bytes
- off
);
7415 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7416 specified by EXPR into the buffer PTR of length LEN bytes.
7417 Return the number of bytes placed in the buffer, or zero
7421 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7426 part
= TREE_REALPART (expr
);
7427 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7431 part
= TREE_IMAGPART (expr
);
7433 off
= MAX (0, off
- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part
))));
7434 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
, off
);
7438 return rsize
+ isize
;
7442 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7443 specified by EXPR into the buffer PTR of length LEN bytes.
7444 Return the number of bytes placed in the buffer, or zero
7448 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7455 count
= VECTOR_CST_NELTS (expr
);
7456 itype
= TREE_TYPE (TREE_TYPE (expr
));
7457 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7458 for (i
= 0; i
< count
; i
++)
7465 elem
= VECTOR_CST_ELT (expr
, i
);
7466 int res
= native_encode_expr (elem
, ptr
+offset
, len
-offset
, off
);
7467 if ((off
== -1 && res
!= size
)
7480 /* Subroutine of native_encode_expr. Encode the STRING_CST
7481 specified by EXPR into the buffer PTR of length LEN bytes.
7482 Return the number of bytes placed in the buffer, or zero
7486 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7488 tree type
= TREE_TYPE (expr
);
7489 HOST_WIDE_INT total_bytes
;
7491 if (TREE_CODE (type
) != ARRAY_TYPE
7492 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7493 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7494 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7496 total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (type
));
7497 if ((off
== -1 && total_bytes
> len
)
7498 || off
>= total_bytes
)
7502 if (TREE_STRING_LENGTH (expr
) - off
< MIN (total_bytes
, len
))
7505 if (off
< TREE_STRING_LENGTH (expr
))
7507 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7508 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7510 memset (ptr
+ written
, 0,
7511 MIN (total_bytes
- written
, len
- written
));
7514 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, MIN (total_bytes
, len
));
7515 return MIN (total_bytes
- off
, len
);
7519 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7520 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7521 buffer PTR of length LEN bytes. If OFF is not -1 then start
7522 the encoding at byte offset OFF and encode at most LEN bytes.
7523 Return the number of bytes placed in the buffer, or zero upon failure. */
7526 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7528 switch (TREE_CODE (expr
))
7531 return native_encode_int (expr
, ptr
, len
, off
);
7534 return native_encode_real (expr
, ptr
, len
, off
);
7537 return native_encode_fixed (expr
, ptr
, len
, off
);
7540 return native_encode_complex (expr
, ptr
, len
, off
);
7543 return native_encode_vector (expr
, ptr
, len
, off
);
7546 return native_encode_string (expr
, ptr
, len
, off
);
7554 /* Subroutine of native_interpret_expr. Interpret the contents of
7555 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7556 If the buffer cannot be interpreted, return NULL_TREE. */
7559 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7561 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7563 if (total_bytes
> len
7564 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7567 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
7569 return wide_int_to_tree (type
, result
);
7573 /* Subroutine of native_interpret_expr. Interpret the contents of
7574 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7575 If the buffer cannot be interpreted, return NULL_TREE. */
7578 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
7580 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7582 FIXED_VALUE_TYPE fixed_value
;
7584 if (total_bytes
> len
7585 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7588 result
= double_int::from_buffer (ptr
, total_bytes
);
7589 fixed_value
= fixed_from_double_int (result
, TYPE_MODE (type
));
7591 return build_fixed (type
, fixed_value
);
7595 /* Subroutine of native_interpret_expr. Interpret the contents of
7596 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7597 If the buffer cannot be interpreted, return NULL_TREE. */
7600 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7602 machine_mode mode
= TYPE_MODE (type
);
7603 int total_bytes
= GET_MODE_SIZE (mode
);
7604 int byte
, offset
, word
, words
, bitpos
;
7605 unsigned char value
;
7606 /* There are always 32 bits in each long, no matter the size of
7607 the hosts long. We handle floating point representations with
7612 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7613 if (total_bytes
> len
|| total_bytes
> 24)
7615 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7617 memset (tmp
, 0, sizeof (tmp
));
7618 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7619 bitpos
+= BITS_PER_UNIT
)
7621 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7622 if (UNITS_PER_WORD
< 4)
7624 word
= byte
/ UNITS_PER_WORD
;
7625 if (WORDS_BIG_ENDIAN
)
7626 word
= (words
- 1) - word
;
7627 offset
= word
* UNITS_PER_WORD
;
7628 if (BYTES_BIG_ENDIAN
)
7629 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7631 offset
+= byte
% UNITS_PER_WORD
;
7634 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7635 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7637 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7640 real_from_target (&r
, tmp
, mode
);
7641 return build_real (type
, r
);
7645 /* Subroutine of native_interpret_expr. Interpret the contents of
7646 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7647 If the buffer cannot be interpreted, return NULL_TREE. */
7650 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7652 tree etype
, rpart
, ipart
;
7655 etype
= TREE_TYPE (type
);
7656 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7659 rpart
= native_interpret_expr (etype
, ptr
, size
);
7662 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7665 return build_complex (type
, rpart
, ipart
);
7669 /* Subroutine of native_interpret_expr. Interpret the contents of
7670 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7671 If the buffer cannot be interpreted, return NULL_TREE. */
7674 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7680 etype
= TREE_TYPE (type
);
7681 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7682 count
= TYPE_VECTOR_SUBPARTS (type
);
7683 if (size
* count
> len
)
7686 elements
= XALLOCAVEC (tree
, count
);
7687 for (i
= count
- 1; i
>= 0; i
--)
7689 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7694 return build_vector (type
, elements
);
7698 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7699 the buffer PTR of length LEN as a constant of type TYPE. For
7700 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7701 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7702 return NULL_TREE. */
7705 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7707 switch (TREE_CODE (type
))
7713 case REFERENCE_TYPE
:
7714 return native_interpret_int (type
, ptr
, len
);
7717 return native_interpret_real (type
, ptr
, len
);
7719 case FIXED_POINT_TYPE
:
7720 return native_interpret_fixed (type
, ptr
, len
);
7723 return native_interpret_complex (type
, ptr
, len
);
7726 return native_interpret_vector (type
, ptr
, len
);
7733 /* Returns true if we can interpret the contents of a native encoding
7737 can_native_interpret_type_p (tree type
)
7739 switch (TREE_CODE (type
))
7745 case REFERENCE_TYPE
:
7746 case FIXED_POINT_TYPE
:
7756 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7757 TYPE at compile-time. If we're unable to perform the conversion
7758 return NULL_TREE. */
7761 fold_view_convert_expr (tree type
, tree expr
)
7763 /* We support up to 512-bit values (for V8DFmode). */
7764 unsigned char buffer
[64];
7767 /* Check that the host and target are sane. */
7768 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7771 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7775 return native_interpret_expr (type
, buffer
, len
);
7778 /* Build an expression for the address of T. Folds away INDIRECT_REF
7779 to avoid confusing the gimplify process. */
7782 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7784 /* The size of the object is not relevant when talking about its address. */
7785 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7786 t
= TREE_OPERAND (t
, 0);
7788 if (TREE_CODE (t
) == INDIRECT_REF
)
7790 t
= TREE_OPERAND (t
, 0);
7792 if (TREE_TYPE (t
) != ptrtype
)
7793 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7795 else if (TREE_CODE (t
) == MEM_REF
7796 && integer_zerop (TREE_OPERAND (t
, 1)))
7797 return TREE_OPERAND (t
, 0);
7798 else if (TREE_CODE (t
) == MEM_REF
7799 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7800 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7801 TREE_OPERAND (t
, 0),
7802 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7803 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7805 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7807 if (TREE_TYPE (t
) != ptrtype
)
7808 t
= fold_convert_loc (loc
, ptrtype
, t
);
7811 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7816 /* Build an expression for the address of T. */
7819 build_fold_addr_expr_loc (location_t loc
, tree t
)
7821 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7823 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7826 /* Fold a unary expression of code CODE and type TYPE with operand
7827 OP0. Return the folded expression if folding is successful.
7828 Otherwise, return NULL_TREE. */
7831 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7835 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7837 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7838 && TREE_CODE_LENGTH (code
) == 1);
7843 if (CONVERT_EXPR_CODE_P (code
)
7844 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7846 /* Don't use STRIP_NOPS, because signedness of argument type
7848 STRIP_SIGN_NOPS (arg0
);
7852 /* Strip any conversions that don't change the mode. This
7853 is safe for every expression, except for a comparison
7854 expression because its signedness is derived from its
7857 Note that this is done as an internal manipulation within
7858 the constant folder, in order to find the simplest
7859 representation of the arguments so that their form can be
7860 studied. In any cases, the appropriate type conversions
7861 should be put back in the tree that will get out of the
7866 if (CONSTANT_CLASS_P (arg0
))
7868 tree tem
= const_unop (code
, type
, arg0
);
7871 if (TREE_TYPE (tem
) != type
)
7872 tem
= fold_convert_loc (loc
, type
, tem
);
7878 tem
= generic_simplify (loc
, code
, type
, op0
);
7882 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7884 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7885 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7886 fold_build1_loc (loc
, code
, type
,
7887 fold_convert_loc (loc
, TREE_TYPE (op0
),
7888 TREE_OPERAND (arg0
, 1))));
7889 else if (TREE_CODE (arg0
) == COND_EXPR
)
7891 tree arg01
= TREE_OPERAND (arg0
, 1);
7892 tree arg02
= TREE_OPERAND (arg0
, 2);
7893 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7894 arg01
= fold_build1_loc (loc
, code
, type
,
7895 fold_convert_loc (loc
,
7896 TREE_TYPE (op0
), arg01
));
7897 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7898 arg02
= fold_build1_loc (loc
, code
, type
,
7899 fold_convert_loc (loc
,
7900 TREE_TYPE (op0
), arg02
));
7901 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7904 /* If this was a conversion, and all we did was to move into
7905 inside the COND_EXPR, bring it back out. But leave it if
7906 it is a conversion from integer to integer and the
7907 result precision is no wider than a word since such a
7908 conversion is cheap and may be optimized away by combine,
7909 while it couldn't if it were outside the COND_EXPR. Then return
7910 so we don't get into an infinite recursion loop taking the
7911 conversion out and then back in. */
7913 if ((CONVERT_EXPR_CODE_P (code
)
7914 || code
== NON_LVALUE_EXPR
)
7915 && TREE_CODE (tem
) == COND_EXPR
7916 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7917 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7918 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7919 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7920 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7921 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7922 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7924 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7925 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7926 || flag_syntax_only
))
7927 tem
= build1_loc (loc
, code
, type
,
7929 TREE_TYPE (TREE_OPERAND
7930 (TREE_OPERAND (tem
, 1), 0)),
7931 TREE_OPERAND (tem
, 0),
7932 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7933 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7941 case NON_LVALUE_EXPR
:
7942 if (!maybe_lvalue_p (op0
))
7943 return fold_convert_loc (loc
, type
, op0
);
7948 case FIX_TRUNC_EXPR
:
7949 if (COMPARISON_CLASS_P (op0
))
7951 /* If we have (type) (a CMP b) and type is an integral type, return
7952 new expression involving the new type. Canonicalize
7953 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7955 Do not fold the result as that would not simplify further, also
7956 folding again results in recursions. */
7957 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7958 return build2_loc (loc
, TREE_CODE (op0
), type
,
7959 TREE_OPERAND (op0
, 0),
7960 TREE_OPERAND (op0
, 1));
7961 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7962 && TREE_CODE (type
) != VECTOR_TYPE
)
7963 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7964 constant_boolean_node (true, type
),
7965 constant_boolean_node (false, type
));
7968 /* Handle (T *)&A.B.C for A being of type T and B and C
7969 living at offset zero. This occurs frequently in
7970 C++ upcasting and then accessing the base. */
7971 if (TREE_CODE (op0
) == ADDR_EXPR
7972 && POINTER_TYPE_P (type
)
7973 && handled_component_p (TREE_OPERAND (op0
, 0)))
7975 HOST_WIDE_INT bitsize
, bitpos
;
7978 int unsignedp
, volatilep
;
7979 tree base
= TREE_OPERAND (op0
, 0);
7980 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7981 &mode
, &unsignedp
, &volatilep
, false);
7982 /* If the reference was to a (constant) zero offset, we can use
7983 the address of the base if it has the same base type
7984 as the result type and the pointer type is unqualified. */
7985 if (! offset
&& bitpos
== 0
7986 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7987 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7988 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7989 return fold_convert_loc (loc
, type
,
7990 build_fold_addr_expr_loc (loc
, base
));
7993 if (TREE_CODE (op0
) == MODIFY_EXPR
7994 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7995 /* Detect assigning a bitfield. */
7996 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7998 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
8000 /* Don't leave an assignment inside a conversion
8001 unless assigning a bitfield. */
8002 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
8003 /* First do the assignment, then return converted constant. */
8004 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
8005 TREE_NO_WARNING (tem
) = 1;
8006 TREE_USED (tem
) = 1;
8010 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8011 constants (if x has signed type, the sign bit cannot be set
8012 in c). This folds extension into the BIT_AND_EXPR.
8013 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8014 very likely don't have maximal range for their precision and this
8015 transformation effectively doesn't preserve non-maximal ranges. */
8016 if (TREE_CODE (type
) == INTEGER_TYPE
8017 && TREE_CODE (op0
) == BIT_AND_EXPR
8018 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
8020 tree and_expr
= op0
;
8021 tree and0
= TREE_OPERAND (and_expr
, 0);
8022 tree and1
= TREE_OPERAND (and_expr
, 1);
8025 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
8026 || (TYPE_PRECISION (type
)
8027 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
8029 else if (TYPE_PRECISION (TREE_TYPE (and1
))
8030 <= HOST_BITS_PER_WIDE_INT
8031 && tree_fits_uhwi_p (and1
))
8033 unsigned HOST_WIDE_INT cst
;
8035 cst
= tree_to_uhwi (and1
);
8036 cst
&= HOST_WIDE_INT_M1U
8037 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
8038 change
= (cst
== 0);
8039 #ifdef LOAD_EXTEND_OP
8041 && !flag_syntax_only
8042 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
8045 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
8046 and0
= fold_convert_loc (loc
, uns
, and0
);
8047 and1
= fold_convert_loc (loc
, uns
, and1
);
8053 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
8054 TREE_OVERFLOW (and1
));
8055 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
8056 fold_convert_loc (loc
, type
, and0
), tem
);
8060 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8061 when one of the new casts will fold away. Conservatively we assume
8062 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8063 if (POINTER_TYPE_P (type
)
8064 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8065 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
8066 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8067 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
8068 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
8070 tree arg00
= TREE_OPERAND (arg0
, 0);
8071 tree arg01
= TREE_OPERAND (arg0
, 1);
8073 return fold_build_pointer_plus_loc
8074 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
8077 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8078 of the same precision, and X is an integer type not narrower than
8079 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8080 if (INTEGRAL_TYPE_P (type
)
8081 && TREE_CODE (op0
) == BIT_NOT_EXPR
8082 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8083 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
8084 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
8086 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
8087 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
8088 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
8089 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
8090 fold_convert_loc (loc
, type
, tem
));
8093 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8094 type of X and Y (integer types only). */
8095 if (INTEGRAL_TYPE_P (type
)
8096 && TREE_CODE (op0
) == MULT_EXPR
8097 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8098 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
8100 /* Be careful not to introduce new overflows. */
8102 if (TYPE_OVERFLOW_WRAPS (type
))
8105 mult_type
= unsigned_type_for (type
);
8107 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
8109 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
8110 fold_convert_loc (loc
, mult_type
,
8111 TREE_OPERAND (op0
, 0)),
8112 fold_convert_loc (loc
, mult_type
,
8113 TREE_OPERAND (op0
, 1)));
8114 return fold_convert_loc (loc
, type
, tem
);
8120 case VIEW_CONVERT_EXPR
:
8121 if (TREE_CODE (op0
) == MEM_REF
)
8122 return fold_build2_loc (loc
, MEM_REF
, type
,
8123 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
8128 tem
= fold_negate_expr (loc
, arg0
);
8130 return fold_convert_loc (loc
, type
, tem
);
8134 /* Convert fabs((double)float) into (double)fabsf(float). */
8135 if (TREE_CODE (arg0
) == NOP_EXPR
8136 && TREE_CODE (type
) == REAL_TYPE
)
8138 tree targ0
= strip_float_extensions (arg0
);
8140 return fold_convert_loc (loc
, type
,
8141 fold_build1_loc (loc
, ABS_EXPR
,
8145 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8146 else if (TREE_CODE (arg0
) == ABS_EXPR
)
8149 /* Strip sign ops from argument. */
8150 if (TREE_CODE (type
) == REAL_TYPE
)
8152 tem
= fold_strip_sign_ops (arg0
);
8154 return fold_build1_loc (loc
, ABS_EXPR
, type
,
8155 fold_convert_loc (loc
, type
, tem
));
8160 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8161 return fold_convert_loc (loc
, type
, arg0
);
8162 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8164 tree itype
= TREE_TYPE (type
);
8165 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
8166 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
8167 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
8168 negate_expr (ipart
));
8170 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8171 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8175 /* Convert ~ (-A) to A - 1. */
8176 if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
8177 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
8178 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0)),
8179 build_int_cst (type
, 1));
8180 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8181 else if (INTEGRAL_TYPE_P (type
)
8182 && ((TREE_CODE (arg0
) == MINUS_EXPR
8183 && integer_onep (TREE_OPERAND (arg0
, 1)))
8184 || (TREE_CODE (arg0
) == PLUS_EXPR
8185 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
8187 /* Perform the negation in ARG0's type and only then convert
8188 to TYPE as to avoid introducing undefined behavior. */
8189 tree t
= fold_build1_loc (loc
, NEGATE_EXPR
,
8190 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
8191 TREE_OPERAND (arg0
, 0));
8192 return fold_convert_loc (loc
, type
, t
);
8194 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8195 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8196 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8197 fold_convert_loc (loc
, type
,
8198 TREE_OPERAND (arg0
, 0)))))
8199 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
8200 fold_convert_loc (loc
, type
,
8201 TREE_OPERAND (arg0
, 1)));
8202 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8203 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8204 fold_convert_loc (loc
, type
,
8205 TREE_OPERAND (arg0
, 1)))))
8206 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
8207 fold_convert_loc (loc
, type
,
8208 TREE_OPERAND (arg0
, 0)), tem
);
8212 case TRUTH_NOT_EXPR
:
8213 /* Note that the operand of this must be an int
8214 and its values must be 0 or 1.
8215 ("true" is a fixed value perhaps depending on the language,
8216 but we don't handle values other than 1 correctly yet.) */
8217 tem
= fold_truth_not_expr (loc
, arg0
);
8220 return fold_convert_loc (loc
, type
, tem
);
8223 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8224 return fold_convert_loc (loc
, type
, arg0
);
8225 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8227 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8228 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8229 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8230 TREE_OPERAND (arg0
, 0)),
8231 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8232 TREE_OPERAND (arg0
, 1)));
8233 return fold_convert_loc (loc
, type
, tem
);
8235 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8237 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8238 tem
= fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8239 TREE_OPERAND (arg0
, 0));
8240 return fold_convert_loc (loc
, type
, tem
);
8242 if (TREE_CODE (arg0
) == CALL_EXPR
)
8244 tree fn
= get_callee_fndecl (arg0
);
8245 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8246 switch (DECL_FUNCTION_CODE (fn
))
8248 CASE_FLT_FN (BUILT_IN_CEXPI
):
8249 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
8251 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8261 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8262 return build_zero_cst (type
);
8263 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8265 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8266 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8267 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8268 TREE_OPERAND (arg0
, 0)),
8269 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8270 TREE_OPERAND (arg0
, 1)));
8271 return fold_convert_loc (loc
, type
, tem
);
8273 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8275 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8276 tem
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
8277 return fold_convert_loc (loc
, type
, negate_expr (tem
));
8279 if (TREE_CODE (arg0
) == CALL_EXPR
)
8281 tree fn
= get_callee_fndecl (arg0
);
8282 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8283 switch (DECL_FUNCTION_CODE (fn
))
8285 CASE_FLT_FN (BUILT_IN_CEXPI
):
8286 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
8288 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8298 /* Fold *&X to X if X is an lvalue. */
8299 if (TREE_CODE (op0
) == ADDR_EXPR
)
8301 tree op00
= TREE_OPERAND (op0
, 0);
8302 if ((TREE_CODE (op00
) == VAR_DECL
8303 || TREE_CODE (op00
) == PARM_DECL
8304 || TREE_CODE (op00
) == RESULT_DECL
)
8305 && !TREE_READONLY (op00
))
8312 } /* switch (code) */
8316 /* If the operation was a conversion do _not_ mark a resulting constant
8317 with TREE_OVERFLOW if the original constant was not. These conversions
8318 have implementation defined behavior and retaining the TREE_OVERFLOW
8319 flag here would confuse later passes such as VRP. */
/* Wrapper around fold_unary_loc: fold OP0 under CODE/TYPE, then copy the
   TREE_OVERFLOW flag from the original INTEGER_CST operand onto an
   INTEGER_CST result of a conversion, so a flag set during folding does
   not falsely mark the result as overflowed.
   NOTE(review): the extraction dropped interior lines here (gaps in the
   embedded original line numbers: 8319 -> 8321, 8324 -> 8326, and the
   trailing lines after 8329, presumably "return res;" and the closing
   brace -- confirm against the upstream file).  This span is not
   compilable as-is.  */
8321 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
8322 tree type
, tree op0
)
/* Fold first; RES may be NULL_TREE or a non-constant tree.  */
8324 tree res
= fold_unary_loc (loc
, code
, type
, op0
)
;
/* NOTE(review): the guarding "if (res" line (original 8325) is missing;
   the conjuncts below belong to that dropped condition.  */
8326 && TREE_CODE (res
) == INTEGER_CST
8327 && TREE_CODE (op0
) == INTEGER_CST
8328 && CONVERT_EXPR_CODE_P (code
))
/* Propagate the operand's overflow flag rather than keeping the freshly
   computed one -- see the header comment above about VRP confusion.  */
8329 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
8334 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8335 operands OP0 and OP1. LOC is the location of the resulting expression.
8336 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8337 Return the folded expression if folding is successful. Otherwise,
8338 return NULL_TREE. */
/* NOTE(review): this span was damaged in extraction -- interior lines were
   dropped (gaps in the embedded original numbering, e.g. 8338 -> 8340,
   8341 -> 8345, 8387 -> 8391, 8447 -> 8449) and single statements are
   split across physical lines.  Code below is byte-identical to the
   extraction; only comments were added or repaired.  */
8340 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
8341 tree arg0
, tree arg1
, tree op0
, tree op1
)
8345 /* We only do these simplifications if we are optimizing. */
8349 /* Check for things like (A || B) && (A || C). We can convert this
8350 to A || (B && C). Note that either operator can be any of the four
8351 truth and/or operations and the transformation will still be
8352 valid. Also note that we only care about order for the
8353 ANDIF and ORIF operators. If B contains side effects, this
8354 might change the truth-value of A. */
8355 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8356 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8357 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8358 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8359 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8360 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
/* Decompose both operands: argN = aN0 <op> aN1.  */
8362 tree a00
= TREE_OPERAND (arg0
, 0);
8363 tree a01
= TREE_OPERAND (arg0
, 1);
8364 tree a10
= TREE_OPERAND (arg1
, 0);
8365 tree a11
= TREE_OPERAND (arg1
, 1);
/* Reordering subterms is only safe when both the inner and outer
   operators are the non-short-circuit AND/OR forms.  */
8366 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8367 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8368 && (code
== TRUTH_AND_EXPR
8369 || code
== TRUTH_OR_EXPR
));
8371 if (operand_equal_p (a00
, a10
, 0))
8372 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8373 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8374 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8375 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8376 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8377 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8378 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8379 fold_build2_loc (loc
, code
, type
, a00
, a11
));
8381 /* This case is tricky because we must either have commutative
8382 operators or else A10 must not have side-effects. */
8384 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8385 && operand_equal_p (a01
, a11
, 0))
8386 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8387 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8391 /* See if we can build a range comparison. */
8392 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
/* Merge an inner short-circuit op of the opposite kind into one arm
   (ANDIF over ORIF or vice versa).  */
8395 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8396 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8398 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8400 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8403 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8404 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8406 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8408 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
8411 /* Check for the possibility of merging component references. If our
8412 lhs is another similar operation, try to merge its rhs with our
8413 rhs. Then try to merge our lhs and rhs. */
8414 if (TREE_CODE (arg0
) == code
8415 && 0 != (tem
= fold_truth_andor_1 (loc
, code
, type
,
8416 TREE_OPERAND (arg0
, 1), arg1
)))
8417 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8419 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
/* On targets where this macro is nonzero, prefer the non-short-circuit
   AND/OR forms (no branches) when operands are simple enough.  */
8422 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8423 && (code
== TRUTH_AND_EXPR
8424 || code
== TRUTH_ANDIF_EXPR
8425 || code
== TRUTH_OR_EXPR
8426 || code
== TRUTH_ORIF_EXPR
))
8428 enum tree_code ncode
, icode
;
/* NCODE: non-short-circuit variant; ICODE: short-circuit variant.  */
8430 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
8431 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
8432 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
8434 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8435 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8436 We don't want to pack more than two leafs to a non-IF AND/OR
8438 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8439 equal to IF-CODE, then we don't want to add right-hand operand.
8440 If the inner right-hand side of left-hand operand has
8441 side-effects, or isn't simple, then we can't add to it,
8442 as otherwise we might destroy if-sequence. */
8443 if (TREE_CODE (arg0
) == icode
8444 && simple_operand_p_2 (arg1
)
8445 /* Needed for sequence points to handle trappings, and ...
   (rest of comment lost in extraction) */
8447 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
8449 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
8451 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
8454 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8455 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8456 else if (TREE_CODE (arg1
) == icode
8457 && simple_operand_p_2 (arg0
)
8458 /* Needed for sequence points to handle trappings, and ...
   (rest of comment lost in extraction) */
8460 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
8462 tem
= fold_build2_loc (loc
, ncode
, type
,
8463 arg0
, TREE_OPERAND (arg1
, 0));
8464 return fold_build2_loc (loc
, icode
, type
, tem
,
8465 TREE_OPERAND (arg1
, 1));
8467 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8469 For sequence point consistency, we need to check for trapping,
8470 and side-effects. */
8471 else if (code
== icode
&& simple_operand_p_2 (arg0
)
8472 && simple_operand_p_2 (arg1
))
8473 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
8479 /* Fold a binary expression of code CODE and type TYPE with operands
8480 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8481 Return the folded expression if folding is successful. Otherwise,
8482 return NULL_TREE. */
/* NOTE(review): extraction dropped interior lines here (gaps 8482 -> 8485,
   8492 -> 8496, 8517 -> 8522) -- presumably the opening brace, the
   "else return NULL_TREE;" fallback for CODE not being MIN/MAX, and the
   final return; confirm against the upstream file before relying on the
   exact text.  Code kept byte-identical; comments only.  */
8485 fold_minmax (location_t loc
, enum tree_code code
, tree type
, tree op0
, tree op1
)
/* COMPL_CODE is the dual of CODE: MAX for MIN and vice versa.  */
8487 enum tree_code compl_code
;
8489 if (code
== MIN_EXPR
)
8490 compl_code
= MAX_EXPR
;
8491 else if (code
== MAX_EXPR
)
8492 compl_code
= MIN_EXPR
;
8496 /* MIN (MAX (a, b), b) == b. */
8497 if (TREE_CODE (op0
) == compl_code
8498 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
8499 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 0));
8501 /* MIN (MAX (b, a), b) == b. */
8502 if (TREE_CODE (op0
) == compl_code
8503 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
8504 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
8505 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 1));
8507 /* MIN (a, MAX (a, b)) == a. */
8508 if (TREE_CODE (op1
) == compl_code
8509 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
8510 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
8511 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 1));
8513 /* MIN (a, MAX (b, a)) == a. */
8514 if (TREE_CODE (op1
) == compl_code
8515 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
8516 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
8517 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 0));
8522 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8523 by changing CODE to reduce the magnitude of constants involved in
8524 ARG0 of the comparison.
8525 Returns a canonicalized comparison tree if a simplification was
8526 possible, otherwise returns NULL_TREE.
8527 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8528 valid if signed overflow is undefined. */
/* NOTE(review): this span lost many interior lines in extraction (gaps
   8536 -> 8540, 8550 -> 8553, 8554 -> 8557, 8570 -> 8572, 8579 -> 8583,
   8602 -> 8606, 8617 -> 8618, 8626 -> 8628...) -- in particular the code
   adjustments assigned in each branch of the if/else-if ladder and the
   final "swap" condition are missing.  Code kept byte-identical;
   comments only.  Confirm any reconstruction against upstream.  */
8531 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8532 tree arg0
, tree arg1
,
8533 bool *strict_overflow_p
)
8535 enum tree_code code0
= TREE_CODE (arg0
);
8536 tree t
, cst0
= NULL_TREE
;
8540 /* Match A +- CST code arg1 and CST code arg1. We can change the
8541 first form only if overflow is undefined. */
8542 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8543 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
8544 /* In principle pointers also have undefined overflow behavior,
8545 but that causes problems elsewhere. */
8546 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8547 && (code0
== MINUS_EXPR
8548 || code0
== PLUS_EXPR
)
8549 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8550 || code0
== INTEGER_CST
))
8553 /* Identify the constant in arg0 and its sign. */
8554 if (code0
== INTEGER_CST
)
8557 cst0
= TREE_OPERAND (arg0
, 1);
8558 sgn0
= tree_int_cst_sgn (cst0
);
8560 /* Overflowed constants and zero will cause problems. */
8561 if (integer_zerop (cst0
)
8562 || TREE_OVERFLOW (cst0
))
8565 /* See if we can reduce the magnitude of the constant in
8566 arg0 by changing the comparison code. */
8567 if (code0
== INTEGER_CST
)
8569 /* CST <= arg1 -> CST-1 < arg1. */
8570 if (code
== LE_EXPR
&& sgn0
== 1)
8572 /* -CST < arg1 -> -CST-1 <= arg1. */
8573 else if (code
== LT_EXPR
&& sgn0
== -1)
8575 /* CST > arg1 -> CST-1 >= arg1. */
8576 else if (code
== GT_EXPR
&& sgn0
== 1)
8578 /* -CST >= arg1 -> -CST-1 > arg1. */
8579 else if (code
== GE_EXPR
&& sgn0
== -1)
8583 /* arg1 code' CST' might be more canonical. */
8588 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8590 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8592 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8593 else if (code
== GT_EXPR
8594 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8596 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8597 else if (code
== LE_EXPR
8598 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8600 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8601 else if (code
== GE_EXPR
8602 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
/* Record that this rewrite is only valid when signed overflow is
   undefined (see the header comment).  */
8606 *strict_overflow_p
= true;
8609 /* Now build the constant reduced in magnitude. But not if that
8610 would produce one outside of its types range. */
8611 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8613 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8614 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8616 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8617 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8618 /* We cannot swap the comparison here as that would cause us to
8619 endlessly recurse. */
/* Reduce the magnitude of CST0 by one in the direction of zero.  */
8622 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8623 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8624 if (code0
!= INTEGER_CST
)
8625 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8626 t
= fold_convert (TREE_TYPE (arg1
), t
);
8628 /* If swapping might yield to a more canonical form, do so. */
8630 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, arg1
, t
);
8632 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8635 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8636 overflow further. Try to decrease the magnitude of constants involved
8637 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8638 and put sole constants at the second argument position.
8639 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
/* NOTE(review): interior lines lost in extraction (gaps 8639 -> 8642,
   8653 -> 8656, 8657 -> 8661, 8666 -> 8667, and the trailing return) --
   in particular the early "return t;" after the first attempt is
   missing.  Code kept byte-identical; comments only.  */
8642 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8643 tree arg0
, tree arg1
)
8646 bool strict_overflow_p
;
8647 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8648 "when reducing constant in comparison");
8650 /* Try canonicalization by simplifying arg0. */
8651 strict_overflow_p
= false;
8652 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8653 &strict_overflow_p
);
/* Warn only when the helper actually relied on undefined overflow.  */
8656 if (strict_overflow_p
)
8657 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8661 /* Try canonicalization by simplifying arg1 using the swapped
   comparison code ... (rest of comment lost in extraction) */
8663 code
= swap_tree_comparison (code
);
8664 strict_overflow_p
= false;
8665 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8666 &strict_overflow_p
);
8667 if (t
&& strict_overflow_p
)
8668 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8672 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8673 space. This is used to avoid issuing overflow warnings for
8674 expressions like &p->x which can not wrap. */
/* NOTE(review): interior lines lost in extraction (gaps 8674 -> 8677,
   8679 -> 8686, 8689 -> 8695, 8696 -> 8700, 8703 -> 8707, 8714 -> 8718) --
   the early returns, the wi_offset declaration/assignment from OFFSET,
   the overflow flag declaration, and size checks are missing.  Code kept
   byte-identical; comments only.  */
8677 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
/* Non-pointer bases cannot be reasoned about here.  */
8679 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8686 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
8687 if (offset
== NULL_TREE
)
8688 wi_offset
= wi::zero (precision
);
/* A symbolic or overflowed offset cannot be checked precisely.  */
8689 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
/* Convert the bit position to bytes and add it to the offset.  */
8695 wide_int units
= wi::shwi (bitpos
/ BITS_PER_UNIT
, precision
);
8696 wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
8700 if (!wi::fits_uhwi_p (total
))
8703 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8707 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
   ... (rest of comment lost in extraction) */
8709 if (TREE_CODE (base
) == ADDR_EXPR
)
8711 HOST_WIDE_INT base_size
;
8713 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8714 if (base_size
> 0 && size
< base_size
)
/* The pointer may wrap iff the total byte offset exceeds the object
   size.  */
8718 return total
.to_uhwi () > (unsigned HOST_WIDE_INT
) size
;
8721 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8722 kind INTEGER_CST. This makes sure to properly sign-extend the
   ... (rest of comment lost in extraction) */
/* NOTE(review): extraction dropped the opening brace (original 8727) and
   the fall-through "return w;" plus closing brace after 8731 -- confirm
   against the upstream file.  Code kept byte-identical; comments only.  */
8725 static HOST_WIDE_INT
8726 size_low_cst (const_tree t
)
/* Low word of the (possibly multi-element) INTEGER_CST.  */
8728 HOST_WIDE_INT w
= TREE_INT_CST_ELT (t
, 0);
8729 int prec
= TYPE_PRECISION (TREE_TYPE (t
));
/* Narrow types must be sign-extended from their precision so callers
   see the proper negative values.  */
8730 if (prec
< HOST_BITS_PER_WIDE_INT
)
8731 return sext_hwi (w
, prec
);
8735 /* Subroutine of fold_binary. This routine performs all of the
8736 transformations that are common to the equality/inequality
8737 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8738 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8739 fold_binary should call fold_binary. Fold a comparison with
8740 tree code CODE and type TYPE with operands OP0 and OP1. Return
8741 the folded comparison or NULL_TREE. */
8744 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8747 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
8748 tree arg0
, arg1
, tem
;
8753 STRIP_SIGN_NOPS (arg0
);
8754 STRIP_SIGN_NOPS (arg1
);
8756 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8757 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8759 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8760 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
8761 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8762 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8763 && TREE_CODE (arg1
) == INTEGER_CST
8764 && !TREE_OVERFLOW (arg1
))
8766 const enum tree_code
8767 reverse_op
= TREE_CODE (arg0
) == PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
;
8768 tree const1
= TREE_OPERAND (arg0
, 1);
8769 tree const2
= fold_convert_loc (loc
, TREE_TYPE (const1
), arg1
);
8770 tree variable
= TREE_OPERAND (arg0
, 0);
8771 tree new_const
= int_const_binop (reverse_op
, const2
, const1
);
8773 /* If the constant operation overflowed this can be
8774 simplified as a comparison against INT_MAX/INT_MIN. */
8775 if (TREE_OVERFLOW (new_const
)
8776 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
8778 int const1_sgn
= tree_int_cst_sgn (const1
);
8779 enum tree_code code2
= code
;
8781 /* Get the sign of the constant on the lhs if the
8782 operation were VARIABLE + CONST1. */
8783 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8784 const1_sgn
= -const1_sgn
;
8786 /* The sign of the constant determines if we overflowed
8787 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8788 Canonicalize to the INT_MIN overflow by swapping the comparison
8790 if (const1_sgn
== -1)
8791 code2
= swap_tree_comparison (code
);
8793 /* We now can look at the canonicalized case
8794 VARIABLE + 1 CODE2 INT_MIN
8795 and decide on the result. */
8802 omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8808 omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8817 fold_overflow_warning ("assuming signed overflow does not occur "
8818 "when changing X +- C1 cmp C2 to "
8820 WARN_STRICT_OVERFLOW_COMPARISON
);
8821 return fold_build2_loc (loc
, code
, type
, variable
, new_const
);
8825 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8826 if (TREE_CODE (arg0
) == MINUS_EXPR
8828 && integer_zerop (arg1
))
8830 /* ??? The transformation is valid for the other operators if overflow
8831 is undefined for the type, but performing it here badly interacts
8832 with the transformation in fold_cond_expr_with_comparison which
8833 attempts to synthesize ABS_EXPR. */
8835 fold_overflow_warning ("assuming signed overflow does not occur "
8836 "when changing X - Y cmp 0 to X cmp Y",
8837 WARN_STRICT_OVERFLOW_COMPARISON
);
8838 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
8839 TREE_OPERAND (arg0
, 1));
8842 /* For comparisons of pointers we can decompose it to a compile time
8843 comparison of the base objects and the offsets into the object.
8844 This requires at least one operand being an ADDR_EXPR or a
8845 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8846 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8847 && (TREE_CODE (arg0
) == ADDR_EXPR
8848 || TREE_CODE (arg1
) == ADDR_EXPR
8849 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8850 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8852 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8853 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8855 int volatilep
, unsignedp
;
8856 bool indirect_base0
= false, indirect_base1
= false;
8858 /* Get base and offset for the access. Strip ADDR_EXPR for
8859 get_inner_reference, but put it back by stripping INDIRECT_REF
8860 off the base object if possible. indirect_baseN will be true
8861 if baseN is not an address but refers to the object itself. */
8863 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8865 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8866 &bitsize
, &bitpos0
, &offset0
, &mode
,
8867 &unsignedp
, &volatilep
, false);
8868 if (TREE_CODE (base0
) == INDIRECT_REF
)
8869 base0
= TREE_OPERAND (base0
, 0);
8871 indirect_base0
= true;
8873 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8875 base0
= TREE_OPERAND (arg0
, 0);
8876 STRIP_SIGN_NOPS (base0
);
8877 if (TREE_CODE (base0
) == ADDR_EXPR
)
8879 base0
= TREE_OPERAND (base0
, 0);
8880 indirect_base0
= true;
8882 offset0
= TREE_OPERAND (arg0
, 1);
8883 if (tree_fits_shwi_p (offset0
))
8885 HOST_WIDE_INT off
= size_low_cst (offset0
);
8886 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8888 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8890 bitpos0
= off
* BITS_PER_UNIT
;
8891 offset0
= NULL_TREE
;
8897 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8899 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8900 &bitsize
, &bitpos1
, &offset1
, &mode
,
8901 &unsignedp
, &volatilep
, false);
8902 if (TREE_CODE (base1
) == INDIRECT_REF
)
8903 base1
= TREE_OPERAND (base1
, 0);
8905 indirect_base1
= true;
8907 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8909 base1
= TREE_OPERAND (arg1
, 0);
8910 STRIP_SIGN_NOPS (base1
);
8911 if (TREE_CODE (base1
) == ADDR_EXPR
)
8913 base1
= TREE_OPERAND (base1
, 0);
8914 indirect_base1
= true;
8916 offset1
= TREE_OPERAND (arg1
, 1);
8917 if (tree_fits_shwi_p (offset1
))
8919 HOST_WIDE_INT off
= size_low_cst (offset1
);
8920 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8922 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8924 bitpos1
= off
* BITS_PER_UNIT
;
8925 offset1
= NULL_TREE
;
8930 /* A local variable can never be pointed to by
8931 the default SSA name of an incoming parameter. */
8932 if ((TREE_CODE (arg0
) == ADDR_EXPR
8934 && TREE_CODE (base0
) == VAR_DECL
8935 && auto_var_in_fn_p (base0
, current_function_decl
)
8937 && TREE_CODE (base1
) == SSA_NAME
8938 && SSA_NAME_IS_DEFAULT_DEF (base1
)
8939 && TREE_CODE (SSA_NAME_VAR (base1
)) == PARM_DECL
)
8940 || (TREE_CODE (arg1
) == ADDR_EXPR
8942 && TREE_CODE (base1
) == VAR_DECL
8943 && auto_var_in_fn_p (base1
, current_function_decl
)
8945 && TREE_CODE (base0
) == SSA_NAME
8946 && SSA_NAME_IS_DEFAULT_DEF (base0
)
8947 && TREE_CODE (SSA_NAME_VAR (base0
)) == PARM_DECL
))
8949 if (code
== NE_EXPR
)
8950 return constant_boolean_node (1, type
);
8951 else if (code
== EQ_EXPR
)
8952 return constant_boolean_node (0, type
);
8954 /* If we have equivalent bases we might be able to simplify. */
8955 else if (indirect_base0
== indirect_base1
8956 && operand_equal_p (base0
, base1
, 0))
8958 /* We can fold this expression to a constant if the non-constant
8959 offset parts are equal. */
8960 if ((offset0
== offset1
8961 || (offset0
&& offset1
8962 && operand_equal_p (offset0
, offset1
, 0)))
8965 || (indirect_base0
&& DECL_P (base0
))
8966 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8970 && bitpos0
!= bitpos1
8971 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8972 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8973 fold_overflow_warning (("assuming pointer wraparound does not "
8974 "occur when comparing P +- C1 with "
8976 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8981 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8983 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8985 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8987 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8989 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8991 return constant_boolean_node (bitpos0
> bitpos1
, type
);
8995 /* We can simplify the comparison to a comparison of the variable
8996 offset parts if the constant offset parts are equal.
8997 Be careful to use signed sizetype here because otherwise we
8998 mess with array offsets in the wrong way. This is possible
8999 because pointer arithmetic is restricted to retain within an
9000 object and overflow on pointer differences is undefined as of
9001 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9002 else if (bitpos0
== bitpos1
9004 || (indirect_base0
&& DECL_P (base0
))
9005 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
9007 /* By converting to signed sizetype we cover middle-end pointer
9008 arithmetic which operates on unsigned pointer types of size
9009 type size and ARRAY_REF offsets which are properly sign or
9010 zero extended from their type in case it is narrower than
9012 if (offset0
== NULL_TREE
)
9013 offset0
= build_int_cst (ssizetype
, 0);
9015 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
9016 if (offset1
== NULL_TREE
)
9017 offset1
= build_int_cst (ssizetype
, 0);
9019 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
9022 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
9023 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
9024 fold_overflow_warning (("assuming pointer wraparound does not "
9025 "occur when comparing P +- C1 with "
9027 WARN_STRICT_OVERFLOW_COMPARISON
);
9029 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
9032 /* For non-equal bases we can simplify if they are addresses
9033 declarations with different addresses. */
9034 else if (indirect_base0
&& indirect_base1
9035 /* We know that !operand_equal_p (base0, base1, 0)
9036 because the if condition was false. But make
9037 sure two decls are not the same. */
9039 && TREE_CODE (arg0
) == ADDR_EXPR
9040 && TREE_CODE (arg1
) == ADDR_EXPR
9043 /* Watch for aliases. */
9044 && (!decl_in_symtab_p (base0
)
9045 || !decl_in_symtab_p (base1
)
9046 || !symtab_node::get_create (base0
)->equal_address_to
9047 (symtab_node::get_create (base1
))))
9049 if (code
== EQ_EXPR
)
9050 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
9052 else if (code
== NE_EXPR
)
9053 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
9056 /* For equal offsets we can simplify to a comparison of the
9058 else if (bitpos0
== bitpos1
9060 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
9062 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
9063 && ((offset0
== offset1
)
9064 || (offset0
&& offset1
9065 && operand_equal_p (offset0
, offset1
, 0))))
9068 base0
= build_fold_addr_expr_loc (loc
, base0
);
9070 base1
= build_fold_addr_expr_loc (loc
, base1
);
9071 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
9075 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9076 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9077 the resulting offset is smaller in absolute value than the
9078 original one and has the same sign. */
9079 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9080 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9081 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
9082 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9083 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9084 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
9085 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9086 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
9088 tree const1
= TREE_OPERAND (arg0
, 1);
9089 tree const2
= TREE_OPERAND (arg1
, 1);
9090 tree variable1
= TREE_OPERAND (arg0
, 0);
9091 tree variable2
= TREE_OPERAND (arg1
, 0);
9093 const char * const warnmsg
= G_("assuming signed overflow does not "
9094 "occur when combining constants around "
9097 /* Put the constant on the side where it doesn't overflow and is
9098 of lower absolute value and of same sign than before. */
9099 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9100 ? MINUS_EXPR
: PLUS_EXPR
,
9102 if (!TREE_OVERFLOW (cst
)
9103 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
9104 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
9106 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9107 return fold_build2_loc (loc
, code
, type
,
9109 fold_build2_loc (loc
, TREE_CODE (arg1
),
9114 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9115 ? MINUS_EXPR
: PLUS_EXPR
,
9117 if (!TREE_OVERFLOW (cst
)
9118 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
9119 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
9121 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9122 return fold_build2_loc (loc
, code
, type
,
9123 fold_build2_loc (loc
, TREE_CODE (arg0
),
9130 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9131 signed arithmetic case. That form is created by the compiler
9132 often enough for folding it to be of value. One example is in
9133 computing loop trip counts after Operator Strength Reduction. */
9134 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9135 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9136 && TREE_CODE (arg0
) == MULT_EXPR
9137 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9138 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9139 && integer_zerop (arg1
))
9141 tree const1
= TREE_OPERAND (arg0
, 1);
9142 tree const2
= arg1
; /* zero */
9143 tree variable1
= TREE_OPERAND (arg0
, 0);
9144 enum tree_code cmp_code
= code
;
9146 /* Handle unfolded multiplication by zero. */
9147 if (integer_zerop (const1
))
9148 return fold_build2_loc (loc
, cmp_code
, type
, const1
, const2
);
9150 fold_overflow_warning (("assuming signed overflow does not occur when "
9151 "eliminating multiplication in comparison "
9153 WARN_STRICT_OVERFLOW_COMPARISON
);
9155 /* If const1 is negative we swap the sense of the comparison. */
9156 if (tree_int_cst_sgn (const1
) < 0)
9157 cmp_code
= swap_tree_comparison (cmp_code
);
9159 return fold_build2_loc (loc
, cmp_code
, type
, variable1
, const2
);
9162 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
9166 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9168 tree targ0
= strip_float_extensions (arg0
);
9169 tree targ1
= strip_float_extensions (arg1
);
9170 tree newtype
= TREE_TYPE (targ0
);
9172 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9173 newtype
= TREE_TYPE (targ1
);
9175 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9176 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9177 return fold_build2_loc (loc
, code
, type
,
9178 fold_convert_loc (loc
, newtype
, targ0
),
9179 fold_convert_loc (loc
, newtype
, targ1
));
9181 if (TREE_CODE (arg1
) == REAL_CST
)
9183 REAL_VALUE_TYPE cst
;
9184 cst
= TREE_REAL_CST (arg1
);
9186 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9187 /* a CMP (-0) -> a CMP 0 */
9188 if (REAL_VALUE_MINUS_ZERO (cst
))
9189 return fold_build2_loc (loc
, code
, type
, arg0
,
9190 build_real (TREE_TYPE (arg1
), dconst0
));
9192 /* x != NaN is always true, other ops are always false. */
9193 if (REAL_VALUE_ISNAN (cst
)
9194 && ! HONOR_SNANS (arg1
))
9196 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
9197 return omit_one_operand_loc (loc
, type
, tem
, arg0
);
9200 /* Fold comparisons against infinity. */
9201 if (REAL_VALUE_ISINF (cst
)
9202 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
))))
9204 tem
= fold_inf_compare (loc
, code
, type
, arg0
, arg1
);
9205 if (tem
!= NULL_TREE
)
9210 /* If this is a comparison of a real constant with a PLUS_EXPR
9211 or a MINUS_EXPR of a real constant, we can convert it into a
9212 comparison with a revised real constant as long as no overflow
9213 occurs when unsafe_math_optimizations are enabled. */
9214 if (flag_unsafe_math_optimizations
9215 && TREE_CODE (arg1
) == REAL_CST
9216 && (TREE_CODE (arg0
) == PLUS_EXPR
9217 || TREE_CODE (arg0
) == MINUS_EXPR
)
9218 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
9219 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9220 ? MINUS_EXPR
: PLUS_EXPR
,
9221 arg1
, TREE_OPERAND (arg0
, 1)))
9222 && !TREE_OVERFLOW (tem
))
9223 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9225 /* Likewise, we can simplify a comparison of a real constant with
9226 a MINUS_EXPR whose first operand is also a real constant, i.e.
9227 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9228 floating-point types only if -fassociative-math is set. */
9229 if (flag_associative_math
9230 && TREE_CODE (arg1
) == REAL_CST
9231 && TREE_CODE (arg0
) == MINUS_EXPR
9232 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
9233 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
9235 && !TREE_OVERFLOW (tem
))
9236 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9237 TREE_OPERAND (arg0
, 1), tem
);
9239 /* Fold comparisons against built-in math functions. */
9240 if (TREE_CODE (arg1
) == REAL_CST
9241 && flag_unsafe_math_optimizations
9242 && ! flag_errno_math
)
9244 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
9246 if (fcode
!= END_BUILTINS
)
9248 tem
= fold_mathfn_compare (loc
, fcode
, code
, type
, arg0
, arg1
);
9249 if (tem
!= NULL_TREE
)
9255 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
9256 && CONVERT_EXPR_P (arg0
))
9258 /* If we are widening one operand of an integer comparison,
9259 see if the other operand is similarly being widened. Perhaps we
9260 can do the comparison in the narrower type. */
9261 tem
= fold_widened_comparison (loc
, code
, type
, arg0
, arg1
);
9265 /* Or if we are changing signedness. */
9266 tem
= fold_sign_changed_comparison (loc
, code
, type
, arg0
, arg1
);
9271 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9272 constant, we can simplify it. */
9273 if (TREE_CODE (arg1
) == INTEGER_CST
9274 && (TREE_CODE (arg0
) == MIN_EXPR
9275 || TREE_CODE (arg0
) == MAX_EXPR
)
9276 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9278 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
9283 /* Simplify comparison of something with itself. (For IEEE
9284 floating-point, we can only do some of these simplifications.) */
9285 if (operand_equal_p (arg0
, arg1
, 0))
9290 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9291 || ! HONOR_NANS (arg0
))
9292 return constant_boolean_node (1, type
);
9297 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9298 || ! HONOR_NANS (arg0
))
9299 return constant_boolean_node (1, type
);
9300 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg1
);
9303 /* For NE, we can only do this simplification if integer
9304 or we don't honor IEEE floating point NaNs. */
9305 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
9306 && HONOR_NANS (arg0
))
9308 /* ... fall through ... */
9311 return constant_boolean_node (0, type
);
9317 /* If we are comparing an expression that just has comparisons
9318 of two integer values, arithmetic expressions of those comparisons,
9319 and constants, we can simplify it. There are only three cases
9320 to check: the two values can either be equal, the first can be
9321 greater, or the second can be greater. Fold the expression for
9322 those three values. Since each value must be 0 or 1, we have
9323 eight possibilities, each of which corresponds to the constant 0
9324 or 1 or one of the six possible comparisons.
9326 This handles common cases like (a > b) == 0 but also handles
9327 expressions like ((x > y) - (y > x)) > 0, which supposedly
9328 occur in macroized code. */
9330 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
9332 tree cval1
= 0, cval2
= 0;
9335 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
9336 /* Don't handle degenerate cases here; they should already
9337 have been handled anyway. */
9338 && cval1
!= 0 && cval2
!= 0
9339 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
9340 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
9341 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
9342 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
9343 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
9344 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
9345 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
9347 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
9348 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
9350 /* We can't just pass T to eval_subst in case cval1 or cval2
9351 was the same as ARG1. */
9354 = fold_build2_loc (loc
, code
, type
,
9355 eval_subst (loc
, arg0
, cval1
, maxval
,
9359 = fold_build2_loc (loc
, code
, type
,
9360 eval_subst (loc
, arg0
, cval1
, maxval
,
9364 = fold_build2_loc (loc
, code
, type
,
9365 eval_subst (loc
, arg0
, cval1
, minval
,
9369 /* All three of these results should be 0 or 1. Confirm they are.
9370 Then use those values to select the proper code to use. */
9372 if (TREE_CODE (high_result
) == INTEGER_CST
9373 && TREE_CODE (equal_result
) == INTEGER_CST
9374 && TREE_CODE (low_result
) == INTEGER_CST
)
9376 /* Make a 3-bit mask with the high-order bit being the
9377 value for `>', the next for '=', and the low for '<'. */
9378 switch ((integer_onep (high_result
) * 4)
9379 + (integer_onep (equal_result
) * 2)
9380 + integer_onep (low_result
))
9384 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
9405 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
9410 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
9411 SET_EXPR_LOCATION (tem
, loc
);
9414 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
9419 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9420 into a single range test. */
9421 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
9422 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
9423 && TREE_CODE (arg1
) == INTEGER_CST
9424 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9425 && !integer_zerop (TREE_OPERAND (arg0
, 1))
9426 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
9427 && !TREE_OVERFLOW (arg1
))
9429 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
9430 if (tem
!= NULL_TREE
)
9434 /* Fold ~X op ~Y as Y op X. */
9435 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9436 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9438 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9439 return fold_build2_loc (loc
, code
, type
,
9440 fold_convert_loc (loc
, cmp_type
,
9441 TREE_OPERAND (arg1
, 0)),
9442 TREE_OPERAND (arg0
, 0));
9445 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9446 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9447 && (TREE_CODE (arg1
) == INTEGER_CST
|| TREE_CODE (arg1
) == VECTOR_CST
))
9449 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9450 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9451 TREE_OPERAND (arg0
, 0),
9452 fold_build1_loc (loc
, BIT_NOT_EXPR
, cmp_type
,
9453 fold_convert_loc (loc
, cmp_type
, arg1
)));
9460 /* Subroutine of fold_binary. Optimize complex multiplications of the
9461 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9462 argument EXPR represents the expression "z" of type TYPE. */
9465 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
9467 tree itype
= TREE_TYPE (type
);
9468 tree rpart
, ipart
, tem
;
9470 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
9472 rpart
= TREE_OPERAND (expr
, 0);
9473 ipart
= TREE_OPERAND (expr
, 1);
9475 else if (TREE_CODE (expr
) == COMPLEX_CST
)
9477 rpart
= TREE_REALPART (expr
);
9478 ipart
= TREE_IMAGPART (expr
);
9482 expr
= save_expr (expr
);
9483 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
9484 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
9487 rpart
= save_expr (rpart
);
9488 ipart
= save_expr (ipart
);
9489 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
9490 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
9491 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
9492 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
9493 build_zero_cst (itype
));
9497 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9498 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9499 guarantees that P and N have the same least significant log2(M) bits.
9500 N is not otherwise constrained. In particular, N is not normalized to
9501 0 <= N < M as is common. In general, the precise value of P is unknown.
9502 M is chosen as large as possible such that constant N can be determined.
9504 Returns M and sets *RESIDUE to N.
9506 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9507 account. This is not always possible due to PR 35705.
9510 static unsigned HOST_WIDE_INT
9511 get_pointer_modulus_and_residue (tree expr
, unsigned HOST_WIDE_INT
*residue
,
9512 bool allow_func_align
)
9514 enum tree_code code
;
9518 code
= TREE_CODE (expr
);
9519 if (code
== ADDR_EXPR
)
9521 unsigned int bitalign
;
9522 get_object_alignment_1 (TREE_OPERAND (expr
, 0), &bitalign
, residue
);
9523 *residue
/= BITS_PER_UNIT
;
9524 return bitalign
/ BITS_PER_UNIT
;
9526 else if (code
== POINTER_PLUS_EXPR
)
9529 unsigned HOST_WIDE_INT modulus
;
9530 enum tree_code inner_code
;
9532 op0
= TREE_OPERAND (expr
, 0);
9534 modulus
= get_pointer_modulus_and_residue (op0
, residue
,
9537 op1
= TREE_OPERAND (expr
, 1);
9539 inner_code
= TREE_CODE (op1
);
9540 if (inner_code
== INTEGER_CST
)
9542 *residue
+= TREE_INT_CST_LOW (op1
);
9545 else if (inner_code
== MULT_EXPR
)
9547 op1
= TREE_OPERAND (op1
, 1);
9548 if (TREE_CODE (op1
) == INTEGER_CST
)
9550 unsigned HOST_WIDE_INT align
;
9552 /* Compute the greatest power-of-2 divisor of op1. */
9553 align
= TREE_INT_CST_LOW (op1
);
9556 /* If align is non-zero and less than *modulus, replace
9557 *modulus with align., If align is 0, then either op1 is 0
9558 or the greatest power-of-2 divisor of op1 doesn't fit in an
9559 unsigned HOST_WIDE_INT. In either case, no additional
9560 constraint is imposed. */
9562 modulus
= MIN (modulus
, align
);
9569 /* If we get here, we were unable to determine anything useful about the
9574 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9575 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9578 vec_cst_ctor_to_array (tree arg
, tree
*elts
)
9580 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)), i
;
9582 if (TREE_CODE (arg
) == VECTOR_CST
)
9584 for (i
= 0; i
< VECTOR_CST_NELTS (arg
); ++i
)
9585 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
9587 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
9589 constructor_elt
*elt
;
9591 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg
), i
, elt
)
9592 if (i
>= nelts
|| TREE_CODE (TREE_TYPE (elt
->value
)) == VECTOR_TYPE
)
9595 elts
[i
] = elt
->value
;
9599 for (; i
< nelts
; i
++)
9601 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
9605 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9606 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9607 NULL_TREE otherwise. */
9610 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const unsigned char *sel
)
9612 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
9614 bool need_ctor
= false;
9616 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
9617 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
);
9618 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
9619 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
9622 elts
= XALLOCAVEC (tree
, nelts
* 3);
9623 if (!vec_cst_ctor_to_array (arg0
, elts
)
9624 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
))
9627 for (i
= 0; i
< nelts
; i
++)
9629 if (!CONSTANT_CLASS_P (elts
[sel
[i
]]))
9631 elts
[i
+ 2 * nelts
] = unshare_expr (elts
[sel
[i
]]);
9636 vec
<constructor_elt
, va_gc
> *v
;
9637 vec_alloc (v
, nelts
);
9638 for (i
= 0; i
< nelts
; i
++)
9639 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, elts
[2 * nelts
+ i
]);
9640 return build_constructor (type
, v
);
9643 return build_vector (type
, &elts
[2 * nelts
]);
9646 /* Try to fold a pointer difference of type TYPE two address expressions of
9647 array references AREF0 and AREF1 using location LOC. Return a
9648 simplified expression for the difference or NULL_TREE. */
9651 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
9652 tree aref0
, tree aref1
)
9654 tree base0
= TREE_OPERAND (aref0
, 0);
9655 tree base1
= TREE_OPERAND (aref1
, 0);
9656 tree base_offset
= build_int_cst (type
, 0);
9658 /* If the bases are array references as well, recurse. If the bases
9659 are pointer indirections compute the difference of the pointers.
9660 If the bases are equal, we are set. */
9661 if ((TREE_CODE (base0
) == ARRAY_REF
9662 && TREE_CODE (base1
) == ARRAY_REF
9664 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
)))
9665 || (INDIRECT_REF_P (base0
)
9666 && INDIRECT_REF_P (base1
)
9667 && (base_offset
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
9668 TREE_OPERAND (base0
, 0),
9669 TREE_OPERAND (base1
, 0))))
9670 || operand_equal_p (base0
, base1
, 0))
9672 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
9673 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
9674 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
9675 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
9676 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9678 fold_build2_loc (loc
, MULT_EXPR
, type
,
9684 /* If the real or vector real constant CST of type TYPE has an exact
9685 inverse, return it, else return NULL. */
9688 exact_inverse (tree type
, tree cst
)
9691 tree unit_type
, *elts
;
9693 unsigned vec_nelts
, i
;
9695 switch (TREE_CODE (cst
))
9698 r
= TREE_REAL_CST (cst
);
9700 if (exact_real_inverse (TYPE_MODE (type
), &r
))
9701 return build_real (type
, r
);
9706 vec_nelts
= VECTOR_CST_NELTS (cst
);
9707 elts
= XALLOCAVEC (tree
, vec_nelts
);
9708 unit_type
= TREE_TYPE (type
);
9709 mode
= TYPE_MODE (unit_type
);
9711 for (i
= 0; i
< vec_nelts
; i
++)
9713 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
9714 if (!exact_real_inverse (mode
, &r
))
9716 elts
[i
] = build_real (unit_type
, r
);
9719 return build_vector (type
, elts
);
9726 /* Mask out the tz least significant bits of X of type TYPE where
9727 tz is the number of trailing zeroes in Y. */
9729 mask_with_tz (tree type
, const wide_int
&x
, const wide_int
&y
)
9731 int tz
= wi::ctz (y
);
9733 return wi::mask (tz
, true, TYPE_PRECISION (type
)) & x
;
9737 /* Return true when T is an address and is known to be nonzero.
9738 For floating point we further ensure that T is not denormal.
9739 Similar logic is present in nonzero_address in rtlanal.h.
9741 If the return value is based on the assumption that signed overflow
9742 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9743 change *STRICT_OVERFLOW_P. */
9746 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
9748 tree type
= TREE_TYPE (t
);
9749 enum tree_code code
;
9751 /* Doing something useful for floating point would need more work. */
9752 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
9755 code
= TREE_CODE (t
);
9756 switch (TREE_CODE_CLASS (code
))
9759 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9762 case tcc_comparison
:
9763 return tree_binary_nonzero_warnv_p (code
, type
,
9764 TREE_OPERAND (t
, 0),
9765 TREE_OPERAND (t
, 1),
9768 case tcc_declaration
:
9770 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
9778 case TRUTH_NOT_EXPR
:
9779 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9782 case TRUTH_AND_EXPR
:
9784 case TRUTH_XOR_EXPR
:
9785 return tree_binary_nonzero_warnv_p (code
, type
,
9786 TREE_OPERAND (t
, 0),
9787 TREE_OPERAND (t
, 1),
9795 case WITH_SIZE_EXPR
:
9797 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
9802 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
9806 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
9811 tree fndecl
= get_callee_fndecl (t
);
9812 if (!fndecl
) return false;
9813 if (flag_delete_null_pointer_checks
&& !flag_check_new
9814 && DECL_IS_OPERATOR_NEW (fndecl
)
9815 && !TREE_NOTHROW (fndecl
))
9817 if (flag_delete_null_pointer_checks
9818 && lookup_attribute ("returns_nonnull",
9819 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
9821 return alloca_call_p (t
);
9830 /* Return true when T is an address and is known to be nonzero.
9831 Handle warnings about undefined signed overflow. */
9834 tree_expr_nonzero_p (tree t
)
9836 bool ret
, strict_overflow_p
;
9838 strict_overflow_p
= false;
9839 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
9840 if (strict_overflow_p
)
9841 fold_overflow_warning (("assuming signed overflow does not occur when "
9842 "determining that expression is always "
9844 WARN_STRICT_OVERFLOW_MISC
);
9848 /* Fold a binary expression of code CODE and type TYPE with operands
9849 OP0 and OP1. LOC is the location of the resulting expression.
9850 Return the folded expression if folding is successful. Otherwise,
9851 return NULL_TREE. */
9854 fold_binary_loc (location_t loc
,
9855 enum tree_code code
, tree type
, tree op0
, tree op1
)
9857 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9858 tree arg0
, arg1
, tem
;
9859 tree t1
= NULL_TREE
;
9860 bool strict_overflow_p
;
9863 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9864 && TREE_CODE_LENGTH (code
) == 2
9866 && op1
!= NULL_TREE
);
9871 /* Strip any conversions that don't change the mode. This is
9872 safe for every expression, except for a comparison expression
9873 because its signedness is derived from its operands. So, in
9874 the latter case, only strip conversions that don't change the
9875 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9878 Note that this is done as an internal manipulation within the
9879 constant folder, in order to find the simplest representation
9880 of the arguments so that their form can be studied. In any
9881 cases, the appropriate type conversions should be put back in
9882 the tree that will get out of the constant folder. */
9884 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9886 STRIP_SIGN_NOPS (arg0
);
9887 STRIP_SIGN_NOPS (arg1
);
9895 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9896 constant but we can't do arithmetic on them. */
9897 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
9899 tem
= const_binop (code
, type
, arg0
, arg1
);
9900 if (tem
!= NULL_TREE
)
9902 if (TREE_TYPE (tem
) != type
)
9903 tem
= fold_convert_loc (loc
, type
, tem
);
9908 /* If this is a commutative operation, and ARG0 is a constant, move it
9909 to ARG1 to reduce the number of tests below. */
9910 if (commutative_tree_code (code
)
9911 && tree_swap_operands_p (arg0
, arg1
, true))
9912 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9914 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9915 to ARG1 to reduce the number of tests below. */
9916 if (kind
== tcc_comparison
9917 && tree_swap_operands_p (arg0
, arg1
, true))
9918 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9920 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9924 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9926 First check for cases where an arithmetic operation is applied to a
9927 compound, conditional, or comparison operation. Push the arithmetic
9928 operation inside the compound or conditional to see if any folding
9929 can then be done. Convert comparison to conditional for this purpose.
9930 The also optimizes non-constant cases that used to be done in
9933 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9934 one of the operands is a comparison and the other is a comparison, a
9935 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9936 code below would make the expression more complex. Change it to a
9937 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9938 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9940 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9941 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9942 && TREE_CODE (type
) != VECTOR_TYPE
9943 && ((truth_value_p (TREE_CODE (arg0
))
9944 && (truth_value_p (TREE_CODE (arg1
))
9945 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9946 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9947 || (truth_value_p (TREE_CODE (arg1
))
9948 && (truth_value_p (TREE_CODE (arg0
))
9949 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9950 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9952 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9953 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9956 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9957 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9959 if (code
== EQ_EXPR
)
9960 tem
= invert_truthvalue_loc (loc
, tem
);
9962 return fold_convert_loc (loc
, type
, tem
);
9965 if (TREE_CODE_CLASS (code
) == tcc_binary
9966 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9968 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9970 tem
= fold_build2_loc (loc
, code
, type
,
9971 fold_convert_loc (loc
, TREE_TYPE (op0
),
9972 TREE_OPERAND (arg0
, 1)), op1
);
9973 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9976 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9977 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9979 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9980 fold_convert_loc (loc
, TREE_TYPE (op1
),
9981 TREE_OPERAND (arg1
, 1)));
9982 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9986 if (TREE_CODE (arg0
) == COND_EXPR
9987 || TREE_CODE (arg0
) == VEC_COND_EXPR
9988 || COMPARISON_CLASS_P (arg0
))
9990 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9992 /*cond_first_p=*/1);
9993 if (tem
!= NULL_TREE
)
9997 if (TREE_CODE (arg1
) == COND_EXPR
9998 || TREE_CODE (arg1
) == VEC_COND_EXPR
9999 || COMPARISON_CLASS_P (arg1
))
10001 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10003 /*cond_first_p=*/0);
10004 if (tem
!= NULL_TREE
)
10012 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10013 if (TREE_CODE (arg0
) == ADDR_EXPR
10014 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
10016 tree iref
= TREE_OPERAND (arg0
, 0);
10017 return fold_build2 (MEM_REF
, type
,
10018 TREE_OPERAND (iref
, 0),
10019 int_const_binop (PLUS_EXPR
, arg1
,
10020 TREE_OPERAND (iref
, 1)));
10023 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10024 if (TREE_CODE (arg0
) == ADDR_EXPR
10025 && handled_component_p (TREE_OPERAND (arg0
, 0)))
10028 HOST_WIDE_INT coffset
;
10029 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
10033 return fold_build2 (MEM_REF
, type
,
10034 build_fold_addr_expr (base
),
10035 int_const_binop (PLUS_EXPR
, arg1
,
10036 size_int (coffset
)));
10041 case POINTER_PLUS_EXPR
:
10042 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10043 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10044 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
10045 return fold_convert_loc (loc
, type
,
10046 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
10047 fold_convert_loc (loc
, sizetype
,
10049 fold_convert_loc (loc
, sizetype
,
10055 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
10057 /* X + (X / CST) * -CST is X % CST. */
10058 if (TREE_CODE (arg1
) == MULT_EXPR
10059 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10060 && operand_equal_p (arg0
,
10061 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
10063 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
10064 tree cst1
= TREE_OPERAND (arg1
, 1);
10065 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
10067 if (sum
&& integer_zerop (sum
))
10068 return fold_convert_loc (loc
, type
,
10069 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
10070 TREE_TYPE (arg0
), arg0
,
10075 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10076 one. Make sure the type is not saturating and has the signedness of
10077 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10078 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10079 if ((TREE_CODE (arg0
) == MULT_EXPR
10080 || TREE_CODE (arg1
) == MULT_EXPR
)
10081 && !TYPE_SATURATING (type
)
10082 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10083 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10084 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10086 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10091 if (! FLOAT_TYPE_P (type
))
10093 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10094 with a constant, and the two constants have no bits in common,
10095 we should treat this as a BIT_IOR_EXPR since this may produce more
10096 simplifications. */
10097 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10098 && TREE_CODE (arg1
) == BIT_AND_EXPR
10099 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10100 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
10101 && wi::bit_and (TREE_OPERAND (arg0
, 1),
10102 TREE_OPERAND (arg1
, 1)) == 0)
10104 code
= BIT_IOR_EXPR
;
10108 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10109 (plus (plus (mult) (mult)) (foo)) so that we can
10110 take advantage of the factoring cases below. */
10111 if (ANY_INTEGRAL_TYPE_P (type
)
10112 && TYPE_OVERFLOW_WRAPS (type
)
10113 && (((TREE_CODE (arg0
) == PLUS_EXPR
10114 || TREE_CODE (arg0
) == MINUS_EXPR
)
10115 && TREE_CODE (arg1
) == MULT_EXPR
)
10116 || ((TREE_CODE (arg1
) == PLUS_EXPR
10117 || TREE_CODE (arg1
) == MINUS_EXPR
)
10118 && TREE_CODE (arg0
) == MULT_EXPR
)))
10120 tree parg0
, parg1
, parg
, marg
;
10121 enum tree_code pcode
;
10123 if (TREE_CODE (arg1
) == MULT_EXPR
)
10124 parg
= arg0
, marg
= arg1
;
10126 parg
= arg1
, marg
= arg0
;
10127 pcode
= TREE_CODE (parg
);
10128 parg0
= TREE_OPERAND (parg
, 0);
10129 parg1
= TREE_OPERAND (parg
, 1);
10130 STRIP_NOPS (parg0
);
10131 STRIP_NOPS (parg1
);
10133 if (TREE_CODE (parg0
) == MULT_EXPR
10134 && TREE_CODE (parg1
) != MULT_EXPR
)
10135 return fold_build2_loc (loc
, pcode
, type
,
10136 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10137 fold_convert_loc (loc
, type
,
10139 fold_convert_loc (loc
, type
,
10141 fold_convert_loc (loc
, type
, parg1
));
10142 if (TREE_CODE (parg0
) != MULT_EXPR
10143 && TREE_CODE (parg1
) == MULT_EXPR
)
10145 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10146 fold_convert_loc (loc
, type
, parg0
),
10147 fold_build2_loc (loc
, pcode
, type
,
10148 fold_convert_loc (loc
, type
, marg
),
10149 fold_convert_loc (loc
, type
,
10155 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10156 to __complex__ ( x, y ). This is not the same for SNaNs or
10157 if signed zeros are involved. */
10158 if (!HONOR_SNANS (element_mode (arg0
))
10159 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10160 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10162 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10163 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10164 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10165 bool arg0rz
= false, arg0iz
= false;
10166 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10167 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10169 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10170 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10171 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10173 tree rp
= arg1r
? arg1r
10174 : build1 (REALPART_EXPR
, rtype
, arg1
);
10175 tree ip
= arg0i
? arg0i
10176 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10177 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10179 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10181 tree rp
= arg0r
? arg0r
10182 : build1 (REALPART_EXPR
, rtype
, arg0
);
10183 tree ip
= arg1i
? arg1i
10184 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
10185 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10190 if (flag_unsafe_math_optimizations
10191 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10192 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10193 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10196 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10197 We associate floats only if the user has specified
10198 -fassociative-math. */
10199 if (flag_associative_math
10200 && TREE_CODE (arg1
) == PLUS_EXPR
10201 && TREE_CODE (arg0
) != MULT_EXPR
)
10203 tree tree10
= TREE_OPERAND (arg1
, 0);
10204 tree tree11
= TREE_OPERAND (arg1
, 1);
10205 if (TREE_CODE (tree11
) == MULT_EXPR
10206 && TREE_CODE (tree10
) == MULT_EXPR
)
10209 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
10210 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
10213 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10214 We associate floats only if the user has specified
10215 -fassociative-math. */
10216 if (flag_associative_math
10217 && TREE_CODE (arg0
) == PLUS_EXPR
10218 && TREE_CODE (arg1
) != MULT_EXPR
)
10220 tree tree00
= TREE_OPERAND (arg0
, 0);
10221 tree tree01
= TREE_OPERAND (arg0
, 1);
10222 if (TREE_CODE (tree01
) == MULT_EXPR
10223 && TREE_CODE (tree00
) == MULT_EXPR
)
10226 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
10227 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
10233 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10234 is a rotate of A by C1 bits. */
10235 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10236 is a rotate of A by B bits. */
10238 enum tree_code code0
, code1
;
10240 code0
= TREE_CODE (arg0
);
10241 code1
= TREE_CODE (arg1
);
10242 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
10243 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
10244 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10245 TREE_OPERAND (arg1
, 0), 0)
10246 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10247 TYPE_UNSIGNED (rtype
))
10248 /* Only create rotates in complete modes. Other cases are not
10249 expanded properly. */
10250 && (element_precision (rtype
)
10251 == element_precision (TYPE_MODE (rtype
))))
10253 tree tree01
, tree11
;
10254 enum tree_code code01
, code11
;
10256 tree01
= TREE_OPERAND (arg0
, 1);
10257 tree11
= TREE_OPERAND (arg1
, 1);
10258 STRIP_NOPS (tree01
);
10259 STRIP_NOPS (tree11
);
10260 code01
= TREE_CODE (tree01
);
10261 code11
= TREE_CODE (tree11
);
10262 if (code01
== INTEGER_CST
10263 && code11
== INTEGER_CST
10264 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
10265 == element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
10267 tem
= build2_loc (loc
, LROTATE_EXPR
,
10268 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10269 TREE_OPERAND (arg0
, 0),
10270 code0
== LSHIFT_EXPR
10271 ? TREE_OPERAND (arg0
, 1)
10272 : TREE_OPERAND (arg1
, 1));
10273 return fold_convert_loc (loc
, type
, tem
);
10275 else if (code11
== MINUS_EXPR
)
10277 tree tree110
, tree111
;
10278 tree110
= TREE_OPERAND (tree11
, 0);
10279 tree111
= TREE_OPERAND (tree11
, 1);
10280 STRIP_NOPS (tree110
);
10281 STRIP_NOPS (tree111
);
10282 if (TREE_CODE (tree110
) == INTEGER_CST
10283 && 0 == compare_tree_int (tree110
,
10285 (TREE_TYPE (TREE_OPERAND
10287 && operand_equal_p (tree01
, tree111
, 0))
10289 fold_convert_loc (loc
, type
,
10290 build2 ((code0
== LSHIFT_EXPR
10293 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10294 TREE_OPERAND (arg0
, 0),
10295 TREE_OPERAND (arg0
, 1)));
10297 else if (code01
== MINUS_EXPR
)
10299 tree tree010
, tree011
;
10300 tree010
= TREE_OPERAND (tree01
, 0);
10301 tree011
= TREE_OPERAND (tree01
, 1);
10302 STRIP_NOPS (tree010
);
10303 STRIP_NOPS (tree011
);
10304 if (TREE_CODE (tree010
) == INTEGER_CST
10305 && 0 == compare_tree_int (tree010
,
10307 (TREE_TYPE (TREE_OPERAND
10309 && operand_equal_p (tree11
, tree011
, 0))
10310 return fold_convert_loc
10312 build2 ((code0
!= LSHIFT_EXPR
10315 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10316 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1)));
10322 /* In most languages, can't associate operations on floats through
10323 parentheses. Rather than remember where the parentheses were, we
10324 don't associate floats at all, unless the user has specified
10325 -fassociative-math.
10326 And, we need to make sure type is not saturating. */
10328 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
10329 && !TYPE_SATURATING (type
))
10331 tree var0
, con0
, lit0
, minus_lit0
;
10332 tree var1
, con1
, lit1
, minus_lit1
;
10336 /* Split both trees into variables, constants, and literals. Then
10337 associate each group together, the constants with literals,
10338 then the result with variables. This increases the chances of
10339 literals being recombined later and of generating relocatable
10340 expressions for the sum of a constant and literal. */
10341 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
10342 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
10343 code
== MINUS_EXPR
);
10345 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10346 if (code
== MINUS_EXPR
)
10349 /* With undefined overflow prefer doing association in a type
10350 which wraps on overflow, if that is one of the operand types. */
10351 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10352 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
10354 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10355 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10356 atype
= TREE_TYPE (arg0
);
10357 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10358 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
10359 atype
= TREE_TYPE (arg1
);
10360 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
10363 /* With undefined overflow we can only associate constants with one
10364 variable, and constants whose association doesn't overflow. */
10365 if ((POINTER_TYPE_P (atype
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10366 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
10373 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
10374 tmp0
= TREE_OPERAND (tmp0
, 0);
10375 if (CONVERT_EXPR_P (tmp0
)
10376 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10377 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10378 <= TYPE_PRECISION (atype
)))
10379 tmp0
= TREE_OPERAND (tmp0
, 0);
10380 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
10381 tmp1
= TREE_OPERAND (tmp1
, 0);
10382 if (CONVERT_EXPR_P (tmp1
)
10383 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10384 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10385 <= TYPE_PRECISION (atype
)))
10386 tmp1
= TREE_OPERAND (tmp1
, 0);
10387 /* The only case we can still associate with two variables
10388 is if they are the same, modulo negation and bit-pattern
10389 preserving conversions. */
10390 if (!operand_equal_p (tmp0
, tmp1
, 0))
10395 /* Only do something if we found more than two objects. Otherwise,
10396 nothing has changed and we risk infinite recursion. */
10398 && (2 < ((var0
!= 0) + (var1
!= 0)
10399 + (con0
!= 0) + (con1
!= 0)
10400 + (lit0
!= 0) + (lit1
!= 0)
10401 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
10403 bool any_overflows
= false;
10404 if (lit0
) any_overflows
|= TREE_OVERFLOW (lit0
);
10405 if (lit1
) any_overflows
|= TREE_OVERFLOW (lit1
);
10406 if (minus_lit0
) any_overflows
|= TREE_OVERFLOW (minus_lit0
);
10407 if (minus_lit1
) any_overflows
|= TREE_OVERFLOW (minus_lit1
);
10408 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
10409 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
10410 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
10411 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
10414 /* Preserve the MINUS_EXPR if the negative part of the literal is
10415 greater than the positive part. Otherwise, the multiplicative
10416 folding code (i.e extract_muldiv) may be fooled in case
10417 unsigned constants are subtracted, like in the following
10418 example: ((X*2 + 4) - 8U)/2. */
10419 if (minus_lit0
&& lit0
)
10421 if (TREE_CODE (lit0
) == INTEGER_CST
10422 && TREE_CODE (minus_lit0
) == INTEGER_CST
10423 && tree_int_cst_lt (lit0
, minus_lit0
))
10425 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
10426 MINUS_EXPR
, atype
);
10431 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
10432 MINUS_EXPR
, atype
);
10437 /* Don't introduce overflows through reassociation. */
10439 && ((lit0
&& TREE_OVERFLOW_P (lit0
))
10440 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
))))
10447 fold_convert_loc (loc
, type
,
10448 associate_trees (loc
, var0
, minus_lit0
,
10449 MINUS_EXPR
, atype
));
10452 con0
= associate_trees (loc
, con0
, minus_lit0
,
10453 MINUS_EXPR
, atype
);
10455 fold_convert_loc (loc
, type
,
10456 associate_trees (loc
, var0
, con0
,
10457 PLUS_EXPR
, atype
));
10461 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
10463 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
10471 /* Pointer simplifications for subtraction, simple reassociations. */
10472 if (POINTER_TYPE_P (TREE_TYPE (arg1
)) && POINTER_TYPE_P (TREE_TYPE (arg0
)))
10474 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10475 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10476 && TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10478 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10479 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10480 tree arg10
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10481 tree arg11
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10482 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10483 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10485 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10488 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10489 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
10491 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10492 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10493 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
, arg00
,
10494 fold_convert_loc (loc
, type
, arg1
));
10496 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tmp
, arg01
);
10498 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10500 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10502 tree arg10
= fold_convert_loc (loc
, type
,
10503 TREE_OPERAND (arg1
, 0));
10504 tree arg11
= fold_convert_loc (loc
, type
,
10505 TREE_OPERAND (arg1
, 1));
10506 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
10507 fold_convert_loc (loc
, type
, arg0
),
10510 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tmp
, arg11
);
10513 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10514 if (TREE_CODE (arg0
) == NEGATE_EXPR
10515 && negate_expr_p (arg1
)
10516 && reorder_operands_p (arg0
, arg1
))
10517 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10518 fold_convert_loc (loc
, type
,
10519 negate_expr (arg1
)),
10520 fold_convert_loc (loc
, type
,
10521 TREE_OPERAND (arg0
, 0)));
10523 /* X - (X / Y) * Y is X % Y. */
10524 if ((INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
10525 && TREE_CODE (arg1
) == MULT_EXPR
10526 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10527 && operand_equal_p (arg0
,
10528 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0)
10529 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1),
10530 TREE_OPERAND (arg1
, 1), 0))
10532 fold_convert_loc (loc
, type
,
10533 fold_build2_loc (loc
, TRUNC_MOD_EXPR
, TREE_TYPE (arg0
),
10534 arg0
, TREE_OPERAND (arg1
, 1)));
10536 if (! FLOAT_TYPE_P (type
))
10538 /* Fold A - (A & B) into ~B & A. */
10539 if (!TREE_SIDE_EFFECTS (arg0
)
10540 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10542 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
10544 tree arg10
= fold_convert_loc (loc
, type
,
10545 TREE_OPERAND (arg1
, 0));
10546 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10547 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10549 fold_convert_loc (loc
, type
, arg0
));
10551 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10553 tree arg11
= fold_convert_loc (loc
,
10554 type
, TREE_OPERAND (arg1
, 1));
10555 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10556 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10558 fold_convert_loc (loc
, type
, arg0
));
10562 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10563 any power of 2 minus 1. */
10564 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10565 && TREE_CODE (arg1
) == BIT_AND_EXPR
10566 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10567 TREE_OPERAND (arg1
, 0), 0))
10569 tree mask0
= TREE_OPERAND (arg0
, 1);
10570 tree mask1
= TREE_OPERAND (arg1
, 1);
10571 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
10573 if (operand_equal_p (tem
, mask1
, 0))
10575 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
10576 TREE_OPERAND (arg0
, 0), mask1
);
10577 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
10582 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10583 __complex__ ( x, -y ). This is not the same for SNaNs or if
10584 signed zeros are involved. */
10585 if (!HONOR_SNANS (element_mode (arg0
))
10586 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10587 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10589 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10590 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10591 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10592 bool arg0rz
= false, arg0iz
= false;
10593 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10594 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10596 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10597 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10598 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10600 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10602 : build1 (REALPART_EXPR
, rtype
, arg1
));
10603 tree ip
= arg0i
? arg0i
10604 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10605 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10607 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10609 tree rp
= arg0r
? arg0r
10610 : build1 (REALPART_EXPR
, rtype
, arg0
);
10611 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10613 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10614 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10619 /* A - B -> A + (-B) if B is easily negatable. */
10620 if (negate_expr_p (arg1
)
10621 && !TYPE_OVERFLOW_SANITIZED (type
)
10622 && ((FLOAT_TYPE_P (type
)
10623 /* Avoid this transformation if B is a positive REAL_CST. */
10624 && (TREE_CODE (arg1
) != REAL_CST
10625 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
10626 || INTEGRAL_TYPE_P (type
)))
10627 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10628 fold_convert_loc (loc
, type
, arg0
),
10629 fold_convert_loc (loc
, type
,
10630 negate_expr (arg1
)));
10632 /* Try folding difference of addresses. */
10634 HOST_WIDE_INT diff
;
10636 if ((TREE_CODE (arg0
) == ADDR_EXPR
10637 || TREE_CODE (arg1
) == ADDR_EXPR
)
10638 && ptr_difference_const (arg0
, arg1
, &diff
))
10639 return build_int_cst_type (type
, diff
);
10642 /* Fold &a[i] - &a[j] to i-j. */
10643 if (TREE_CODE (arg0
) == ADDR_EXPR
10644 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10645 && TREE_CODE (arg1
) == ADDR_EXPR
10646 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10648 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
10649 TREE_OPERAND (arg0
, 0),
10650 TREE_OPERAND (arg1
, 0));
10655 if (FLOAT_TYPE_P (type
)
10656 && flag_unsafe_math_optimizations
10657 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10658 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10659 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10662 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10663 one. Make sure the type is not saturating and has the signedness of
10664 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10665 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10666 if ((TREE_CODE (arg0
) == MULT_EXPR
10667 || TREE_CODE (arg1
) == MULT_EXPR
)
10668 && !TYPE_SATURATING (type
)
10669 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10670 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10671 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10673 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10681 /* (-A) * (-B) -> A * B */
10682 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10683 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10684 fold_convert_loc (loc
, type
,
10685 TREE_OPERAND (arg0
, 0)),
10686 fold_convert_loc (loc
, type
,
10687 negate_expr (arg1
)));
10688 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10689 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10690 fold_convert_loc (loc
, type
,
10691 negate_expr (arg0
)),
10692 fold_convert_loc (loc
, type
,
10693 TREE_OPERAND (arg1
, 0)));
10695 if (! FLOAT_TYPE_P (type
))
10697 /* Transform x * -C into -x * C if x is easily negatable. */
10698 if (TREE_CODE (arg1
) == INTEGER_CST
10699 && tree_int_cst_sgn (arg1
) == -1
10700 && negate_expr_p (arg0
)
10701 && (tem
= negate_expr (arg1
)) != arg1
10702 && !TREE_OVERFLOW (tem
))
10703 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10704 fold_convert_loc (loc
, type
,
10705 negate_expr (arg0
)),
10708 /* (a * (1 << b)) is (a << b) */
10709 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10710 && integer_onep (TREE_OPERAND (arg1
, 0)))
10711 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op0
,
10712 TREE_OPERAND (arg1
, 1));
10713 if (TREE_CODE (arg0
) == LSHIFT_EXPR
10714 && integer_onep (TREE_OPERAND (arg0
, 0)))
10715 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op1
,
10716 TREE_OPERAND (arg0
, 1));
10718 /* (A + A) * C -> A * 2 * C */
10719 if (TREE_CODE (arg0
) == PLUS_EXPR
10720 && TREE_CODE (arg1
) == INTEGER_CST
10721 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10722 TREE_OPERAND (arg0
, 1), 0))
10723 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10724 omit_one_operand_loc (loc
, type
,
10725 TREE_OPERAND (arg0
, 0),
10726 TREE_OPERAND (arg0
, 1)),
10727 fold_build2_loc (loc
, MULT_EXPR
, type
,
10728 build_int_cst (type
, 2) , arg1
));
10730 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10731 sign-changing only. */
10732 if (TREE_CODE (arg1
) == INTEGER_CST
10733 && TREE_CODE (arg0
) == EXACT_DIV_EXPR
10734 && operand_equal_p (arg1
, TREE_OPERAND (arg0
, 1), 0))
10735 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10737 strict_overflow_p
= false;
10738 if (TREE_CODE (arg1
) == INTEGER_CST
10739 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10740 &strict_overflow_p
)))
10742 if (strict_overflow_p
)
10743 fold_overflow_warning (("assuming signed overflow does not "
10744 "occur when simplifying "
10746 WARN_STRICT_OVERFLOW_MISC
);
10747 return fold_convert_loc (loc
, type
, tem
);
10750 /* Optimize z * conj(z) for integer complex numbers. */
10751 if (TREE_CODE (arg0
) == CONJ_EXPR
10752 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10753 return fold_mult_zconjz (loc
, type
, arg1
);
10754 if (TREE_CODE (arg1
) == CONJ_EXPR
10755 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10756 return fold_mult_zconjz (loc
, type
, arg0
);
10760 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10761 the result for floating point types due to rounding so it is applied
10762 only if -fassociative-math was specify. */
10763 if (flag_associative_math
10764 && TREE_CODE (arg0
) == RDIV_EXPR
10765 && TREE_CODE (arg1
) == REAL_CST
10766 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
10768 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
10771 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10772 TREE_OPERAND (arg0
, 1));
10775 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10776 if (operand_equal_p (arg0
, arg1
, 0))
10778 tree tem
= fold_strip_sign_ops (arg0
);
10779 if (tem
!= NULL_TREE
)
10781 tem
= fold_convert_loc (loc
, type
, tem
);
10782 return fold_build2_loc (loc
, MULT_EXPR
, type
, tem
, tem
);
10786 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10787 This is not the same for NaNs or if signed zeros are
10789 if (!HONOR_NANS (arg0
)
10790 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10791 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10792 && TREE_CODE (arg1
) == COMPLEX_CST
10793 && real_zerop (TREE_REALPART (arg1
)))
10795 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10796 if (real_onep (TREE_IMAGPART (arg1
)))
10798 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10799 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
10801 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
10802 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
10804 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10805 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
10806 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
10810 /* Optimize z * conj(z) for floating point complex numbers.
10811 Guarded by flag_unsafe_math_optimizations as non-finite
10812 imaginary components don't produce scalar results. */
10813 if (flag_unsafe_math_optimizations
10814 && TREE_CODE (arg0
) == CONJ_EXPR
10815 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10816 return fold_mult_zconjz (loc
, type
, arg1
);
10817 if (flag_unsafe_math_optimizations
10818 && TREE_CODE (arg1
) == CONJ_EXPR
10819 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10820 return fold_mult_zconjz (loc
, type
, arg0
);
10822 if (flag_unsafe_math_optimizations
)
10824 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10825 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10827 /* Optimizations of root(...)*root(...). */
10828 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
10831 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10832 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10834 /* Optimize sqrt(x)*sqrt(x) as x. */
10835 if (BUILTIN_SQRT_P (fcode0
)
10836 && operand_equal_p (arg00
, arg10
, 0)
10837 && ! HONOR_SNANS (element_mode (type
)))
10840 /* Optimize root(x)*root(y) as root(x*y). */
10841 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10842 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg00
, arg10
);
10843 return build_call_expr_loc (loc
, rootfn
, 1, arg
);
10846 /* Optimize expN(x)*expN(y) as expN(x+y). */
10847 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
10849 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10850 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10851 CALL_EXPR_ARG (arg0
, 0),
10852 CALL_EXPR_ARG (arg1
, 0));
10853 return build_call_expr_loc (loc
, expfn
, 1, arg
);
10856 /* Optimizations of pow(...)*pow(...). */
10857 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
10858 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
10859 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
10861 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10862 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10863 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10864 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10866 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10867 if (operand_equal_p (arg01
, arg11
, 0))
10869 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10870 tree arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
10872 return build_call_expr_loc (loc
, powfn
, 2, arg
, arg01
);
10875 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10876 if (operand_equal_p (arg00
, arg10
, 0))
10878 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10879 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10881 return build_call_expr_loc (loc
, powfn
, 2, arg00
, arg
);
10885 /* Optimize tan(x)*cos(x) as sin(x). */
10886 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
10887 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
10888 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
10889 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
10890 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
10891 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
10892 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10893 CALL_EXPR_ARG (arg1
, 0), 0))
10895 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
10897 if (sinfn
!= NULL_TREE
)
10898 return build_call_expr_loc (loc
, sinfn
, 1,
10899 CALL_EXPR_ARG (arg0
, 0));
10902 /* Optimize x*pow(x,c) as pow(x,c+1). */
10903 if (fcode1
== BUILT_IN_POW
10904 || fcode1
== BUILT_IN_POWF
10905 || fcode1
== BUILT_IN_POWL
)
10907 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10908 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10909 if (TREE_CODE (arg11
) == REAL_CST
10910 && !TREE_OVERFLOW (arg11
)
10911 && operand_equal_p (arg0
, arg10
, 0))
10913 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10917 c
= TREE_REAL_CST (arg11
);
10918 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10919 arg
= build_real (type
, c
);
10920 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10924 /* Optimize pow(x,c)*x as pow(x,c+1). */
10925 if (fcode0
== BUILT_IN_POW
10926 || fcode0
== BUILT_IN_POWF
10927 || fcode0
== BUILT_IN_POWL
)
10929 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10930 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10931 if (TREE_CODE (arg01
) == REAL_CST
10932 && !TREE_OVERFLOW (arg01
)
10933 && operand_equal_p (arg1
, arg00
, 0))
10935 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10939 c
= TREE_REAL_CST (arg01
);
10940 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10941 arg
= build_real (type
, c
);
10942 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
10946 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10947 if (!in_gimple_form
10949 && operand_equal_p (arg0
, arg1
, 0))
10951 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
10955 tree arg
= build_real (type
, dconst2
);
10956 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10965 /* ~X | X is -1. */
10966 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10967 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10969 t1
= build_zero_cst (type
);
10970 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10971 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
10974 /* X | ~X is -1. */
10975 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
10976 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10978 t1
= build_zero_cst (type
);
10979 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10980 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
10983 /* Canonicalize (X & C1) | C2. */
10984 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10985 && TREE_CODE (arg1
) == INTEGER_CST
10986 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10988 int width
= TYPE_PRECISION (type
), w
;
10989 wide_int c1
= TREE_OPERAND (arg0
, 1);
10990 wide_int c2
= arg1
;
10992 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10993 if ((c1
& c2
) == c1
)
10994 return omit_one_operand_loc (loc
, type
, arg1
,
10995 TREE_OPERAND (arg0
, 0));
10997 wide_int msk
= wi::mask (width
, false,
10998 TYPE_PRECISION (TREE_TYPE (arg1
)));
11000 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11001 if (msk
.and_not (c1
| c2
) == 0)
11002 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
11003 TREE_OPERAND (arg0
, 0), arg1
);
11005 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11006 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11007 mode which allows further optimizations. */
11010 wide_int c3
= c1
.and_not (c2
);
11011 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
11013 wide_int mask
= wi::mask (w
, false,
11014 TYPE_PRECISION (type
));
11015 if (((c1
| c2
) & mask
) == mask
&& c1
.and_not (mask
) == 0)
11023 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
11024 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11025 TREE_OPERAND (arg0
, 0),
11026 wide_int_to_tree (type
,
11031 /* (X & ~Y) | (~X & Y) is X ^ Y */
11032 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11033 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
11035 tree a0
, a1
, l0
, l1
, n0
, n1
;
11037 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11038 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11040 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11041 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11043 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
11044 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
11046 if ((operand_equal_p (n0
, a0
, 0)
11047 && operand_equal_p (n1
, a1
, 0))
11048 || (operand_equal_p (n0
, a1
, 0)
11049 && operand_equal_p (n1
, a0
, 0)))
11050 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
11053 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11054 if (t1
!= NULL_TREE
)
11057 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11059 This results in more efficient code for machines without a NAND
11060 instruction. Combine will canonicalize to the first form
11061 which will allow use of NAND instructions provided by the
11062 backend if they exist. */
11063 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11064 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11067 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11068 build2 (BIT_AND_EXPR
, type
,
11069 fold_convert_loc (loc
, type
,
11070 TREE_OPERAND (arg0
, 0)),
11071 fold_convert_loc (loc
, type
,
11072 TREE_OPERAND (arg1
, 0))));
11075 /* See if this can be simplified into a rotate first. If that
11076 is unsuccessful continue in the association code. */
11080 /* ~X ^ X is -1. */
11081 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11082 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11084 t1
= build_zero_cst (type
);
11085 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11086 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
11089 /* X ^ ~X is -1. */
11090 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
11091 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11093 t1
= build_zero_cst (type
);
11094 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11095 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
11098 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11099 with a constant, and the two constants have no bits in common,
11100 we should treat this as a BIT_IOR_EXPR since this may produce more
11101 simplifications. */
11102 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11103 && TREE_CODE (arg1
) == BIT_AND_EXPR
11104 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11105 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
11106 && wi::bit_and (TREE_OPERAND (arg0
, 1),
11107 TREE_OPERAND (arg1
, 1)) == 0)
11109 code
= BIT_IOR_EXPR
;
11113 /* (X | Y) ^ X -> Y & ~ X*/
11114 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11115 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11117 tree t2
= TREE_OPERAND (arg0
, 1);
11118 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11120 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11121 fold_convert_loc (loc
, type
, t2
),
11122 fold_convert_loc (loc
, type
, t1
));
11126 /* (Y | X) ^ X -> Y & ~ X*/
11127 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11128 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11130 tree t2
= TREE_OPERAND (arg0
, 0);
11131 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11133 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11134 fold_convert_loc (loc
, type
, t2
),
11135 fold_convert_loc (loc
, type
, t1
));
11139 /* X ^ (X | Y) -> Y & ~ X*/
11140 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11141 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
11143 tree t2
= TREE_OPERAND (arg1
, 1);
11144 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11146 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11147 fold_convert_loc (loc
, type
, t2
),
11148 fold_convert_loc (loc
, type
, t1
));
11152 /* X ^ (Y | X) -> Y & ~ X*/
11153 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11154 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
11156 tree t2
= TREE_OPERAND (arg1
, 0);
11157 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11159 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11160 fold_convert_loc (loc
, type
, t2
),
11161 fold_convert_loc (loc
, type
, t1
));
11165 /* Convert ~X ^ ~Y to X ^ Y. */
11166 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11167 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11168 return fold_build2_loc (loc
, code
, type
,
11169 fold_convert_loc (loc
, type
,
11170 TREE_OPERAND (arg0
, 0)),
11171 fold_convert_loc (loc
, type
,
11172 TREE_OPERAND (arg1
, 0)));
11174 /* Convert ~X ^ C to X ^ ~C. */
11175 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11176 && TREE_CODE (arg1
) == INTEGER_CST
)
11177 return fold_build2_loc (loc
, code
, type
,
11178 fold_convert_loc (loc
, type
,
11179 TREE_OPERAND (arg0
, 0)),
11180 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, arg1
));
11182 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11183 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11184 && INTEGRAL_TYPE_P (type
)
11185 && integer_onep (TREE_OPERAND (arg0
, 1))
11186 && integer_onep (arg1
))
11187 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11188 build_zero_cst (TREE_TYPE (arg0
)));
11190 /* Fold (X & Y) ^ Y as ~X & Y. */
11191 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11192 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11194 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11195 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11196 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11197 fold_convert_loc (loc
, type
, arg1
));
11199 /* Fold (X & Y) ^ X as ~Y & X. */
11200 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11201 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11202 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11204 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11205 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11206 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11207 fold_convert_loc (loc
, type
, arg1
));
11209 /* Fold X ^ (X & Y) as X & ~Y. */
11210 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11211 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11213 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11214 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11215 fold_convert_loc (loc
, type
, arg0
),
11216 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11218 /* Fold X ^ (Y & X) as ~Y & X. */
11219 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11220 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11221 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11223 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11224 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11225 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11226 fold_convert_loc (loc
, type
, arg0
));
11229 /* See if this can be simplified into a rotate first. If that
11230 is unsuccessful continue in the association code. */
11234 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11235 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11236 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11237 || (TREE_CODE (arg0
) == EQ_EXPR
11238 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11239 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11240 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11242 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11243 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11244 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11245 || (TREE_CODE (arg1
) == EQ_EXPR
11246 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11247 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11248 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11250 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11251 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11252 && INTEGRAL_TYPE_P (type
)
11253 && integer_onep (TREE_OPERAND (arg0
, 1))
11254 && integer_onep (arg1
))
11257 tem
= TREE_OPERAND (arg0
, 0);
11258 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11259 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11261 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11262 build_zero_cst (TREE_TYPE (tem
)));
11264 /* Fold ~X & 1 as (X & 1) == 0. */
11265 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11266 && INTEGRAL_TYPE_P (type
)
11267 && integer_onep (arg1
))
11270 tem
= TREE_OPERAND (arg0
, 0);
11271 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11272 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11274 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11275 build_zero_cst (TREE_TYPE (tem
)));
11277 /* Fold !X & 1 as X == 0. */
11278 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11279 && integer_onep (arg1
))
11281 tem
= TREE_OPERAND (arg0
, 0);
11282 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11283 build_zero_cst (TREE_TYPE (tem
)));
11286 /* Fold (X ^ Y) & Y as ~X & Y. */
11287 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11288 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11290 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11291 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11292 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11293 fold_convert_loc (loc
, type
, arg1
));
11295 /* Fold (X ^ Y) & X as ~Y & X. */
11296 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11297 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11298 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11300 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11301 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11302 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11303 fold_convert_loc (loc
, type
, arg1
));
11305 /* Fold X & (X ^ Y) as X & ~Y. */
11306 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11307 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11309 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11310 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11311 fold_convert_loc (loc
, type
, arg0
),
11312 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11314 /* Fold X & (Y ^ X) as ~Y & X. */
11315 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11316 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11317 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11319 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11320 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11321 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11322 fold_convert_loc (loc
, type
, arg0
));
11325 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11326 multiple of 1 << CST. */
11327 if (TREE_CODE (arg1
) == INTEGER_CST
)
11329 wide_int cst1
= arg1
;
11330 wide_int ncst1
= -cst1
;
11331 if ((cst1
& ncst1
) == ncst1
11332 && multiple_of_p (type
, arg0
,
11333 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11334 return fold_convert_loc (loc
, type
, arg0
);
11337 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11339 if (TREE_CODE (arg1
) == INTEGER_CST
11340 && TREE_CODE (arg0
) == MULT_EXPR
11341 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11343 wide_int warg1
= arg1
;
11344 wide_int masked
= mask_with_tz (type
, warg1
, TREE_OPERAND (arg0
, 1));
11347 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
11349 else if (masked
!= warg1
)
11351 /* Avoid the transform if arg1 is a mask of some
11352 mode which allows further optimizations. */
11353 int pop
= wi::popcount (warg1
);
11354 if (!(pop
>= BITS_PER_UNIT
11355 && exact_log2 (pop
) != -1
11356 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
11357 return fold_build2_loc (loc
, code
, type
, op0
,
11358 wide_int_to_tree (type
, masked
));
11362 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11363 ((A & N) + B) & M -> (A + B) & M
11364 Similarly if (N & M) == 0,
11365 ((A | N) + B) & M -> (A + B) & M
11366 and for - instead of + (or unary - instead of +)
11367 and/or ^ instead of |.
11368 If B is constant and (B & M) == 0, fold into A & M. */
11369 if (TREE_CODE (arg1
) == INTEGER_CST
)
11371 wide_int cst1
= arg1
;
11372 if ((~cst1
!= 0) && (cst1
& (cst1
+ 1)) == 0
11373 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11374 && (TREE_CODE (arg0
) == PLUS_EXPR
11375 || TREE_CODE (arg0
) == MINUS_EXPR
11376 || TREE_CODE (arg0
) == NEGATE_EXPR
)
11377 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
11378 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
11384 /* Now we know that arg0 is (C + D) or (C - D) or
11385 -C and arg1 (M) is == (1LL << cst) - 1.
11386 Store C into PMOP[0] and D into PMOP[1]. */
11387 pmop
[0] = TREE_OPERAND (arg0
, 0);
11389 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
11391 pmop
[1] = TREE_OPERAND (arg0
, 1);
11395 if ((wi::max_value (TREE_TYPE (arg0
)) & cst1
) != cst1
)
11398 for (; which
>= 0; which
--)
11399 switch (TREE_CODE (pmop
[which
]))
11404 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
11407 cst0
= TREE_OPERAND (pmop
[which
], 1);
11409 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
11414 else if (cst0
!= 0)
11416 /* If C or D is of the form (A & N) where
11417 (N & M) == M, or of the form (A | N) or
11418 (A ^ N) where (N & M) == 0, replace it with A. */
11419 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
11422 /* If C or D is a N where (N & M) == 0, it can be
11423 omitted (assumed 0). */
11424 if ((TREE_CODE (arg0
) == PLUS_EXPR
11425 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
11426 && (cst1
& pmop
[which
]) == 0)
11427 pmop
[which
] = NULL
;
11433 /* Only build anything new if we optimized one or both arguments
11435 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
11436 || (TREE_CODE (arg0
) != NEGATE_EXPR
11437 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
11439 tree utype
= TREE_TYPE (arg0
);
11440 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
11442 /* Perform the operations in a type that has defined
11443 overflow behavior. */
11444 utype
= unsigned_type_for (TREE_TYPE (arg0
));
11445 if (pmop
[0] != NULL
)
11446 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
11447 if (pmop
[1] != NULL
)
11448 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
11451 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
11452 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
11453 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
11455 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
11456 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
11458 else if (pmop
[0] != NULL
)
11460 else if (pmop
[1] != NULL
)
11463 return build_int_cst (type
, 0);
11465 else if (pmop
[0] == NULL
)
11466 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
11468 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
11470 /* TEM is now the new binary +, - or unary - replacement. */
11471 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
11472 fold_convert_loc (loc
, utype
, arg1
));
11473 return fold_convert_loc (loc
, type
, tem
);
11478 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11479 if (t1
!= NULL_TREE
)
11481 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11482 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11483 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11485 prec
= element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11487 wide_int mask
= wide_int::from (arg1
, prec
, UNSIGNED
);
11490 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11493 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11495 This results in more efficient code for machines without a NOR
11496 instruction. Combine will canonicalize to the first form
11497 which will allow use of NOR instructions provided by the
11498 backend if they exist. */
11499 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11500 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11502 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11503 build2 (BIT_IOR_EXPR
, type
,
11504 fold_convert_loc (loc
, type
,
11505 TREE_OPERAND (arg0
, 0)),
11506 fold_convert_loc (loc
, type
,
11507 TREE_OPERAND (arg1
, 0))));
11510 /* If arg0 is derived from the address of an object or function, we may
11511 be able to fold this expression using the object or function's
11513 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && tree_fits_uhwi_p (arg1
))
11515 unsigned HOST_WIDE_INT modulus
, residue
;
11516 unsigned HOST_WIDE_INT low
= tree_to_uhwi (arg1
);
11518 modulus
= get_pointer_modulus_and_residue (arg0
, &residue
,
11519 integer_onep (arg1
));
11521 /* This works because modulus is a power of 2. If this weren't the
11522 case, we'd have to replace it by its greatest power-of-2
11523 divisor: modulus & -modulus. */
11525 return build_int_cst (type
, residue
& low
);
11528 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11529 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11530 if the new mask might be further optimized. */
11531 if ((TREE_CODE (arg0
) == LSHIFT_EXPR
11532 || TREE_CODE (arg0
) == RSHIFT_EXPR
)
11533 && TYPE_PRECISION (TREE_TYPE (arg0
)) <= HOST_BITS_PER_WIDE_INT
11534 && TREE_CODE (arg1
) == INTEGER_CST
11535 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11536 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) > 0
11537 && (tree_to_uhwi (TREE_OPERAND (arg0
, 1))
11538 < TYPE_PRECISION (TREE_TYPE (arg0
))))
11540 unsigned int shiftc
= tree_to_uhwi (TREE_OPERAND (arg0
, 1));
11541 unsigned HOST_WIDE_INT mask
= TREE_INT_CST_LOW (arg1
);
11542 unsigned HOST_WIDE_INT newmask
, zerobits
= 0;
11543 tree shift_type
= TREE_TYPE (arg0
);
11545 if (TREE_CODE (arg0
) == LSHIFT_EXPR
)
11546 zerobits
= ((((unsigned HOST_WIDE_INT
) 1) << shiftc
) - 1);
11547 else if (TREE_CODE (arg0
) == RSHIFT_EXPR
11548 && TYPE_PRECISION (TREE_TYPE (arg0
))
11549 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0
))))
11551 prec
= TYPE_PRECISION (TREE_TYPE (arg0
));
11552 tree arg00
= TREE_OPERAND (arg0
, 0);
11553 /* See if more bits can be proven as zero because of
11555 if (TREE_CODE (arg00
) == NOP_EXPR
11556 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00
, 0))))
11558 tree inner_type
= TREE_TYPE (TREE_OPERAND (arg00
, 0));
11559 if (TYPE_PRECISION (inner_type
)
11560 == GET_MODE_PRECISION (TYPE_MODE (inner_type
))
11561 && TYPE_PRECISION (inner_type
) < prec
)
11563 prec
= TYPE_PRECISION (inner_type
);
11564 /* See if we can shorten the right shift. */
11566 shift_type
= inner_type
;
11567 /* Otherwise X >> C1 is all zeros, so we'll optimize
11568 it into (X, 0) later on by making sure zerobits
11572 zerobits
= ~(unsigned HOST_WIDE_INT
) 0;
11575 zerobits
>>= HOST_BITS_PER_WIDE_INT
- shiftc
;
11576 zerobits
<<= prec
- shiftc
;
11578 /* For arithmetic shift if sign bit could be set, zerobits
11579 can contain actually sign bits, so no transformation is
11580 possible, unless MASK masks them all away. In that
11581 case the shift needs to be converted into logical shift. */
11582 if (!TYPE_UNSIGNED (TREE_TYPE (arg0
))
11583 && prec
== TYPE_PRECISION (TREE_TYPE (arg0
)))
11585 if ((mask
& zerobits
) == 0)
11586 shift_type
= unsigned_type_for (TREE_TYPE (arg0
));
11592 /* ((X << 16) & 0xff00) is (X, 0). */
11593 if ((mask
& zerobits
) == mask
)
11594 return omit_one_operand_loc (loc
, type
,
11595 build_int_cst (type
, 0), arg0
);
11597 newmask
= mask
| zerobits
;
11598 if (newmask
!= mask
&& (newmask
& (newmask
+ 1)) == 0)
11600 /* Only do the transformation if NEWMASK is some integer
11602 for (prec
= BITS_PER_UNIT
;
11603 prec
< HOST_BITS_PER_WIDE_INT
; prec
<<= 1)
11604 if (newmask
== (((unsigned HOST_WIDE_INT
) 1) << prec
) - 1)
11606 if (prec
< HOST_BITS_PER_WIDE_INT
11607 || newmask
== ~(unsigned HOST_WIDE_INT
) 0)
11611 if (shift_type
!= TREE_TYPE (arg0
))
11613 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), shift_type
,
11614 fold_convert_loc (loc
, shift_type
,
11615 TREE_OPERAND (arg0
, 0)),
11616 TREE_OPERAND (arg0
, 1));
11617 tem
= fold_convert_loc (loc
, type
, tem
);
11621 newmaskt
= build_int_cst_type (TREE_TYPE (op1
), newmask
);
11622 if (!tree_int_cst_equal (newmaskt
, arg1
))
11623 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
, newmaskt
);
11631 /* Don't touch a floating-point divide by zero unless the mode
11632 of the constant can represent infinity. */
11633 if (TREE_CODE (arg1
) == REAL_CST
11634 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11635 && real_zerop (arg1
))
11638 /* (-A) / (-B) -> A / B */
11639 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11640 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11641 TREE_OPERAND (arg0
, 0),
11642 negate_expr (arg1
));
11643 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11644 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11645 negate_expr (arg0
),
11646 TREE_OPERAND (arg1
, 0));
11648 /* Convert A/B/C to A/(B*C). */
11649 if (flag_reciprocal_math
11650 && TREE_CODE (arg0
) == RDIV_EXPR
)
11651 return fold_build2_loc (loc
, RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
11652 fold_build2_loc (loc
, MULT_EXPR
, type
,
11653 TREE_OPERAND (arg0
, 1), arg1
));
11655 /* Convert A/(B/C) to (A/B)*C. */
11656 if (flag_reciprocal_math
11657 && TREE_CODE (arg1
) == RDIV_EXPR
)
11658 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11659 fold_build2_loc (loc
, RDIV_EXPR
, type
, arg0
,
11660 TREE_OPERAND (arg1
, 0)),
11661 TREE_OPERAND (arg1
, 1));
11663 /* Convert C1/(X*C2) into (C1/C2)/X. */
11664 if (flag_reciprocal_math
11665 && TREE_CODE (arg1
) == MULT_EXPR
11666 && TREE_CODE (arg0
) == REAL_CST
11667 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
11669 tree tem
= const_binop (RDIV_EXPR
, arg0
,
11670 TREE_OPERAND (arg1
, 1));
11672 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
11673 TREE_OPERAND (arg1
, 0));
11676 if (flag_unsafe_math_optimizations
)
11678 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
11679 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
11681 /* Optimize sin(x)/cos(x) as tan(x). */
11682 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
11683 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
11684 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
11685 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11686 CALL_EXPR_ARG (arg1
, 0), 0))
11688 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11690 if (tanfn
!= NULL_TREE
)
11691 return build_call_expr_loc (loc
, tanfn
, 1, CALL_EXPR_ARG (arg0
, 0));
11694 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11695 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
11696 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
11697 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
11698 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11699 CALL_EXPR_ARG (arg1
, 0), 0))
11701 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11703 if (tanfn
!= NULL_TREE
)
11705 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
11706 CALL_EXPR_ARG (arg0
, 0));
11707 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11708 build_real (type
, dconst1
), tmp
);
11712 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11713 NaNs or Infinities. */
11714 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
11715 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
11716 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
11718 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11719 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11721 if (! HONOR_NANS (arg00
)
11722 && ! HONOR_INFINITIES (element_mode (arg00
))
11723 && operand_equal_p (arg00
, arg01
, 0))
11725 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11727 if (cosfn
!= NULL_TREE
)
11728 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11732 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11733 NaNs or Infinities. */
11734 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
11735 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
11736 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
11738 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11739 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11741 if (! HONOR_NANS (arg00
)
11742 && ! HONOR_INFINITIES (element_mode (arg00
))
11743 && operand_equal_p (arg00
, arg01
, 0))
11745 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11747 if (cosfn
!= NULL_TREE
)
11749 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11750 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11751 build_real (type
, dconst1
),
11757 /* Optimize pow(x,c)/x as pow(x,c-1). */
11758 if (fcode0
== BUILT_IN_POW
11759 || fcode0
== BUILT_IN_POWF
11760 || fcode0
== BUILT_IN_POWL
)
11762 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11763 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11764 if (TREE_CODE (arg01
) == REAL_CST
11765 && !TREE_OVERFLOW (arg01
)
11766 && operand_equal_p (arg1
, arg00
, 0))
11768 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11772 c
= TREE_REAL_CST (arg01
);
11773 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
11774 arg
= build_real (type
, c
);
11775 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
11779 /* Optimize a/root(b/c) into a*root(c/b). */
11780 if (BUILTIN_ROOT_P (fcode1
))
11782 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
11784 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
11786 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11787 tree b
= TREE_OPERAND (rootarg
, 0);
11788 tree c
= TREE_OPERAND (rootarg
, 1);
11790 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
11792 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
11793 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
11797 /* Optimize x/expN(y) into x*expN(-y). */
11798 if (BUILTIN_EXPONENT_P (fcode1
))
11800 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11801 tree arg
= negate_expr (CALL_EXPR_ARG (arg1
, 0));
11802 arg1
= build_call_expr_loc (loc
,
11804 fold_convert_loc (loc
, type
, arg
));
11805 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11808 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11809 if (fcode1
== BUILT_IN_POW
11810 || fcode1
== BUILT_IN_POWF
11811 || fcode1
== BUILT_IN_POWL
)
11813 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11814 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
11815 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
11816 tree neg11
= fold_convert_loc (loc
, type
,
11817 negate_expr (arg11
));
11818 arg1
= build_call_expr_loc (loc
, powfn
, 2, arg10
, neg11
);
11819 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11824 case TRUNC_DIV_EXPR
:
11825 /* Optimize (X & (-A)) / A where A is a power of 2,
11827 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11828 && !TYPE_UNSIGNED (type
) && TREE_CODE (arg1
) == INTEGER_CST
11829 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) > 0)
11831 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (arg1
),
11832 arg1
, TREE_OPERAND (arg0
, 1));
11833 if (sum
&& integer_zerop (sum
)) {
11834 tree pow2
= build_int_cst (integer_type_node
,
11835 wi::exact_log2 (arg1
));
11836 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11837 TREE_OPERAND (arg0
, 0), pow2
);
11843 case FLOOR_DIV_EXPR
:
11844 /* Simplify A / (B << N) where A and B are positive and B is
11845 a power of 2, to A >> (N + log2(B)). */
11846 strict_overflow_p
= false;
11847 if (TREE_CODE (arg1
) == LSHIFT_EXPR
11848 && (TYPE_UNSIGNED (type
)
11849 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11851 tree sval
= TREE_OPERAND (arg1
, 0);
11852 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
11854 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
11855 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
11856 wi::exact_log2 (sval
));
11858 if (strict_overflow_p
)
11859 fold_overflow_warning (("assuming signed overflow does not "
11860 "occur when simplifying A / (B << N)"),
11861 WARN_STRICT_OVERFLOW_MISC
);
11863 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
11865 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11866 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
11872 case ROUND_DIV_EXPR
:
11873 case CEIL_DIV_EXPR
:
11874 case EXACT_DIV_EXPR
:
11875 if (integer_zerop (arg1
))
11878 /* Convert -A / -B to A / B when the type is signed and overflow is
11880 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11881 && TREE_CODE (arg0
) == NEGATE_EXPR
11882 && negate_expr_p (arg1
))
11884 if (INTEGRAL_TYPE_P (type
))
11885 fold_overflow_warning (("assuming signed overflow does not occur "
11886 "when distributing negation across "
11888 WARN_STRICT_OVERFLOW_MISC
);
11889 return fold_build2_loc (loc
, code
, type
,
11890 fold_convert_loc (loc
, type
,
11891 TREE_OPERAND (arg0
, 0)),
11892 fold_convert_loc (loc
, type
,
11893 negate_expr (arg1
)));
11895 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11896 && TREE_CODE (arg1
) == NEGATE_EXPR
11897 && negate_expr_p (arg0
))
11899 if (INTEGRAL_TYPE_P (type
))
11900 fold_overflow_warning (("assuming signed overflow does not occur "
11901 "when distributing negation across "
11903 WARN_STRICT_OVERFLOW_MISC
);
11904 return fold_build2_loc (loc
, code
, type
,
11905 fold_convert_loc (loc
, type
,
11906 negate_expr (arg0
)),
11907 fold_convert_loc (loc
, type
,
11908 TREE_OPERAND (arg1
, 0)));
11911 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11912 operation, EXACT_DIV_EXPR.
11914 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11915 At one time others generated faster code, it's not clear if they do
11916 after the last round to changes to the DIV code in expmed.c. */
11917 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
11918 && multiple_of_p (type
, arg0
, arg1
))
11919 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
, arg0
, arg1
);
11921 strict_overflow_p
= false;
11922 if (TREE_CODE (arg1
) == INTEGER_CST
11923 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11924 &strict_overflow_p
)))
11926 if (strict_overflow_p
)
11927 fold_overflow_warning (("assuming signed overflow does not occur "
11928 "when simplifying division"),
11929 WARN_STRICT_OVERFLOW_MISC
);
11930 return fold_convert_loc (loc
, type
, tem
);
11935 case CEIL_MOD_EXPR
:
11936 case FLOOR_MOD_EXPR
:
11937 case ROUND_MOD_EXPR
:
11938 case TRUNC_MOD_EXPR
:
11939 strict_overflow_p
= false;
11940 if (TREE_CODE (arg1
) == INTEGER_CST
11941 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11942 &strict_overflow_p
)))
11944 if (strict_overflow_p
)
11945 fold_overflow_warning (("assuming signed overflow does not occur "
11946 "when simplifying modulus"),
11947 WARN_STRICT_OVERFLOW_MISC
);
11948 return fold_convert_loc (loc
, type
, tem
);
11957 /* Since negative shift count is not well-defined,
11958 don't try to compute it in the compiler. */
11959 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
11962 prec
= element_precision (type
);
11964 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11965 if (TREE_CODE (op0
) == code
&& tree_fits_uhwi_p (arg1
)
11966 && tree_to_uhwi (arg1
) < prec
11967 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11968 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
11970 unsigned int low
= (tree_to_uhwi (TREE_OPERAND (arg0
, 1))
11971 + tree_to_uhwi (arg1
));
11973 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11974 being well defined. */
11977 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
11979 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
11980 return omit_one_operand_loc (loc
, type
, build_zero_cst (type
),
11981 TREE_OPERAND (arg0
, 0));
11986 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
11987 build_int_cst (TREE_TYPE (arg1
), low
));
11990 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11991 into x & ((unsigned)-1 >> c) for unsigned types. */
11992 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
11993 || (TYPE_UNSIGNED (type
)
11994 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
11995 && tree_fits_uhwi_p (arg1
)
11996 && tree_to_uhwi (arg1
) < prec
11997 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11998 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
12000 HOST_WIDE_INT low0
= tree_to_uhwi (TREE_OPERAND (arg0
, 1));
12001 HOST_WIDE_INT low1
= tree_to_uhwi (arg1
);
12007 arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12009 lshift
= build_minus_one_cst (type
);
12010 lshift
= const_binop (code
, lshift
, arg1
);
12012 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, arg00
, lshift
);
12016 /* If we have a rotate of a bit operation with the rotate count and
12017 the second operand of the bit operation both constant,
12018 permute the two operations. */
12019 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12020 && (TREE_CODE (arg0
) == BIT_AND_EXPR
12021 || TREE_CODE (arg0
) == BIT_IOR_EXPR
12022 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12023 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12024 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
12025 fold_build2_loc (loc
, code
, type
,
12026 TREE_OPERAND (arg0
, 0), arg1
),
12027 fold_build2_loc (loc
, code
, type
,
12028 TREE_OPERAND (arg0
, 1), arg1
));
12030 /* Two consecutive rotates adding up to the some integer
12031 multiple of the precision of the type can be ignored. */
12032 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12033 && TREE_CODE (arg0
) == RROTATE_EXPR
12034 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12035 && wi::umod_trunc (wi::add (arg1
, TREE_OPERAND (arg0
, 1)),
12037 return TREE_OPERAND (arg0
, 0);
12039 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12040 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12041 if the latter can be further optimized. */
12042 if ((code
== LSHIFT_EXPR
|| code
== RSHIFT_EXPR
)
12043 && TREE_CODE (arg0
) == BIT_AND_EXPR
12044 && TREE_CODE (arg1
) == INTEGER_CST
12045 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12047 tree mask
= fold_build2_loc (loc
, code
, type
,
12048 fold_convert_loc (loc
, type
,
12049 TREE_OPERAND (arg0
, 1)),
12051 tree shift
= fold_build2_loc (loc
, code
, type
,
12052 fold_convert_loc (loc
, type
,
12053 TREE_OPERAND (arg0
, 0)),
12055 tem
= fold_binary_loc (loc
, BIT_AND_EXPR
, type
, shift
, mask
);
12063 tem
= fold_minmax (loc
, MIN_EXPR
, type
, arg0
, arg1
);
12069 tem
= fold_minmax (loc
, MAX_EXPR
, type
, arg0
, arg1
);
12074 case TRUTH_ANDIF_EXPR
:
12075 /* Note that the operands of this must be ints
12076 and their values must be 0 or 1.
12077 ("true" is a fixed value perhaps depending on the language.) */
12078 /* If first arg is constant zero, return it. */
12079 if (integer_zerop (arg0
))
12080 return fold_convert_loc (loc
, type
, arg0
);
12081 case TRUTH_AND_EXPR
:
12082 /* If either arg is constant true, drop it. */
12083 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12084 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12085 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
12086 /* Preserve sequence points. */
12087 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12088 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12089 /* If second arg is constant zero, result is zero, but first arg
12090 must be evaluated. */
12091 if (integer_zerop (arg1
))
12092 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12093 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12094 case will be handled here. */
12095 if (integer_zerop (arg0
))
12096 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12098 /* !X && X is always false. */
12099 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12100 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12101 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
12102 /* X && !X is always false. */
12103 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12104 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12105 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12107 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12108 means A >= Y && A != MAX, but in this case we know that
12111 if (!TREE_SIDE_EFFECTS (arg0
)
12112 && !TREE_SIDE_EFFECTS (arg1
))
12114 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
12115 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
12116 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
12118 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
12119 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
12120 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
12123 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12129 case TRUTH_ORIF_EXPR
:
12130 /* Note that the operands of this must be ints
12131 and their values must be 0 or true.
12132 ("true" is a fixed value perhaps depending on the language.) */
12133 /* If first arg is constant true, return it. */
12134 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12135 return fold_convert_loc (loc
, type
, arg0
);
12136 case TRUTH_OR_EXPR
:
12137 /* If either arg is constant zero, drop it. */
12138 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12139 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12140 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12141 /* Preserve sequence points. */
12142 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12143 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12144 /* If second arg is constant true, result is true, but we must
12145 evaluate first arg. */
12146 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12147 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12148 /* Likewise for first arg, but note this only occurs here for
12150 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12151 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12153 /* !X || X is always true. */
12154 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12155 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12156 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12157 /* X || !X is always true. */
12158 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12159 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12160 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12162 /* (X && !Y) || (!X && Y) is X ^ Y */
12163 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
12164 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
12166 tree a0
, a1
, l0
, l1
, n0
, n1
;
12168 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
12169 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
12171 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12172 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
12174 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
12175 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
12177 if ((operand_equal_p (n0
, a0
, 0)
12178 && operand_equal_p (n1
, a1
, 0))
12179 || (operand_equal_p (n0
, a1
, 0)
12180 && operand_equal_p (n1
, a0
, 0)))
12181 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
12184 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12190 case TRUTH_XOR_EXPR
:
12191 /* If the second arg is constant zero, drop it. */
12192 if (integer_zerop (arg1
))
12193 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12194 /* If the second arg is constant true, this is a logical inversion. */
12195 if (integer_onep (arg1
))
12197 tem
= invert_truthvalue_loc (loc
, arg0
);
12198 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
12200 /* Identical arguments cancel to zero. */
12201 if (operand_equal_p (arg0
, arg1
, 0))
12202 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12204 /* !X ^ X is always true. */
12205 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12206 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12207 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12209 /* X ^ !X is always true. */
12210 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12211 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12212 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12221 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12222 if (tem
!= NULL_TREE
)
12225 /* bool_var != 0 becomes bool_var. */
12226 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12227 && code
== NE_EXPR
)
12228 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12230 /* bool_var == 1 becomes bool_var. */
12231 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12232 && code
== EQ_EXPR
)
12233 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12235 /* bool_var != 1 becomes !bool_var. */
12236 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12237 && code
== NE_EXPR
)
12238 return fold_convert_loc (loc
, type
,
12239 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12240 TREE_TYPE (arg0
), arg0
));
12242 /* bool_var == 0 becomes !bool_var. */
12243 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12244 && code
== EQ_EXPR
)
12245 return fold_convert_loc (loc
, type
,
12246 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12247 TREE_TYPE (arg0
), arg0
));
12249 /* !exp != 0 becomes !exp */
12250 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
12251 && code
== NE_EXPR
)
12252 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12254 /* If this is an equality comparison of the address of two non-weak,
12255 unaliased symbols neither of which are extern (since we do not
12256 have access to attributes for externs), then we know the result. */
12257 if (TREE_CODE (arg0
) == ADDR_EXPR
12258 && DECL_P (TREE_OPERAND (arg0
, 0))
12259 && TREE_CODE (arg1
) == ADDR_EXPR
12260 && DECL_P (TREE_OPERAND (arg1
, 0)))
12264 if (decl_in_symtab_p (TREE_OPERAND (arg0
, 0))
12265 && decl_in_symtab_p (TREE_OPERAND (arg1
, 0)))
12266 equal
= symtab_node::get_create (TREE_OPERAND (arg0
, 0))
12267 ->equal_address_to (symtab_node::get_create
12268 (TREE_OPERAND (arg1
, 0)));
12270 equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
12272 return constant_boolean_node (equal
12273 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
12277 /* Similarly for a NEGATE_EXPR. */
12278 if (TREE_CODE (arg0
) == NEGATE_EXPR
12279 && TREE_CODE (arg1
) == INTEGER_CST
12280 && 0 != (tem
= negate_expr (fold_convert_loc (loc
, TREE_TYPE (arg0
),
12282 && TREE_CODE (tem
) == INTEGER_CST
12283 && !TREE_OVERFLOW (tem
))
12284 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12286 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12287 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12288 && TREE_CODE (arg1
) == INTEGER_CST
12289 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12290 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12291 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
12292 fold_convert_loc (loc
,
12295 TREE_OPERAND (arg0
, 1)));
12297 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12298 if ((TREE_CODE (arg0
) == PLUS_EXPR
12299 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
12300 || TREE_CODE (arg0
) == MINUS_EXPR
)
12301 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12304 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
12305 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
12307 tree val
= TREE_OPERAND (arg0
, 1);
12308 return omit_two_operands_loc (loc
, type
,
12309 fold_build2_loc (loc
, code
, type
,
12311 build_int_cst (TREE_TYPE (val
),
12313 TREE_OPERAND (arg0
, 0), arg1
);
12316 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12317 if (TREE_CODE (arg0
) == MINUS_EXPR
12318 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
12319 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12322 && wi::extract_uhwi (TREE_OPERAND (arg0
, 0), 0, 1) == 1)
12324 return omit_two_operands_loc (loc
, type
,
12326 ? boolean_true_node
: boolean_false_node
,
12327 TREE_OPERAND (arg0
, 1), arg1
);
12330 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12331 if (TREE_CODE (arg0
) == ABS_EXPR
12332 && (integer_zerop (arg1
) || real_zerop (arg1
)))
12333 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
12335 /* If this is an EQ or NE comparison with zero and ARG0 is
12336 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12337 two operations, but the latter can be done in one less insn
12338 on machines that have only two-operand insns or on which a
12339 constant cannot be the first operand. */
12340 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12341 && integer_zerop (arg1
))
12343 tree arg00
= TREE_OPERAND (arg0
, 0);
12344 tree arg01
= TREE_OPERAND (arg0
, 1);
12345 if (TREE_CODE (arg00
) == LSHIFT_EXPR
12346 && integer_onep (TREE_OPERAND (arg00
, 0)))
12348 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
12349 arg01
, TREE_OPERAND (arg00
, 1));
12350 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12351 build_int_cst (TREE_TYPE (arg0
), 1));
12352 return fold_build2_loc (loc
, code
, type
,
12353 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12356 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
12357 && integer_onep (TREE_OPERAND (arg01
, 0)))
12359 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
12360 arg00
, TREE_OPERAND (arg01
, 1));
12361 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12362 build_int_cst (TREE_TYPE (arg0
), 1));
12363 return fold_build2_loc (loc
, code
, type
,
12364 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12369 /* If this is an NE or EQ comparison of zero against the result of a
12370 signed MOD operation whose second operand is a power of 2, make
12371 the MOD operation unsigned since it is simpler and equivalent. */
12372 if (integer_zerop (arg1
)
12373 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
12374 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
12375 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
12376 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
12377 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
12378 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12380 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
12381 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
12382 fold_convert_loc (loc
, newtype
,
12383 TREE_OPERAND (arg0
, 0)),
12384 fold_convert_loc (loc
, newtype
,
12385 TREE_OPERAND (arg0
, 1)));
12387 return fold_build2_loc (loc
, code
, type
, newmod
,
12388 fold_convert_loc (loc
, newtype
, arg1
));
12391 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12392 C1 is a valid shift constant, and C2 is a power of two, i.e.
12394 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12395 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
12396 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
12398 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12399 && integer_zerop (arg1
))
12401 tree itype
= TREE_TYPE (arg0
);
12402 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
12403 prec
= TYPE_PRECISION (itype
);
12405 /* Check for a valid shift count. */
12406 if (wi::ltu_p (arg001
, prec
))
12408 tree arg01
= TREE_OPERAND (arg0
, 1);
12409 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12410 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
12411 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12412 can be rewritten as (X & (C2 << C1)) != 0. */
12413 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
12415 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
12416 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
12417 return fold_build2_loc (loc
, code
, type
, tem
,
12418 fold_convert_loc (loc
, itype
, arg1
));
12420 /* Otherwise, for signed (arithmetic) shifts,
12421 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12422 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12423 else if (!TYPE_UNSIGNED (itype
))
12424 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
12425 arg000
, build_int_cst (itype
, 0));
12426 /* Otherwise, for unsigned (logical) shifts,
12427 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12428 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12430 return omit_one_operand_loc (loc
, type
,
12431 code
== EQ_EXPR
? integer_one_node
12432 : integer_zero_node
,
12437 /* If we have (A & C) == C where C is a power of 2, convert this into
12438 (A & C) != 0. Similarly for NE_EXPR. */
12439 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12440 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12441 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12442 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12443 arg0
, fold_convert_loc (loc
, TREE_TYPE (arg0
),
12444 integer_zero_node
));
12446 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12447 bit, then fold the expression into A < 0 or A >= 0. */
12448 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
, type
);
12452 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12453 Similarly for NE_EXPR. */
12454 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12455 && TREE_CODE (arg1
) == INTEGER_CST
12456 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12458 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
12459 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
12460 TREE_OPERAND (arg0
, 1));
12462 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12463 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
12465 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12466 if (integer_nonzerop (dandnotc
))
12467 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12470 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12471 Similarly for NE_EXPR. */
12472 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
12473 && TREE_CODE (arg1
) == INTEGER_CST
12474 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12476 tree notd
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
12478 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12479 TREE_OPERAND (arg0
, 1),
12480 fold_convert_loc (loc
, TREE_TYPE (arg0
), notd
));
12481 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12482 if (integer_nonzerop (candnotd
))
12483 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12486 /* If this is a comparison of a field, we may be able to simplify it. */
12487 if ((TREE_CODE (arg0
) == COMPONENT_REF
12488 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
12489 /* Handle the constant case even without -O
12490 to make sure the warnings are given. */
12491 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
12493 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
12498 /* Optimize comparisons of strlen vs zero to a compare of the
12499 first character of the string vs zero. To wit,
12500 strlen(ptr) == 0 => *ptr == 0
12501 strlen(ptr) != 0 => *ptr != 0
12502 Other cases should reduce to one of these two (or a constant)
12503 due to the return value of strlen being unsigned. */
12504 if (TREE_CODE (arg0
) == CALL_EXPR
12505 && integer_zerop (arg1
))
12507 tree fndecl
= get_callee_fndecl (arg0
);
12510 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
12511 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
12512 && call_expr_nargs (arg0
) == 1
12513 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
12515 tree iref
= build_fold_indirect_ref_loc (loc
,
12516 CALL_EXPR_ARG (arg0
, 0));
12517 return fold_build2_loc (loc
, code
, type
, iref
,
12518 build_int_cst (TREE_TYPE (iref
), 0));
12522 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12523 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12524 if (TREE_CODE (arg0
) == RSHIFT_EXPR
12525 && integer_zerop (arg1
)
12526 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12528 tree arg00
= TREE_OPERAND (arg0
, 0);
12529 tree arg01
= TREE_OPERAND (arg0
, 1);
12530 tree itype
= TREE_TYPE (arg00
);
12531 if (wi::eq_p (arg01
, element_precision (itype
) - 1))
12533 if (TYPE_UNSIGNED (itype
))
12535 itype
= signed_type_for (itype
);
12536 arg00
= fold_convert_loc (loc
, itype
, arg00
);
12538 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
12539 type
, arg00
, build_zero_cst (itype
));
12543 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12544 if (integer_zerop (arg1
)
12545 && TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12546 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12547 TREE_OPERAND (arg0
, 1));
12549 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12550 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12551 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12552 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12553 build_zero_cst (TREE_TYPE (arg0
)));
12554 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12555 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12556 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12557 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
12558 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 1),
12559 build_zero_cst (TREE_TYPE (arg0
)));
12561 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12562 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12563 && TREE_CODE (arg1
) == INTEGER_CST
12564 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12565 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12566 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg1
),
12567 TREE_OPERAND (arg0
, 1), arg1
));
12569 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12570 (X & C) == 0 when C is a single bit. */
12571 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12572 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
12573 && integer_zerop (arg1
)
12574 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12576 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12577 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
12578 TREE_OPERAND (arg0
, 1));
12579 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
12581 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12585 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12586 constant C is a power of two, i.e. a single bit. */
12587 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12588 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12589 && integer_zerop (arg1
)
12590 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12591 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12592 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12594 tree arg00
= TREE_OPERAND (arg0
, 0);
12595 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12596 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
12599 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12600 when C is a power of two, i.e. a single bit. */
12601 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12602 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
12603 && integer_zerop (arg1
)
12604 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12605 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12606 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12608 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12609 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
12610 arg000
, TREE_OPERAND (arg0
, 1));
12611 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12612 tem
, build_int_cst (TREE_TYPE (tem
), 0));
12615 if (integer_zerop (arg1
)
12616 && tree_expr_nonzero_p (arg0
))
12618 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
12619 return omit_one_operand_loc (loc
, type
, res
, arg0
);
12622 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12623 if (TREE_CODE (arg0
) == NEGATE_EXPR
12624 && TREE_CODE (arg1
) == NEGATE_EXPR
)
12625 return fold_build2_loc (loc
, code
, type
,
12626 TREE_OPERAND (arg0
, 0),
12627 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12628 TREE_OPERAND (arg1
, 0)));
12630 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12631 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12632 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
12634 tree arg00
= TREE_OPERAND (arg0
, 0);
12635 tree arg01
= TREE_OPERAND (arg0
, 1);
12636 tree arg10
= TREE_OPERAND (arg1
, 0);
12637 tree arg11
= TREE_OPERAND (arg1
, 1);
12638 tree itype
= TREE_TYPE (arg0
);
12640 if (operand_equal_p (arg01
, arg11
, 0))
12641 return fold_build2_loc (loc
, code
, type
,
12642 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12643 fold_build2_loc (loc
,
12644 BIT_XOR_EXPR
, itype
,
12647 build_zero_cst (itype
));
12649 if (operand_equal_p (arg01
, arg10
, 0))
12650 return fold_build2_loc (loc
, code
, type
,
12651 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12652 fold_build2_loc (loc
,
12653 BIT_XOR_EXPR
, itype
,
12656 build_zero_cst (itype
));
12658 if (operand_equal_p (arg00
, arg11
, 0))
12659 return fold_build2_loc (loc
, code
, type
,
12660 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12661 fold_build2_loc (loc
,
12662 BIT_XOR_EXPR
, itype
,
12665 build_zero_cst (itype
));
12667 if (operand_equal_p (arg00
, arg10
, 0))
12668 return fold_build2_loc (loc
, code
, type
,
12669 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12670 fold_build2_loc (loc
,
12671 BIT_XOR_EXPR
, itype
,
12674 build_zero_cst (itype
));
12677 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12678 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
12680 tree arg00
= TREE_OPERAND (arg0
, 0);
12681 tree arg01
= TREE_OPERAND (arg0
, 1);
12682 tree arg10
= TREE_OPERAND (arg1
, 0);
12683 tree arg11
= TREE_OPERAND (arg1
, 1);
12684 tree itype
= TREE_TYPE (arg0
);
12686 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12687 operand_equal_p guarantees no side-effects so we don't need
12688 to use omit_one_operand on Z. */
12689 if (operand_equal_p (arg01
, arg11
, 0))
12690 return fold_build2_loc (loc
, code
, type
, arg00
,
12691 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12693 if (operand_equal_p (arg01
, arg10
, 0))
12694 return fold_build2_loc (loc
, code
, type
, arg00
,
12695 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12697 if (operand_equal_p (arg00
, arg11
, 0))
12698 return fold_build2_loc (loc
, code
, type
, arg01
,
12699 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12701 if (operand_equal_p (arg00
, arg10
, 0))
12702 return fold_build2_loc (loc
, code
, type
, arg01
,
12703 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12706 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12707 if (TREE_CODE (arg01
) == INTEGER_CST
12708 && TREE_CODE (arg11
) == INTEGER_CST
)
12710 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
12711 fold_convert_loc (loc
, itype
, arg11
));
12712 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
12713 return fold_build2_loc (loc
, code
, type
, tem
,
12714 fold_convert_loc (loc
, itype
, arg10
));
12718 /* Attempt to simplify equality/inequality comparisons of complex
12719 values. Only lower the comparison if the result is known or
12720 can be simplified to a single scalar comparison. */
12721 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
12722 || TREE_CODE (arg0
) == COMPLEX_CST
)
12723 && (TREE_CODE (arg1
) == COMPLEX_EXPR
12724 || TREE_CODE (arg1
) == COMPLEX_CST
))
12726 tree real0
, imag0
, real1
, imag1
;
12729 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
12731 real0
= TREE_OPERAND (arg0
, 0);
12732 imag0
= TREE_OPERAND (arg0
, 1);
12736 real0
= TREE_REALPART (arg0
);
12737 imag0
= TREE_IMAGPART (arg0
);
12740 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
12742 real1
= TREE_OPERAND (arg1
, 0);
12743 imag1
= TREE_OPERAND (arg1
, 1);
12747 real1
= TREE_REALPART (arg1
);
12748 imag1
= TREE_IMAGPART (arg1
);
12751 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
12752 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
12754 if (integer_zerop (rcond
))
12756 if (code
== EQ_EXPR
)
12757 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12759 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
12763 if (code
== NE_EXPR
)
12764 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12766 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
12770 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
12771 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
12773 if (integer_zerop (icond
))
12775 if (code
== EQ_EXPR
)
12776 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12778 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
12782 if (code
== NE_EXPR
)
12783 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12785 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
12796 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12797 if (tem
!= NULL_TREE
)
12800 /* Transform comparisons of the form X +- C CMP X. */
12801 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
12802 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12803 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
12804 && !HONOR_SNANS (arg0
))
12805 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12806 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
12808 tree arg01
= TREE_OPERAND (arg0
, 1);
12809 enum tree_code code0
= TREE_CODE (arg0
);
12812 if (TREE_CODE (arg01
) == REAL_CST
)
12813 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
12815 is_positive
= tree_int_cst_sgn (arg01
);
12817 /* (X - c) > X becomes false. */
12818 if (code
== GT_EXPR
12819 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12820 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12822 if (TREE_CODE (arg01
) == INTEGER_CST
12823 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12824 fold_overflow_warning (("assuming signed overflow does not "
12825 "occur when assuming that (X - c) > X "
12826 "is always false"),
12827 WARN_STRICT_OVERFLOW_ALL
);
12828 return constant_boolean_node (0, type
);
12831 /* Likewise (X + c) < X becomes false. */
12832 if (code
== LT_EXPR
12833 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12834 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12836 if (TREE_CODE (arg01
) == INTEGER_CST
12837 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12838 fold_overflow_warning (("assuming signed overflow does not "
12839 "occur when assuming that "
12840 "(X + c) < X is always false"),
12841 WARN_STRICT_OVERFLOW_ALL
);
12842 return constant_boolean_node (0, type
);
12845 /* Convert (X - c) <= X to true. */
12846 if (!HONOR_NANS (arg1
)
12848 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12849 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12851 if (TREE_CODE (arg01
) == INTEGER_CST
12852 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12853 fold_overflow_warning (("assuming signed overflow does not "
12854 "occur when assuming that "
12855 "(X - c) <= X is always true"),
12856 WARN_STRICT_OVERFLOW_ALL
);
12857 return constant_boolean_node (1, type
);
12860 /* Convert (X + c) >= X to true. */
12861 if (!HONOR_NANS (arg1
)
12863 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12864 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12866 if (TREE_CODE (arg01
) == INTEGER_CST
12867 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12868 fold_overflow_warning (("assuming signed overflow does not "
12869 "occur when assuming that "
12870 "(X + c) >= X is always true"),
12871 WARN_STRICT_OVERFLOW_ALL
);
12872 return constant_boolean_node (1, type
);
12875 if (TREE_CODE (arg01
) == INTEGER_CST
)
12877 /* Convert X + c > X and X - c < X to true for integers. */
12878 if (code
== GT_EXPR
12879 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
12880 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
12882 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12883 fold_overflow_warning (("assuming signed overflow does "
12884 "not occur when assuming that "
12885 "(X + c) > X is always true"),
12886 WARN_STRICT_OVERFLOW_ALL
);
12887 return constant_boolean_node (1, type
);
12890 if (code
== LT_EXPR
12891 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
12892 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
12894 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12895 fold_overflow_warning (("assuming signed overflow does "
12896 "not occur when assuming that "
12897 "(X - c) < X is always true"),
12898 WARN_STRICT_OVERFLOW_ALL
);
12899 return constant_boolean_node (1, type
);
12902 /* Convert X + c <= X and X - c >= X to false for integers. */
12903 if (code
== LE_EXPR
12904 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
12905 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
12907 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12908 fold_overflow_warning (("assuming signed overflow does "
12909 "not occur when assuming that "
12910 "(X + c) <= X is always false"),
12911 WARN_STRICT_OVERFLOW_ALL
);
12912 return constant_boolean_node (0, type
);
12915 if (code
== GE_EXPR
12916 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
12917 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
12919 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12920 fold_overflow_warning (("assuming signed overflow does "
12921 "not occur when assuming that "
12922 "(X - c) >= X is always false"),
12923 WARN_STRICT_OVERFLOW_ALL
);
12924 return constant_boolean_node (0, type
);
12929 /* Comparisons with the highest or lowest possible integer of
12930 the specified precision will have known values. */
12932 tree arg1_type
= TREE_TYPE (arg1
);
12933 unsigned int prec
= TYPE_PRECISION (arg1_type
);
12935 if (TREE_CODE (arg1
) == INTEGER_CST
12936 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
12938 wide_int max
= wi::max_value (arg1_type
);
12939 wide_int signed_max
= wi::max_value (prec
, SIGNED
);
12940 wide_int min
= wi::min_value (arg1_type
);
12942 if (wi::eq_p (arg1
, max
))
12946 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12949 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
12952 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12955 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
12957 /* The GE_EXPR and LT_EXPR cases above are not normally
12958 reached because of previous transformations. */
12963 else if (wi::eq_p (arg1
, max
- 1))
12967 arg1
= const_binop (PLUS_EXPR
, arg1
,
12968 build_int_cst (TREE_TYPE (arg1
), 1));
12969 return fold_build2_loc (loc
, EQ_EXPR
, type
,
12970 fold_convert_loc (loc
,
12971 TREE_TYPE (arg1
), arg0
),
12974 arg1
= const_binop (PLUS_EXPR
, arg1
,
12975 build_int_cst (TREE_TYPE (arg1
), 1));
12976 return fold_build2_loc (loc
, NE_EXPR
, type
,
12977 fold_convert_loc (loc
, TREE_TYPE (arg1
),
12983 else if (wi::eq_p (arg1
, min
))
12987 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12990 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
12993 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12996 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13001 else if (wi::eq_p (arg1
, min
+ 1))
13005 arg1
= const_binop (MINUS_EXPR
, arg1
,
13006 build_int_cst (TREE_TYPE (arg1
), 1));
13007 return fold_build2_loc (loc
, NE_EXPR
, type
,
13008 fold_convert_loc (loc
,
13009 TREE_TYPE (arg1
), arg0
),
13012 arg1
= const_binop (MINUS_EXPR
, arg1
,
13013 build_int_cst (TREE_TYPE (arg1
), 1));
13014 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13015 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13022 else if (wi::eq_p (arg1
, signed_max
)
13023 && TYPE_UNSIGNED (arg1_type
)
13024 /* We will flip the signedness of the comparison operator
13025 associated with the mode of arg1, so the sign bit is
13026 specified by this mode. Check that arg1 is the signed
13027 max associated with this sign bit. */
13028 && prec
== GET_MODE_PRECISION (TYPE_MODE (arg1_type
))
13029 /* signed_type does not work on pointer types. */
13030 && INTEGRAL_TYPE_P (arg1_type
))
13032 /* The following case also applies to X < signed_max+1
13033 and X >= signed_max+1 because previous transformations. */
13034 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13036 tree st
= signed_type_for (arg1_type
);
13037 return fold_build2_loc (loc
,
13038 code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
13039 type
, fold_convert_loc (loc
, st
, arg0
),
13040 build_int_cst (st
, 0));
13046 /* If we are comparing an ABS_EXPR with a constant, we can
13047 convert all the cases into explicit comparisons, but they may
13048 well not be faster than doing the ABS and one comparison.
13049 But ABS (X) <= C is a range comparison, which becomes a subtraction
13050 and a comparison, and is probably faster. */
13051 if (code
== LE_EXPR
13052 && TREE_CODE (arg1
) == INTEGER_CST
13053 && TREE_CODE (arg0
) == ABS_EXPR
13054 && ! TREE_SIDE_EFFECTS (arg0
)
13055 && (0 != (tem
= negate_expr (arg1
)))
13056 && TREE_CODE (tem
) == INTEGER_CST
13057 && !TREE_OVERFLOW (tem
))
13058 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13059 build2 (GE_EXPR
, type
,
13060 TREE_OPERAND (arg0
, 0), tem
),
13061 build2 (LE_EXPR
, type
,
13062 TREE_OPERAND (arg0
, 0), arg1
));
13064 /* Convert ABS_EXPR<x> >= 0 to true. */
13065 strict_overflow_p
= false;
13066 if (code
== GE_EXPR
13067 && (integer_zerop (arg1
)
13068 || (! HONOR_NANS (arg0
)
13069 && real_zerop (arg1
)))
13070 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13072 if (strict_overflow_p
)
13073 fold_overflow_warning (("assuming signed overflow does not occur "
13074 "when simplifying comparison of "
13075 "absolute value and zero"),
13076 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13077 return omit_one_operand_loc (loc
, type
,
13078 constant_boolean_node (true, type
),
13082 /* Convert ABS_EXPR<x> < 0 to false. */
13083 strict_overflow_p
= false;
13084 if (code
== LT_EXPR
13085 && (integer_zerop (arg1
) || real_zerop (arg1
))
13086 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13088 if (strict_overflow_p
)
13089 fold_overflow_warning (("assuming signed overflow does not occur "
13090 "when simplifying comparison of "
13091 "absolute value and zero"),
13092 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13093 return omit_one_operand_loc (loc
, type
,
13094 constant_boolean_node (false, type
),
13098 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13099 and similarly for >= into !=. */
13100 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13101 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13102 && TREE_CODE (arg1
) == LSHIFT_EXPR
13103 && integer_onep (TREE_OPERAND (arg1
, 0)))
13104 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13105 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13106 TREE_OPERAND (arg1
, 1)),
13107 build_zero_cst (TREE_TYPE (arg0
)));
13109 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13110 otherwise Y might be >= # of bits in X's type and thus e.g.
13111 (unsigned char) (1 << Y) for Y 15 might be 0.
13112 If the cast is widening, then 1 << Y should have unsigned type,
13113 otherwise if Y is number of bits in the signed shift type minus 1,
13114 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13115 31 might be 0xffffffff80000000. */
13116 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13117 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13118 && CONVERT_EXPR_P (arg1
)
13119 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
13120 && (element_precision (TREE_TYPE (arg1
))
13121 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
13122 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
13123 || (element_precision (TREE_TYPE (arg1
))
13124 == element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
13125 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
13127 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13128 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
13129 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13130 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
13131 build_zero_cst (TREE_TYPE (arg0
)));
13136 case UNORDERED_EXPR
:
13144 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
13146 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
13147 if (t1
!= NULL_TREE
)
13151 /* If the first operand is NaN, the result is constant. */
13152 if (TREE_CODE (arg0
) == REAL_CST
13153 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
13154 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13156 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13157 ? integer_zero_node
13158 : integer_one_node
;
13159 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
13162 /* If the second operand is NaN, the result is constant. */
13163 if (TREE_CODE (arg1
) == REAL_CST
13164 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
13165 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13167 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13168 ? integer_zero_node
13169 : integer_one_node
;
13170 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
13173 /* Simplify unordered comparison of something with itself. */
13174 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
13175 && operand_equal_p (arg0
, arg1
, 0))
13176 return constant_boolean_node (1, type
);
13178 if (code
== LTGT_EXPR
13179 && !flag_trapping_math
13180 && operand_equal_p (arg0
, arg1
, 0))
13181 return constant_boolean_node (0, type
);
13183 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13185 tree targ0
= strip_float_extensions (arg0
);
13186 tree targ1
= strip_float_extensions (arg1
);
13187 tree newtype
= TREE_TYPE (targ0
);
13189 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
13190 newtype
= TREE_TYPE (targ1
);
13192 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
13193 return fold_build2_loc (loc
, code
, type
,
13194 fold_convert_loc (loc
, newtype
, targ0
),
13195 fold_convert_loc (loc
, newtype
, targ1
));
13200 case COMPOUND_EXPR
:
13201 /* When pedantic, a compound expression can be neither an lvalue
13202 nor an integer constant expression. */
13203 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
13205 /* Don't let (0, 0) be null pointer constant. */
13206 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
13207 : fold_convert_loc (loc
, type
, arg1
);
13208 return pedantic_non_lvalue_loc (loc
, tem
);
13211 /* An ASSERT_EXPR should never be passed to fold_binary. */
13212 gcc_unreachable ();
13216 } /* switch (code) */
13219 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13220 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13224 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
13226 switch (TREE_CODE (*tp
))
13232 *walk_subtrees
= 0;
13234 /* ... fall through ... */
13241 /* Return whether the sub-tree ST contains a label which is accessible from
13242 outside the sub-tree. */
13245 contains_label_p (tree st
)
13248 (walk_tree_without_duplicates (&st
, contains_label_1
, NULL
) != NULL_TREE
);
13251 /* Fold a ternary expression of code CODE and type TYPE with operands
13252 OP0, OP1, and OP2. Return the folded expression if folding is
13253 successful. Otherwise, return NULL_TREE. */
13256 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
13257 tree op0
, tree op1
, tree op2
)
13260 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
13261 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13263 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
13264 && TREE_CODE_LENGTH (code
) == 3);
13266 /* If this is a commutative operation, and OP0 is a constant, move it
13267 to OP1 to reduce the number of tests below. */
13268 if (commutative_ternary_tree_code (code
)
13269 && tree_swap_operands_p (op0
, op1
, true))
13270 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
13272 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
13276 /* Strip any conversions that don't change the mode. This is safe
13277 for every expression, except for a comparison expression because
13278 its signedness is derived from its operands. So, in the latter
13279 case, only strip conversions that don't change the signedness.
13281 Note that this is done as an internal manipulation within the
13282 constant folder, in order to find the simplest representation of
13283 the arguments so that their form can be studied. In any cases,
13284 the appropriate type conversions should be put back in the tree
13285 that will get out of the constant folder. */
13306 case COMPONENT_REF
:
13307 if (TREE_CODE (arg0
) == CONSTRUCTOR
13308 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
13310 unsigned HOST_WIDE_INT idx
;
13312 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
13319 case VEC_COND_EXPR
:
13320 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13321 so all simple results must be passed through pedantic_non_lvalue. */
13322 if (TREE_CODE (arg0
) == INTEGER_CST
)
13324 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
13325 tem
= integer_zerop (arg0
) ? op2
: op1
;
13326 /* Only optimize constant conditions when the selected branch
13327 has the same type as the COND_EXPR. This avoids optimizing
13328 away "c ? x : throw", where the throw has a void type.
13329 Avoid throwing away that operand which contains label. */
13330 if ((!TREE_SIDE_EFFECTS (unused_op
)
13331 || !contains_label_p (unused_op
))
13332 && (! VOID_TYPE_P (TREE_TYPE (tem
))
13333 || VOID_TYPE_P (type
)))
13334 return pedantic_non_lvalue_loc (loc
, tem
);
13337 else if (TREE_CODE (arg0
) == VECTOR_CST
)
13339 if ((TREE_CODE (arg1
) == VECTOR_CST
13340 || TREE_CODE (arg1
) == CONSTRUCTOR
)
13341 && (TREE_CODE (arg2
) == VECTOR_CST
13342 || TREE_CODE (arg2
) == CONSTRUCTOR
))
13344 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13345 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
13346 gcc_assert (nelts
== VECTOR_CST_NELTS (arg0
));
13347 for (i
= 0; i
< nelts
; i
++)
13349 tree val
= VECTOR_CST_ELT (arg0
, i
);
13350 if (integer_all_onesp (val
))
13352 else if (integer_zerop (val
))
13353 sel
[i
] = nelts
+ i
;
13354 else /* Currently unreachable. */
13357 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
13358 if (t
!= NULL_TREE
)
13363 /* If we have A op B ? A : C, we may be able to convert this to a
13364 simpler expression, depending on the operation and the values
13365 of B and C. Signed zeros prevent all of these transformations,
13366 for reasons given above each one.
13368 Also try swapping the arguments and inverting the conditional. */
13369 if (COMPARISON_CLASS_P (arg0
)
13370 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13371 arg1
, TREE_OPERAND (arg0
, 1))
13372 && !HONOR_SIGNED_ZEROS (element_mode (arg1
)))
13374 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
13379 if (COMPARISON_CLASS_P (arg0
)
13380 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13382 TREE_OPERAND (arg0
, 1))
13383 && !HONOR_SIGNED_ZEROS (element_mode (op2
)))
13385 location_t loc0
= expr_location_or (arg0
, loc
);
13386 tem
= fold_invert_truthvalue (loc0
, arg0
);
13387 if (tem
&& COMPARISON_CLASS_P (tem
))
13389 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
13395 /* If the second operand is simpler than the third, swap them
13396 since that produces better jump optimization results. */
13397 if (truth_value_p (TREE_CODE (arg0
))
13398 && tree_swap_operands_p (op1
, op2
, false))
13400 location_t loc0
= expr_location_or (arg0
, loc
);
13401 /* See if this can be inverted. If it can't, possibly because
13402 it was a floating-point inequality comparison, don't do
13404 tem
= fold_invert_truthvalue (loc0
, arg0
);
13406 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
13409 /* Convert A ? 1 : 0 to simply A. */
13410 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
13411 : (integer_onep (op1
)
13412 && !VECTOR_TYPE_P (type
)))
13413 && integer_zerop (op2
)
13414 /* If we try to convert OP0 to our type, the
13415 call to fold will try to move the conversion inside
13416 a COND, which will recurse. In that case, the COND_EXPR
13417 is probably the best choice, so leave it alone. */
13418 && type
== TREE_TYPE (arg0
))
13419 return pedantic_non_lvalue_loc (loc
, arg0
);
13421 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13422 over COND_EXPR in cases such as floating point comparisons. */
13423 if (integer_zerop (op1
)
13424 && (code
== VEC_COND_EXPR
? integer_all_onesp (op2
)
13425 : (integer_onep (op2
)
13426 && !VECTOR_TYPE_P (type
)))
13427 && truth_value_p (TREE_CODE (arg0
)))
13428 return pedantic_non_lvalue_loc (loc
,
13429 fold_convert_loc (loc
, type
,
13430 invert_truthvalue_loc (loc
,
13433 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13434 if (TREE_CODE (arg0
) == LT_EXPR
13435 && integer_zerop (TREE_OPERAND (arg0
, 1))
13436 && integer_zerop (op2
)
13437 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
13439 /* sign_bit_p looks through both zero and sign extensions,
13440 but for this optimization only sign extensions are
13442 tree tem2
= TREE_OPERAND (arg0
, 0);
13443 while (tem
!= tem2
)
13445 if (TREE_CODE (tem2
) != NOP_EXPR
13446 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
13451 tem2
= TREE_OPERAND (tem2
, 0);
13453 /* sign_bit_p only checks ARG1 bits within A's precision.
13454 If <sign bit of A> has wider type than A, bits outside
13455 of A's precision in <sign bit of A> need to be checked.
13456 If they are all 0, this optimization needs to be done
13457 in unsigned A's type, if they are all 1 in signed A's type,
13458 otherwise this can't be done. */
13460 && TYPE_PRECISION (TREE_TYPE (tem
))
13461 < TYPE_PRECISION (TREE_TYPE (arg1
))
13462 && TYPE_PRECISION (TREE_TYPE (tem
))
13463 < TYPE_PRECISION (type
))
13465 int inner_width
, outer_width
;
13468 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
13469 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
13470 if (outer_width
> TYPE_PRECISION (type
))
13471 outer_width
= TYPE_PRECISION (type
);
13473 wide_int mask
= wi::shifted_mask
13474 (inner_width
, outer_width
- inner_width
, false,
13475 TYPE_PRECISION (TREE_TYPE (arg1
)));
13477 wide_int common
= mask
& arg1
;
13478 if (common
== mask
)
13480 tem_type
= signed_type_for (TREE_TYPE (tem
));
13481 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13483 else if (common
== 0)
13485 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
13486 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13494 fold_convert_loc (loc
, type
,
13495 fold_build2_loc (loc
, BIT_AND_EXPR
,
13496 TREE_TYPE (tem
), tem
,
13497 fold_convert_loc (loc
,
13502 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13503 already handled above. */
13504 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13505 && integer_onep (TREE_OPERAND (arg0
, 1))
13506 && integer_zerop (op2
)
13507 && integer_pow2p (arg1
))
13509 tree tem
= TREE_OPERAND (arg0
, 0);
13511 if (TREE_CODE (tem
) == RSHIFT_EXPR
13512 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
13513 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
13514 tree_to_uhwi (TREE_OPERAND (tem
, 1)))
13515 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
13516 TREE_OPERAND (tem
, 0), arg1
);
13519 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13520 is probably obsolete because the first operand should be a
13521 truth value (that's why we have the two cases above), but let's
13522 leave it in until we can confirm this for all front-ends. */
13523 if (integer_zerop (op2
)
13524 && TREE_CODE (arg0
) == NE_EXPR
13525 && integer_zerop (TREE_OPERAND (arg0
, 1))
13526 && integer_pow2p (arg1
)
13527 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
13528 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
13529 arg1
, OEP_ONLY_CONST
))
13530 return pedantic_non_lvalue_loc (loc
,
13531 fold_convert_loc (loc
, type
,
13532 TREE_OPERAND (arg0
, 0)));
13534 /* Disable the transformations below for vectors, since
13535 fold_binary_op_with_conditional_arg may undo them immediately,
13536 yielding an infinite loop. */
13537 if (code
== VEC_COND_EXPR
)
13540 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13541 if (integer_zerop (op2
)
13542 && truth_value_p (TREE_CODE (arg0
))
13543 && truth_value_p (TREE_CODE (arg1
))
13544 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13545 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
13546 : TRUTH_ANDIF_EXPR
,
13547 type
, fold_convert_loc (loc
, type
, arg0
), arg1
);
13549 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13550 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
13551 && truth_value_p (TREE_CODE (arg0
))
13552 && truth_value_p (TREE_CODE (arg1
))
13553 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13555 location_t loc0
= expr_location_or (arg0
, loc
);
13556 /* Only perform transformation if ARG0 is easily inverted. */
13557 tem
= fold_invert_truthvalue (loc0
, arg0
);
13559 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13562 type
, fold_convert_loc (loc
, type
, tem
),
13566 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13567 if (integer_zerop (arg1
)
13568 && truth_value_p (TREE_CODE (arg0
))
13569 && truth_value_p (TREE_CODE (op2
))
13570 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13572 location_t loc0
= expr_location_or (arg0
, loc
);
13573 /* Only perform transformation if ARG0 is easily inverted. */
13574 tem
= fold_invert_truthvalue (loc0
, arg0
);
13576 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13577 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
13578 type
, fold_convert_loc (loc
, type
, tem
),
13582 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13583 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
13584 && truth_value_p (TREE_CODE (arg0
))
13585 && truth_value_p (TREE_CODE (op2
))
13586 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13587 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13588 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
13589 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
13594 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13595 of fold_ternary on them. */
13596 gcc_unreachable ();
13598 case BIT_FIELD_REF
:
13599 if ((TREE_CODE (arg0
) == VECTOR_CST
13600 || (TREE_CODE (arg0
) == CONSTRUCTOR
13601 && TREE_CODE (TREE_TYPE (arg0
)) == VECTOR_TYPE
))
13602 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
13603 || (TREE_CODE (type
) == VECTOR_TYPE
13604 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
13606 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
13607 unsigned HOST_WIDE_INT width
= tree_to_uhwi (TYPE_SIZE (eltype
));
13608 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
13609 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
13612 && (idx
% width
) == 0
13613 && (n
% width
) == 0
13614 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
13619 if (TREE_CODE (arg0
) == VECTOR_CST
)
13622 return VECTOR_CST_ELT (arg0
, idx
);
13624 tree
*vals
= XALLOCAVEC (tree
, n
);
13625 for (unsigned i
= 0; i
< n
; ++i
)
13626 vals
[i
] = VECTOR_CST_ELT (arg0
, idx
+ i
);
13627 return build_vector (type
, vals
);
13630 /* Constructor elements can be subvectors. */
13631 unsigned HOST_WIDE_INT k
= 1;
13632 if (CONSTRUCTOR_NELTS (arg0
) != 0)
13634 tree cons_elem
= TREE_TYPE (CONSTRUCTOR_ELT (arg0
, 0)->value
);
13635 if (TREE_CODE (cons_elem
) == VECTOR_TYPE
)
13636 k
= TYPE_VECTOR_SUBPARTS (cons_elem
);
13639 /* We keep an exact subset of the constructor elements. */
13640 if ((idx
% k
) == 0 && (n
% k
) == 0)
13642 if (CONSTRUCTOR_NELTS (arg0
) == 0)
13643 return build_constructor (type
, NULL
);
13648 if (idx
< CONSTRUCTOR_NELTS (arg0
))
13649 return CONSTRUCTOR_ELT (arg0
, idx
)->value
;
13650 return build_zero_cst (type
);
13653 vec
<constructor_elt
, va_gc
> *vals
;
13654 vec_alloc (vals
, n
);
13655 for (unsigned i
= 0;
13656 i
< n
&& idx
+ i
< CONSTRUCTOR_NELTS (arg0
);
13658 CONSTRUCTOR_APPEND_ELT (vals
, NULL_TREE
,
13660 (arg0
, idx
+ i
)->value
);
13661 return build_constructor (type
, vals
);
13663 /* The bitfield references a single constructor element. */
13664 else if (idx
+ n
<= (idx
/ k
+ 1) * k
)
13666 if (CONSTRUCTOR_NELTS (arg0
) <= idx
/ k
)
13667 return build_zero_cst (type
);
13669 return CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
;
13671 return fold_build3_loc (loc
, code
, type
,
13672 CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
, op1
,
13673 build_int_cst (TREE_TYPE (op2
), (idx
% k
) * width
));
13678 /* A bit-field-ref that referenced the full argument can be stripped. */
13679 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
13680 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_to_uhwi (arg1
)
13681 && integer_zerop (op2
))
13682 return fold_convert_loc (loc
, type
, arg0
);
13684 /* On constants we can use native encode/interpret to constant
13685 fold (nearly) all BIT_FIELD_REFs. */
13686 if (CONSTANT_CLASS_P (arg0
)
13687 && can_native_interpret_type_p (type
)
13688 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)))
13689 /* This limitation should not be necessary, we just need to
13690 round this up to mode size. */
13691 && tree_to_uhwi (op1
) % BITS_PER_UNIT
== 0
13692 /* Need bit-shifting of the buffer to relax the following. */
13693 && tree_to_uhwi (op2
) % BITS_PER_UNIT
== 0)
13695 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
13696 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
13697 unsigned HOST_WIDE_INT clen
;
13698 clen
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)));
13699 /* ??? We cannot tell native_encode_expr to start at
13700 some random byte only. So limit us to a reasonable amount
13704 unsigned char *b
= XALLOCAVEC (unsigned char, clen
);
13705 unsigned HOST_WIDE_INT len
= native_encode_expr (arg0
, b
, clen
);
13707 && len
* BITS_PER_UNIT
>= bitpos
+ bitsize
)
13709 tree v
= native_interpret_expr (type
,
13710 b
+ bitpos
/ BITS_PER_UNIT
,
13711 bitsize
/ BITS_PER_UNIT
);
13721 /* For integers we can decompose the FMA if possible. */
13722 if (TREE_CODE (arg0
) == INTEGER_CST
13723 && TREE_CODE (arg1
) == INTEGER_CST
)
13724 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
13725 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
13726 if (integer_zerop (arg2
))
13727 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
13729 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
13731 case VEC_PERM_EXPR
:
13732 if (TREE_CODE (arg2
) == VECTOR_CST
)
13734 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
, mask
, mask2
;
13735 unsigned char *sel
= XALLOCAVEC (unsigned char, 2 * nelts
);
13736 unsigned char *sel2
= sel
+ nelts
;
13737 bool need_mask_canon
= false;
13738 bool need_mask_canon2
= false;
13739 bool all_in_vec0
= true;
13740 bool all_in_vec1
= true;
13741 bool maybe_identity
= true;
13742 bool single_arg
= (op0
== op1
);
13743 bool changed
= false;
13745 mask2
= 2 * nelts
- 1;
13746 mask
= single_arg
? (nelts
- 1) : mask2
;
13747 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
13748 for (i
= 0; i
< nelts
; i
++)
13750 tree val
= VECTOR_CST_ELT (arg2
, i
);
13751 if (TREE_CODE (val
) != INTEGER_CST
)
13754 /* Make sure that the perm value is in an acceptable
13757 need_mask_canon
|= wi::gtu_p (t
, mask
);
13758 need_mask_canon2
|= wi::gtu_p (t
, mask2
);
13759 sel
[i
] = t
.to_uhwi () & mask
;
13760 sel2
[i
] = t
.to_uhwi () & mask2
;
13762 if (sel
[i
] < nelts
)
13763 all_in_vec1
= false;
13765 all_in_vec0
= false;
13767 if ((sel
[i
] & (nelts
-1)) != i
)
13768 maybe_identity
= false;
13771 if (maybe_identity
)
13781 else if (all_in_vec1
)
13784 for (i
= 0; i
< nelts
; i
++)
13786 need_mask_canon
= true;
13789 if ((TREE_CODE (op0
) == VECTOR_CST
13790 || TREE_CODE (op0
) == CONSTRUCTOR
)
13791 && (TREE_CODE (op1
) == VECTOR_CST
13792 || TREE_CODE (op1
) == CONSTRUCTOR
))
13794 tree t
= fold_vec_perm (type
, op0
, op1
, sel
);
13795 if (t
!= NULL_TREE
)
13799 if (op0
== op1
&& !single_arg
)
13802 /* Some targets are deficient and fail to expand a single
13803 argument permutation while still allowing an equivalent
13804 2-argument version. */
13805 if (need_mask_canon
&& arg2
== op2
13806 && !can_vec_perm_p (TYPE_MODE (type
), false, sel
)
13807 && can_vec_perm_p (TYPE_MODE (type
), false, sel2
))
13809 need_mask_canon
= need_mask_canon2
;
13813 if (need_mask_canon
&& arg2
== op2
)
13815 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
13816 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
13817 for (i
= 0; i
< nelts
; i
++)
13818 tsel
[i
] = build_int_cst (eltype
, sel
[i
]);
13819 op2
= build_vector (TREE_TYPE (arg2
), tsel
);
13824 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
13830 } /* switch (code) */
13833 /* Perform constant folding and related simplification of EXPR.
13834 The related simplifications include x*1 => x, x*0 => 0, etc.,
13835 and application of the associative law.
13836 NOP_EXPR conversions may be removed freely (as long as we
13837 are careful not to change the type of the overall expression).
13838 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13839 but we can constant-fold them if they have constant operands. */
13841 #ifdef ENABLE_FOLD_CHECKING
13842 # define fold(x) fold_1 (x)
13843 static tree
fold_1 (tree
);
13849 const tree t
= expr
;
13850 enum tree_code code
= TREE_CODE (t
);
13851 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13853 location_t loc
= EXPR_LOCATION (expr
);
13855 /* Return right away if a constant. */
13856 if (kind
== tcc_constant
)
13859 /* CALL_EXPR-like objects with variable numbers of operands are
13860 treated specially. */
13861 if (kind
== tcc_vl_exp
)
13863 if (code
== CALL_EXPR
)
13865 tem
= fold_call_expr (loc
, expr
, false);
13866 return tem
? tem
: expr
;
13871 if (IS_EXPR_CODE_CLASS (kind
))
13873 tree type
= TREE_TYPE (t
);
13874 tree op0
, op1
, op2
;
13876 switch (TREE_CODE_LENGTH (code
))
13879 op0
= TREE_OPERAND (t
, 0);
13880 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13881 return tem
? tem
: expr
;
13883 op0
= TREE_OPERAND (t
, 0);
13884 op1
= TREE_OPERAND (t
, 1);
13885 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13886 return tem
? tem
: expr
;
13888 op0
= TREE_OPERAND (t
, 0);
13889 op1
= TREE_OPERAND (t
, 1);
13890 op2
= TREE_OPERAND (t
, 2);
13891 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13892 return tem
? tem
: expr
;
13902 tree op0
= TREE_OPERAND (t
, 0);
13903 tree op1
= TREE_OPERAND (t
, 1);
13905 if (TREE_CODE (op1
) == INTEGER_CST
13906 && TREE_CODE (op0
) == CONSTRUCTOR
13907 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
13909 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (op0
);
13910 unsigned HOST_WIDE_INT end
= vec_safe_length (elts
);
13911 unsigned HOST_WIDE_INT begin
= 0;
13913 /* Find a matching index by means of a binary search. */
13914 while (begin
!= end
)
13916 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
13917 tree index
= (*elts
)[middle
].index
;
13919 if (TREE_CODE (index
) == INTEGER_CST
13920 && tree_int_cst_lt (index
, op1
))
13921 begin
= middle
+ 1;
13922 else if (TREE_CODE (index
) == INTEGER_CST
13923 && tree_int_cst_lt (op1
, index
))
13925 else if (TREE_CODE (index
) == RANGE_EXPR
13926 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
13927 begin
= middle
+ 1;
13928 else if (TREE_CODE (index
) == RANGE_EXPR
13929 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
13932 return (*elts
)[middle
].value
;
13939 /* Return a VECTOR_CST if possible. */
13942 tree type
= TREE_TYPE (t
);
13943 if (TREE_CODE (type
) != VECTOR_TYPE
)
13946 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
13947 unsigned HOST_WIDE_INT idx
, pos
= 0;
13950 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), idx
, value
)
13952 if (!CONSTANT_CLASS_P (value
))
13954 if (TREE_CODE (value
) == VECTOR_CST
)
13956 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
13957 vec
[pos
++] = VECTOR_CST_ELT (value
, i
);
13960 vec
[pos
++] = value
;
13962 for (; pos
< TYPE_VECTOR_SUBPARTS (type
); ++pos
)
13963 vec
[pos
] = build_zero_cst (TREE_TYPE (type
));
13965 return build_vector (type
, vec
);
13969 return fold (DECL_INITIAL (t
));
13973 } /* switch (code) */
13976 #ifdef ENABLE_FOLD_CHECKING
13979 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
13980 hash_table
<pointer_hash
<const tree_node
> > *);
13981 static void fold_check_failed (const_tree
, const_tree
);
13982 void print_fold_checksum (const_tree
);
13984 /* When --enable-checking=fold, compute a digest of expr before
13985 and after actual fold call to see if fold did not accidentally
13986 change original expr. */
13992 struct md5_ctx ctx
;
13993 unsigned char checksum_before
[16], checksum_after
[16];
13994 hash_table
<pointer_hash
<const tree_node
> > ht (32);
13996 md5_init_ctx (&ctx
);
13997 fold_checksum_tree (expr
, &ctx
, &ht
);
13998 md5_finish_ctx (&ctx
, checksum_before
);
14001 ret
= fold_1 (expr
);
14003 md5_init_ctx (&ctx
);
14004 fold_checksum_tree (expr
, &ctx
, &ht
);
14005 md5_finish_ctx (&ctx
, checksum_after
);
14007 if (memcmp (checksum_before
, checksum_after
, 16))
14008 fold_check_failed (expr
, ret
);
/* Print to stderr the MD5 digest of EXPR as 16 hex byte pairs followed
   by a newline.  Debug aid for the ENABLE_FOLD_CHECKING machinery.
   NOTE(review): the return-type line and braces are elided in this
   extract; only comments were added.  */
14014 print_fold_checksum (const_tree expr
)
14016 struct md5_ctx ctx
;
14017 unsigned char checksum
[16], cnt
;
14018 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14020 md5_init_ctx (&ctx
);
14021 fold_checksum_tree (expr
, &ctx
, &ht
);
14022 md5_finish_ctx (&ctx
, checksum
);
/* Emit each of the 16 digest bytes as two lowercase hex digits.  */
14023 for (cnt
= 0; cnt
< 16; ++cnt
)
14024 fprintf (stderr
, "%02x", checksum
[cnt
]);
14025 putc ('\n', stderr
);
/* Report an internal compiler error when the before/after checksums of
   a fold operand differ, i.e. fold mutated its input tree.  Both
   parameters are unused (kept for debugger inspection).  */
14029 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
14031 internal_error ("fold check: original tree changed by fold");
/* Feed a structural digest of EXPR into the MD5 context CTX, recursing
   into operands/fields.  HT records already-visited nodes so shared
   subtrees are hashed once (and recursion terminates on cycles).
   Fields that fold is legitimately allowed to mutate (assembler names,
   type caches, variant chains) are masked out by hashing a scrubbed
   stack copy of the node instead of the original.
   NOTE(review): this extract omits many original lines (braces, `case`
   labels, `break`s, the `recursive_label:` target, early-return on an
   occupied hash slot) — visible as gaps in the embedded line numbers.
   Only comments were added; the code is untouched.  */
14035 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
14036 hash_table
<pointer_hash
<const tree_node
> > *ht
)
14038 const tree_node
**slot
;
14039 enum tree_code code
;
/* Scratch node used to hash a scrubbed copy when EXPR has fields fold
   may legally modify.  */
14040 union tree_node buf
;
/* Deduplicate: each node is entered into HT once.  */
14046 slot
= ht
->find_slot (expr
, INSERT
);
14050 code
= TREE_CODE (expr
);
14051 if (TREE_CODE_CLASS (code
) == tcc_declaration
14052 && HAS_DECL_ASSEMBLER_NAME_P (expr
))
14054 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
14055 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14056 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
14057 buf
.decl_with_vis
.symtab_node
= NULL
;
14058 expr
= (tree
) &buf
;
14060 else if (TREE_CODE_CLASS (code
) == tcc_type
14061 && (TYPE_POINTER_TO (expr
)
14062 || TYPE_REFERENCE_TO (expr
)
14063 || TYPE_CACHED_VALUES_P (expr
)
14064 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
14065 || TYPE_NEXT_VARIANT (expr
)))
14067 /* Allow these fields to be modified. */
14069 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14070 expr
= tmp
= (tree
) &buf
;
14071 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
14072 TYPE_POINTER_TO (tmp
) = NULL
;
14073 TYPE_REFERENCE_TO (tmp
) = NULL
;
14074 TYPE_NEXT_VARIANT (tmp
) = NULL
;
14075 if (TYPE_CACHED_VALUES_P (tmp
))
14077 TYPE_CACHED_VALUES_P (tmp
) = 0;
14078 TYPE_CACHED_VALUES (tmp
) = NULL
;
/* Hash the (possibly scrubbed) node's raw bytes, then recurse into
   its type, chain, and code-class-specific sub-trees.  */
14081 md5_process_bytes (expr
, tree_size (expr
), ctx
);
14082 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
14083 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
14084 if (TREE_CODE_CLASS (code
) != tcc_type
14085 && TREE_CODE_CLASS (code
) != tcc_declaration
14086 && code
!= TREE_LIST
14087 && code
!= SSA_NAME
14088 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
14089 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
14090 switch (TREE_CODE_CLASS (code
))
14096 md5_process_bytes (TREE_STRING_POINTER (expr
),
14097 TREE_STRING_LENGTH (expr
), ctx
);
14100 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
14101 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
14104 for (i
= 0; i
< (int) VECTOR_CST_NELTS (expr
); ++i
)
14105 fold_checksum_tree (VECTOR_CST_ELT (expr
, i
), ctx
, ht
);
14111 case tcc_exceptional
:
/* TREE_LIST: hash purpose/value, then iterate down the chain via
   goto instead of recursing (tail-call by hand).  */
14115 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
14116 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
14117 expr
= TREE_CHAIN (expr
);
14118 goto recursive_label
;
14121 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
14122 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
14128 case tcc_expression
:
14129 case tcc_reference
:
14130 case tcc_comparison
:
14133 case tcc_statement
:
/* All operand-bearing codes: hash every operand.  */
14135 len
= TREE_OPERAND_LENGTH (expr
);
14136 for (i
= 0; i
< len
; ++i
)
14137 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
14139 case tcc_declaration
:
14140 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
14141 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
14142 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
14144 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
14145 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
14146 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
14147 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
14148 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
14151 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
14153 if (TREE_CODE (expr
) == FUNCTION_DECL
)
14155 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
14156 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
14158 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
/* tcc_type: hash the type's size, attributes, name, bounds, variant
   and context links.  */
14162 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
14163 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
14164 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
14165 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
14166 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
14167 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
14168 if (INTEGRAL_TYPE_P (expr
)
14169 || SCALAR_FLOAT_TYPE_P (expr
))
14171 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
14172 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
14174 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
14175 if (TREE_CODE (expr
) == RECORD_TYPE
14176 || TREE_CODE (expr
) == UNION_TYPE
14177 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
14178 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
14179 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
14186 /* Helper function for outputting the checksum of a tree T. When
14187 debugging with gdb, you can "define mynext" to be "next" followed
14188 by "call debug_fold_checksum (op0)", then just trace down till the
/* (continuation of the comment above is elided in this extract).  */
14191 DEBUG_FUNCTION
void
14192 debug_fold_checksum (const_tree t
)
14195 unsigned char checksum
[16];
14196 struct md5_ctx ctx
;
14197 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14199 md5_init_ctx (&ctx
);
14200 fold_checksum_tree (t
, &ctx
, &ht
);
14201 md5_finish_ctx (&ctx
, checksum
);
/* Unlike print_fold_checksum this prints decimal byte values,
   space-separated.  */
14204 for (i
= 0; i
< 16; i
++)
14205 fprintf (stderr
, "%d ", checksum
[i
]);
14207 fprintf (stderr
, "\n");
14212 /* Fold a unary tree expression with code CODE of type TYPE with an
14213 operand OP0. LOC is the location of the resulting expression.
14214 Return a folded expression if successful. Otherwise, return a tree
14215 expression with code CODE of type TYPE with an operand OP0. */
/* NOTE(review): the return-type line, `tree tem;`, braces and the
   final `return tem;` are elided in this extract.  */
14218 fold_build1_stat_loc (location_t loc
,
14219 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
/* With fold checking enabled, digest OP0 before and after so a
   mutating fold_unary_loc is caught.  */
14222 #ifdef ENABLE_FOLD_CHECKING
14223 unsigned char checksum_before
[16], checksum_after
[16];
14224 struct md5_ctx ctx
;
14225 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14227 md5_init_ctx (&ctx
);
14228 fold_checksum_tree (op0
, &ctx
, &ht
);
14229 md5_finish_ctx (&ctx
, checksum_before
);
/* Try to fold; on failure fall back to building the raw expression.  */
14233 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14235 tem
= build1_stat_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
14237 #ifdef ENABLE_FOLD_CHECKING
14238 md5_init_ctx (&ctx
);
14239 fold_checksum_tree (op0
, &ctx
, &ht
);
14240 md5_finish_ctx (&ctx
, checksum_after
);
14242 if (memcmp (checksum_before
, checksum_after
, 16))
14243 fold_check_failed (op0
, tem
);
14248 /* Fold a binary tree expression with code CODE of type TYPE with
14249 operands OP0 and OP1. LOC is the location of the resulting
14250 expression. Return a folded expression if successful. Otherwise,
14251 return a tree expression with code CODE of type TYPE with operands
/* (OP0 and OP1 — tail of the comment is elided in this extract,
   as are the return type, `tree tem;`, braces and `return tem;`.)  */
14255 fold_build2_stat_loc (location_t loc
,
14256 enum tree_code code
, tree type
, tree op0
, tree op1
/* Checksum both operands before folding so in-place mutation of
   either one is detected.  */
14260 #ifdef ENABLE_FOLD_CHECKING
14261 unsigned char checksum_before_op0
[16],
14262 checksum_before_op1
[16],
14263 checksum_after_op0
[16],
14264 checksum_after_op1
[16];
14265 struct md5_ctx ctx
;
14266 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14268 md5_init_ctx (&ctx
);
14269 fold_checksum_tree (op0
, &ctx
, &ht
);
14270 md5_finish_ctx (&ctx
, checksum_before_op0
);
14273 md5_init_ctx (&ctx
);
14274 fold_checksum_tree (op1
, &ctx
, &ht
);
14275 md5_finish_ctx (&ctx
, checksum_before_op1
);
/* Try to fold; on failure fall back to building the raw expression.  */
14279 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14281 tem
= build2_stat_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
/* Verify neither operand changed.  */
14283 #ifdef ENABLE_FOLD_CHECKING
14284 md5_init_ctx (&ctx
);
14285 fold_checksum_tree (op0
, &ctx
, &ht
);
14286 md5_finish_ctx (&ctx
, checksum_after_op0
);
14289 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14290 fold_check_failed (op0
, tem
);
14292 md5_init_ctx (&ctx
);
14293 fold_checksum_tree (op1
, &ctx
, &ht
);
14294 md5_finish_ctx (&ctx
, checksum_after_op1
);
14296 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14297 fold_check_failed (op1
, tem
);
14302 /* Fold a ternary tree expression with code CODE of type TYPE with
14303 operands OP0, OP1, and OP2. Return a folded expression if
14304 successful. Otherwise, return a tree expression with code CODE of
14305 type TYPE with operands OP0, OP1, and OP2. */
/* NOTE(review): the return type, `tree tem;`, braces and `return tem;`
   are elided in this extract.  */
14308 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
14309 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
/* Checksum all three operands before folding.  */
14312 #ifdef ENABLE_FOLD_CHECKING
14313 unsigned char checksum_before_op0
[16],
14314 checksum_before_op1
[16],
14315 checksum_before_op2
[16],
14316 checksum_after_op0
[16],
14317 checksum_after_op1
[16],
14318 checksum_after_op2
[16];
14319 struct md5_ctx ctx
;
14320 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14322 md5_init_ctx (&ctx
);
14323 fold_checksum_tree (op0
, &ctx
, &ht
);
14324 md5_finish_ctx (&ctx
, checksum_before_op0
);
14327 md5_init_ctx (&ctx
);
14328 fold_checksum_tree (op1
, &ctx
, &ht
);
14329 md5_finish_ctx (&ctx
, checksum_before_op1
);
14332 md5_init_ctx (&ctx
);
14333 fold_checksum_tree (op2
, &ctx
, &ht
);
14334 md5_finish_ctx (&ctx
, checksum_before_op2
);
/* Variable-length expressions (calls) must use the call-array entry
   point, not this fixed-arity one.  */
14338 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
14339 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14341 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
/* Verify none of the operands changed.  */
14343 #ifdef ENABLE_FOLD_CHECKING
14344 md5_init_ctx (&ctx
);
14345 fold_checksum_tree (op0
, &ctx
, &ht
);
14346 md5_finish_ctx (&ctx
, checksum_after_op0
);
14349 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14350 fold_check_failed (op0
, tem
);
14352 md5_init_ctx (&ctx
);
14353 fold_checksum_tree (op1
, &ctx
, &ht
);
14354 md5_finish_ctx (&ctx
, checksum_after_op1
);
14357 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14358 fold_check_failed (op1
, tem
);
14360 md5_init_ctx (&ctx
);
14361 fold_checksum_tree (op2
, &ctx
, &ht
);
14362 md5_finish_ctx (&ctx
, checksum_after_op2
);
14364 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
14365 fold_check_failed (op2
, tem
);
14370 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14371 arguments in ARGARRAY, and a null static chain.
14372 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14373 of type TYPE from the given operands as constructed by build_call_array. */
/* NOTE(review): the return type, locals (`tree tem;`, `int i;`),
   braces and `return tem;` are elided in this extract.  */
14376 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
14377 int nargs
, tree
*argarray
)
/* Digest FN and all arguments before folding; the argument list is
   hashed into a single combined digest.  */
14380 #ifdef ENABLE_FOLD_CHECKING
14381 unsigned char checksum_before_fn
[16],
14382 checksum_before_arglist
[16],
14383 checksum_after_fn
[16],
14384 checksum_after_arglist
[16];
14385 struct md5_ctx ctx
;
14386 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14389 md5_init_ctx (&ctx
);
14390 fold_checksum_tree (fn
, &ctx
, &ht
);
14391 md5_finish_ctx (&ctx
, checksum_before_fn
);
14394 md5_init_ctx (&ctx
);
14395 for (i
= 0; i
< nargs
; i
++)
14396 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
14397 md5_finish_ctx (&ctx
, checksum_before_arglist
);
/* Try builtin folding; on failure build the plain CALL_EXPR.  */
14401 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
14403 tem
= build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
/* Verify neither FN nor any argument changed.  */
14405 #ifdef ENABLE_FOLD_CHECKING
14406 md5_init_ctx (&ctx
);
14407 fold_checksum_tree (fn
, &ctx
, &ht
);
14408 md5_finish_ctx (&ctx
, checksum_after_fn
);
14411 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
14412 fold_check_failed (fn
, tem
);
14414 md5_init_ctx (&ctx
);
14415 for (i
= 0; i
< nargs
; i
++)
14416 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
14417 md5_finish_ctx (&ctx
, checksum_after_arglist
);
14419 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
14420 fold_check_failed (NULL_TREE
, tem
);
14425 /* Perform constant folding and related simplification of initializer
14426 expression EXPR. These behave identically to "fold_buildN" but ignore
14427 potential run-time traps and exceptions that fold must preserve. */
/* START_FOLD_INIT saves the trap/rounding-related global flags, clears
   them, and sets folding_initializer so fold may simplify expressions
   it would otherwise have to leave alone for run-time trap semantics.
   END_FOLD_INIT restores every saved flag.
   NOTE(review): END_FOLD_INIT restores flag_trapv, so the clearing
   `flag_trapv = 0;` (original line 14438) appears elided from this
   extract — confirm against the upstream source.  */
14429 #define START_FOLD_INIT \
14430 int saved_signaling_nans = flag_signaling_nans;\
14431 int saved_trapping_math = flag_trapping_math;\
14432 int saved_rounding_math = flag_rounding_math;\
14433 int saved_trapv = flag_trapv;\
14434 int saved_folding_initializer = folding_initializer;\
14435 flag_signaling_nans = 0;\
14436 flag_trapping_math = 0;\
14437 flag_rounding_math = 0;\
14439 folding_initializer = 1;
14441 #define END_FOLD_INIT \
14442 flag_signaling_nans = saved_signaling_nans;\
14443 flag_trapping_math = saved_trapping_math;\
14444 flag_rounding_math = saved_rounding_math;\
14445 flag_trapv = saved_trapv;\
14446 folding_initializer = saved_folding_initializer;
/* fold_build1_loc wrapped in START_FOLD_INIT/END_FOLD_INIT: fold a
   unary initializer expression with trap/rounding flags suppressed.
   NOTE(review): the macro invocations, `tree result;`, braces and
   `return result;` are elided in this extract.  */
14449 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
14450 tree type
, tree op
)
14455 result
= fold_build1_loc (loc
, code
, type
, op
);
/* fold_build2_loc wrapped in START_FOLD_INIT/END_FOLD_INIT: fold a
   binary initializer expression with trap/rounding flags suppressed.
   NOTE(review): the macro invocations, `tree result;`, braces and
   `return result;` are elided in this extract.  */
14462 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
14463 tree type
, tree op0
, tree op1
)
14468 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
/* fold_build_call_array_loc wrapped in START_FOLD_INIT/END_FOLD_INIT:
   fold a call initializer expression with trap/rounding flags
   suppressed.  NOTE(review): the macro invocations, `tree result;`,
   braces and `return result;` are elided in this extract.  */
14475 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
14476 int nargs
, tree
*argarray
)
14481 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
/* The helper macros are local to this group of wrappers.  */
14487 #undef START_FOLD_INIT
14488 #undef END_FOLD_INIT
14490 /* Determine if first argument is a multiple of second argument. Return 0 if
14491 it is not, or we cannot easily determined it to be.
14493 An example of the sort of thing we care about (at this point; this routine
14494 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14495 fold cases do now) is discovering that
14497 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14503 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14505 This code also handles discovering that
14507 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14509 is a multiple of 8 so we don't have to worry about dealing with a
14510 possible remainder.
14512 Note that we *look* inside a SAVE_EXPR only to determine how it was
14513 calculated; it is not safe for fold to do much of anything else with the
14514 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14515 at run time. For example, the latter example above *cannot* be implemented
14516 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14517 evaluation time of the original SAVE_EXPR is not necessarily the same at
14518 the time the new expression is evaluated. The only optimization of this
14519 sort that would be valid is changing
14521 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14525 SAVE_EXPR (I) * SAVE_EXPR (J)
14527 (where the same SAVE_EXPR (J) is used in the original and the
14528 transformed version). */
/* NOTE(review): this extract omits the return type, braces, `case`
   labels, `break`s and several `return 0;` lines of the switch below
   (gaps in the embedded line numbers).  Only comments were added.  */
14531 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
/* Anything is trivially a multiple of itself.  */
14533 if (operand_equal_p (top
, bottom
, 0))
14536 if (TREE_CODE (type
) != INTEGER_TYPE
)
14539 switch (TREE_CODE (top
))
14542 /* Bitwise and provides a power of two multiple. If the mask is
14543 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14544 if (!integer_pow2p (bottom
))
14549 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14550 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
/* (elided case labels) — plus/minus: both operands must qualify.  */
14554 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14555 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
/* (elided case label) — left shift by a constant is rewritten as a
   multiplication by the corresponding power of two and re-checked.  */
14558 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
14562 op1
= TREE_OPERAND (top
, 1);
14563 /* const_binop may not detect overflow correctly,
14564 so check for it explicitly here. */
14565 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
14566 && 0 != (t1
= fold_convert (type
,
14567 const_binop (LSHIFT_EXPR
,
14570 && !TREE_OVERFLOW (t1
))
14571 return multiple_of_p (type
, t1
, bottom
);
14576 /* Can't handle conversions from non-integral or wider integral type. */
14577 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
14578 || (TYPE_PRECISION (type
)
14579 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
14582 /* .. fall through ... */
14585 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
/* (elided case label) — COND_EXPR-style node: both value operands
   must be multiples.  */
14588 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
14589 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
/* (elided case label) — INTEGER_CST: decide exactly with wide-int
   arithmetic; bail out on zero BOTTOM or mixed-sign unsigned cases.  */
14592 if (TREE_CODE (bottom
) != INTEGER_CST
14593 || integer_zerop (bottom
)
14594 || (TYPE_UNSIGNED (type
)
14595 && (tree_int_cst_sgn (top
) < 0
14596 || tree_int_cst_sgn (bottom
) < 0)))
14598 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
14606 /* Return true if CODE or TYPE is known to be non-negative. */
/* NOTE(review): the return type, braces and the returns themselves are
   elided in this extract.  */
14609 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
14611 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
14612 && truth_value_p (code
))
14613 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14614 have a signed:1 type (where the value is -1 and 0). */
14619 /* Return true if (CODE OP0) is known to be non-negative. If the return
14620 value is based on the assumption that signed overflow is undefined,
14621 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14622 *STRICT_OVERFLOW_P. */
/* NOTE(review): the return type, braces, `case` labels (ABS_EXPR etc.)
   and several returns are elided in this extract.  */
14625 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14626 bool *strict_overflow_p
)
/* Unsigned results are trivially non-negative.  */
14628 if (TYPE_UNSIGNED (type
))
14634 /* We can't return 1 if flag_wrapv is set because
14635 ABS_EXPR<INT_MIN> = INT_MIN. */
14636 if (!ANY_INTEGRAL_TYPE_P (type
))
14638 if (TYPE_OVERFLOW_UNDEFINED (type
))
/* The answer leans on undefined signed overflow — tell the caller.  */
14640 *strict_overflow_p
= true;
14645 case NON_LVALUE_EXPR
:
14647 case FIX_TRUNC_EXPR
:
14648 return tree_expr_nonnegative_warnv_p (op0
,
14649 strict_overflow_p
);
/* (elided case label) — conversions: reason from the inner operand's
   type to the outer result type.  */
14653 tree inner_type
= TREE_TYPE (op0
);
14654 tree outer_type
= type
;
14656 if (TREE_CODE (outer_type
) == REAL_TYPE
)
14658 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14659 return tree_expr_nonnegative_warnv_p (op0
,
14660 strict_overflow_p
);
14661 if (INTEGRAL_TYPE_P (inner_type
))
14663 if (TYPE_UNSIGNED (inner_type
))
14665 return tree_expr_nonnegative_warnv_p (op0
,
14666 strict_overflow_p
);
14669 else if (INTEGRAL_TYPE_P (outer_type
))
14671 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14672 return tree_expr_nonnegative_warnv_p (op0
,
14673 strict_overflow_p
);
14674 if (INTEGRAL_TYPE_P (inner_type
))
/* Widening from unsigned cannot produce a negative value.  */
14675 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
14676 && TYPE_UNSIGNED (inner_type
);
14682 return tree_simple_nonnegative_warnv_p (code
, type
);
14685 /* We don't know sign of `t', so be conservative and return false. */
14689 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14690 value is based on the assumption that signed overflow is undefined,
14691 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14692 *STRICT_OVERFLOW_P. */
/* NOTE(review): the return type, braces, several `case` labels
   (PLUS_EXPR, MULT_EXPR, BIT_AND/IOR/XOR, etc.), `break`s and some
   returns are elided in this extract.  Only comments were added.  */
14695 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14696 tree op1
, bool *strict_overflow_p
)
/* Unsigned results are trivially non-negative.  */
14698 if (TYPE_UNSIGNED (type
))
14703 case POINTER_PLUS_EXPR
:
/* (elided: PLUS_EXPR case label) — float addition of two non-negative
   values is non-negative.  */
14705 if (FLOAT_TYPE_P (type
))
14706 return (tree_expr_nonnegative_warnv_p (op0
,
14708 && tree_expr_nonnegative_warnv_p (op1
,
14709 strict_overflow_p
));
14711 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14712 both unsigned and at least 2 bits shorter than the result. */
14713 if (TREE_CODE (type
) == INTEGER_TYPE
14714 && TREE_CODE (op0
) == NOP_EXPR
14715 && TREE_CODE (op1
) == NOP_EXPR
)
14717 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
14718 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
14719 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
14720 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
/* +1 accounts for the carry of the addition.  */
14722 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
14723 TYPE_PRECISION (inner2
)) + 1;
14724 return prec
< TYPE_PRECISION (type
);
/* (elided: MULT_EXPR case label).  */
14730 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
14732 /* x * x is always non-negative for floating point x
14733 or without overflow. */
14734 if (operand_equal_p (op0
, op1
, 0)
14735 || (tree_expr_nonnegative_warnv_p (op0
, strict_overflow_p
)
14736 && tree_expr_nonnegative_warnv_p (op1
, strict_overflow_p
)))
14738 if (ANY_INTEGRAL_TYPE_P (type
)
14739 && TYPE_OVERFLOW_UNDEFINED (type
))
/* The conclusion relies on undefined signed overflow.  */
14740 *strict_overflow_p
= true;
14745 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14746 both unsigned and their total bits is shorter than the result. */
14747 if (TREE_CODE (type
) == INTEGER_TYPE
14748 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
14749 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
14751 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
14752 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
14754 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
14755 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
14758 bool unsigned0
= TYPE_UNSIGNED (inner0
);
14759 bool unsigned1
= TYPE_UNSIGNED (inner1
);
/* A non-negative constant operand counts as "unsigned" here.  */
14761 if (TREE_CODE (op0
) == INTEGER_CST
)
14762 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
14764 if (TREE_CODE (op1
) == INTEGER_CST
)
14765 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
14767 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
14768 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
/* For constants, use the minimum precision actually needed rather
   than the full type precision.  */
14770 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
14771 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
14772 : TYPE_PRECISION (inner0
);
14774 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
14775 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
14776 : TYPE_PRECISION (inner1
);
14778 return precision0
+ precision1
< TYPE_PRECISION (type
);
/* (elided case labels, presumably BIT_AND-style) — either operand
   non-negative suffices.  */
14785 return (tree_expr_nonnegative_warnv_p (op0
,
14787 || tree_expr_nonnegative_warnv_p (op1
,
14788 strict_overflow_p
));
/* Division: both operands must be non-negative.  */
14794 case TRUNC_DIV_EXPR
:
14795 case CEIL_DIV_EXPR
:
14796 case FLOOR_DIV_EXPR
:
14797 case ROUND_DIV_EXPR
:
14798 return (tree_expr_nonnegative_warnv_p (op0
,
14800 && tree_expr_nonnegative_warnv_p (op1
,
14801 strict_overflow_p
));
/* Modulo: sign follows the first operand.  */
14803 case TRUNC_MOD_EXPR
:
14804 case CEIL_MOD_EXPR
:
14805 case FLOOR_MOD_EXPR
:
14806 case ROUND_MOD_EXPR
:
14807 return tree_expr_nonnegative_warnv_p (op0
,
14808 strict_overflow_p
);
14810 return tree_simple_nonnegative_warnv_p (code
, type
);
14813 /* We don't know sign of `t', so be conservative and return false. */
14817 /* Return true if T is known to be non-negative. If the return
14818 value is based on the assumption that signed overflow is undefined,
14819 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14820 *STRICT_OVERFLOW_P. */
/* NOTE(review): the return type, braces and `case` labels
   (INTEGER_CST / REAL_CST / FIXED_CST / COND_EXPR-style, default) are
   elided in this extract.  */
14823 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14825 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14828 switch (TREE_CODE (t
))
14831 return tree_int_cst_sgn (t
) >= 0;
14834 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
14837 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
/* (elided case label) — conditional: both arms must be non-negative.  */
14840 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
14842 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 2),
14843 strict_overflow_p
));
14845 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
14848 /* We don't know sign of `t', so be conservative and return false. */
14852 /* Return true if T is known to be non-negative. If the return
14853 value is based on the assumption that signed overflow is undefined,
14854 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14855 *STRICT_OVERFLOW_P. */
/* Decides non-negativity of a call to builtin FNDECL with arguments
   ARG0/ARG1 and result type TYPE.  NOTE(review): the return type,
   braces, several `return true;` lines and `break`s are elided in
   this extract.  */
14858 tree_call_nonnegative_warnv_p (tree type
, tree fndecl
,
14859 tree arg0
, tree arg1
, bool *strict_overflow_p
)
14861 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
14862 switch (DECL_FUNCTION_CODE (fndecl
))
/* Builtins whose result is always non-negative.  */
14864 CASE_FLT_FN (BUILT_IN_ACOS
):
14865 CASE_FLT_FN (BUILT_IN_ACOSH
):
14866 CASE_FLT_FN (BUILT_IN_CABS
):
14867 CASE_FLT_FN (BUILT_IN_COSH
):
14868 CASE_FLT_FN (BUILT_IN_ERFC
):
14869 CASE_FLT_FN (BUILT_IN_EXP
):
14870 CASE_FLT_FN (BUILT_IN_EXP10
):
14871 CASE_FLT_FN (BUILT_IN_EXP2
):
14872 CASE_FLT_FN (BUILT_IN_FABS
):
14873 CASE_FLT_FN (BUILT_IN_FDIM
):
14874 CASE_FLT_FN (BUILT_IN_HYPOT
):
14875 CASE_FLT_FN (BUILT_IN_POW10
):
14876 CASE_INT_FN (BUILT_IN_FFS
):
14877 CASE_INT_FN (BUILT_IN_PARITY
):
14878 CASE_INT_FN (BUILT_IN_POPCOUNT
):
14879 CASE_INT_FN (BUILT_IN_CLZ
):
14880 CASE_INT_FN (BUILT_IN_CLRSB
):
14881 case BUILT_IN_BSWAP32
:
14882 case BUILT_IN_BSWAP64
:
14886 CASE_FLT_FN (BUILT_IN_SQRT
):
14887 /* sqrt(-0.0) is -0.0. */
14888 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
14890 return tree_expr_nonnegative_warnv_p (arg0
,
14891 strict_overflow_p
);
/* Builtins that preserve the sign of their first argument.  */
14893 CASE_FLT_FN (BUILT_IN_ASINH
):
14894 CASE_FLT_FN (BUILT_IN_ATAN
):
14895 CASE_FLT_FN (BUILT_IN_ATANH
):
14896 CASE_FLT_FN (BUILT_IN_CBRT
):
14897 CASE_FLT_FN (BUILT_IN_CEIL
):
14898 CASE_FLT_FN (BUILT_IN_ERF
):
14899 CASE_FLT_FN (BUILT_IN_EXPM1
):
14900 CASE_FLT_FN (BUILT_IN_FLOOR
):
14901 CASE_FLT_FN (BUILT_IN_FMOD
):
14902 CASE_FLT_FN (BUILT_IN_FREXP
):
14903 CASE_FLT_FN (BUILT_IN_ICEIL
):
14904 CASE_FLT_FN (BUILT_IN_IFLOOR
):
14905 CASE_FLT_FN (BUILT_IN_IRINT
):
14906 CASE_FLT_FN (BUILT_IN_IROUND
):
14907 CASE_FLT_FN (BUILT_IN_LCEIL
):
14908 CASE_FLT_FN (BUILT_IN_LDEXP
):
14909 CASE_FLT_FN (BUILT_IN_LFLOOR
):
14910 CASE_FLT_FN (BUILT_IN_LLCEIL
):
14911 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
14912 CASE_FLT_FN (BUILT_IN_LLRINT
):
14913 CASE_FLT_FN (BUILT_IN_LLROUND
):
14914 CASE_FLT_FN (BUILT_IN_LRINT
):
14915 CASE_FLT_FN (BUILT_IN_LROUND
):
14916 CASE_FLT_FN (BUILT_IN_MODF
):
14917 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
14918 CASE_FLT_FN (BUILT_IN_RINT
):
14919 CASE_FLT_FN (BUILT_IN_ROUND
):
14920 CASE_FLT_FN (BUILT_IN_SCALB
):
14921 CASE_FLT_FN (BUILT_IN_SCALBLN
):
14922 CASE_FLT_FN (BUILT_IN_SCALBN
):
14923 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
14924 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
14925 CASE_FLT_FN (BUILT_IN_SINH
):
14926 CASE_FLT_FN (BUILT_IN_TANH
):
14927 CASE_FLT_FN (BUILT_IN_TRUNC
):
14928 /* True if the 1st argument is nonnegative. */
14929 return tree_expr_nonnegative_warnv_p (arg0
,
14930 strict_overflow_p
);
14932 CASE_FLT_FN (BUILT_IN_FMAX
):
14933 /* True if the 1st OR 2nd arguments are nonnegative. */
14934 return (tree_expr_nonnegative_warnv_p (arg0
,
14936 || (tree_expr_nonnegative_warnv_p (arg1
,
14937 strict_overflow_p
)));
14939 CASE_FLT_FN (BUILT_IN_FMIN
):
14940 /* True if the 1st AND 2nd arguments are nonnegative. */
14941 return (tree_expr_nonnegative_warnv_p (arg0
,
14943 && (tree_expr_nonnegative_warnv_p (arg1
,
14944 strict_overflow_p
)));
14946 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
14947 /* True if the 2nd argument is nonnegative. */
14948 return tree_expr_nonnegative_warnv_p (arg1
,
14949 strict_overflow_p
);
14951 CASE_FLT_FN (BUILT_IN_POWI
):
14952 /* True if the 1st argument is nonnegative or the second
14953 argument is an even integer. */
14954 if (TREE_CODE (arg1
) == INTEGER_CST
14955 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
14957 return tree_expr_nonnegative_warnv_p (arg0
,
14958 strict_overflow_p
);
14960 CASE_FLT_FN (BUILT_IN_POW
):
14961 /* True if the 1st argument is nonnegative or the second
14962 argument is an even integer valued real. */
14963 if (TREE_CODE (arg1
) == REAL_CST
)
/* Round-trip the exponent through an integer; if it is identical
   it was an exact integer, and its parity decides.  */
14968 c
= TREE_REAL_CST (arg1
);
14969 n
= real_to_integer (&c
);
14972 REAL_VALUE_TYPE cint
;
14973 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
14974 if (real_identical (&c
, &cint
))
14978 return tree_expr_nonnegative_warnv_p (arg0
,
14979 strict_overflow_p
);
/* Not a recognized builtin: fall back to the generic rule.  */
14984 return tree_simple_nonnegative_warnv_p (CALL_EXPR
,
14988 /* Return true if T is known to be non-negative. If the return
14989 value is based on the assumption that signed overflow is undefined,
14990 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14991 *STRICT_OVERFLOW_P. */
/* Handles the "irregular" codes (TARGET_EXPR, CALL_EXPR, compound
   statements).  NOTE(review): the return type, braces, `case` labels
   (TARGET_EXPR, CALL_EXPR, MODIFY_EXPR-style, default) and some
   statement-list iteration lines are elided in this extract.  */
14994 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14996 enum tree_code code
= TREE_CODE (t
);
14997 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
/* (elided: case TARGET_EXPR) — reason about the slot initializer.  */
15004 tree temp
= TARGET_EXPR_SLOT (t
);
15005 t
= TARGET_EXPR_INITIAL (t
);
15007 /* If the initializer is non-void, then it's a normal expression
15008 that will be assigned to the slot. */
15009 if (!VOID_TYPE_P (t
))
15010 return tree_expr_nonnegative_warnv_p (t
, strict_overflow_p
);
15012 /* Otherwise, the initializer sets the slot in some way. One common
15013 way is an assignment statement at the end of the initializer. */
15016 if (TREE_CODE (t
) == BIND_EXPR
)
15017 t
= expr_last (BIND_EXPR_BODY (t
));
15018 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
15019 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
15020 t
= expr_last (TREE_OPERAND (t
, 0));
15021 else if (TREE_CODE (t
) == STATEMENT_LIST
)
/* If the last statement assigns into the slot, test its RHS.  */
15026 if (TREE_CODE (t
) == MODIFY_EXPR
15027 && TREE_OPERAND (t
, 0) == temp
)
15028 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15029 strict_overflow_p
);
/* (elided: case CALL_EXPR) — pick off up to two arguments and defer
   to the builtin-aware helper.  */
15036 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
15037 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
15039 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
15040 get_callee_fndecl (t
),
15043 strict_overflow_p
);
15045 case COMPOUND_EXPR
:
/* Value of a compound/modify-style node comes from operand 1.  */
15047 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15048 strict_overflow_p
);
15050 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t
, 1)),
15051 strict_overflow_p
);
15053 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
15054 strict_overflow_p
);
15057 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
15061 /* We don't know sign of `t', so be conservative and return false. */
15065 /* Return true if T is known to be non-negative. If the return
15066 value is based on the assumption that signed overflow is undefined,
15067 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15068 *STRICT_OVERFLOW_P. */
/* Top-level dispatcher: routes T to the unary/binary/single/invalid
   helpers based on its tree-code class.  NOTE(review): the return
   type, braces, several `case` labels and the TREE_TYPE (t) arguments
   of the helper calls are elided in this extract.  */
15071 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15073 enum tree_code code
;
/* Error nodes carry no usable sign information.  */
15074 if (t
== error_mark_node
)
15077 code
= TREE_CODE (t
);
15078 switch (TREE_CODE_CLASS (code
))
15081 case tcc_comparison
:
15082 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15084 TREE_OPERAND (t
, 0),
15085 TREE_OPERAND (t
, 1),
15086 strict_overflow_p
);
15089 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15091 TREE_OPERAND (t
, 0),
15092 strict_overflow_p
);
15095 case tcc_declaration
:
15096 case tcc_reference
:
15097 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15105 case TRUTH_AND_EXPR
:
15106 case TRUTH_OR_EXPR
:
15107 case TRUTH_XOR_EXPR
:
15108 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15110 TREE_OPERAND (t
, 0),
15111 TREE_OPERAND (t
, 1),
15112 strict_overflow_p
);
15113 case TRUTH_NOT_EXPR
:
15114 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15116 TREE_OPERAND (t
, 0),
15117 strict_overflow_p
);
15124 case WITH_SIZE_EXPR
:
15126 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15129 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
);
15133 /* Return true if `t' is known to be non-negative. Handle warnings
15134 about undefined signed overflow. */
/* Convenience wrapper: calls the _warnv_p variant and, if the answer
   relied on undefined signed overflow, issues the -Wstrict-overflow
   diagnostic itself.  NOTE(review): return type, braces and
   `return ret;` are elided in this extract.  */
15137 tree_expr_nonnegative_p (tree t
)
15139 bool ret
, strict_overflow_p
;
15141 strict_overflow_p
= false;
15142 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
15143 if (strict_overflow_p
)
15144 fold_overflow_warning (("assuming signed overflow does not occur when "
15145 "determining that expression is always "
15147 WARN_STRICT_OVERFLOW_MISC
);
15152 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15153 For floating point we further ensure that T is not denormal.
15154 Similar logic is present in nonzero_address in rtlanal.h.
15156 If the return value is based on the assumption that signed overflow
15157 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15158 change *STRICT_OVERFLOW_P. */
/* NOTE(review): the return type, braces, `case` labels (ABS_EXPR /
   conversion codes) and the default `return false;` are elided in
   this extract.  */
15161 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
15162 bool *strict_overflow_p
)
/* (elided case label) — nonzero-ness of the operand carries over.  */
15167 return tree_expr_nonzero_warnv_p (op0
,
15168 strict_overflow_p
);
/* (elided case label) — conversions: a non-narrowing conversion of a
   nonzero value stays nonzero.  */
15172 tree inner_type
= TREE_TYPE (op0
);
15173 tree outer_type
= type
;
15175 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
15176 && tree_expr_nonzero_warnv_p (op0
,
15177 strict_overflow_p
));
15181 case NON_LVALUE_EXPR
:
15182 return tree_expr_nonzero_warnv_p (op0
,
15183 strict_overflow_p
);
15192 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15193 For floating point we further ensure that T is not denormal.
15194 Similar logic is present in nonzero_address in rtlanal.h.
15196 If the return value is based on the assumption that signed overflow
15197 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15198 change *STRICT_OVERFLOW_P. */
15201 tree_binary_nonzero_warnv_p (enum tree_code code
,
15204 tree op1
, bool *strict_overflow_p
)
15206 bool sub_strict_overflow_p
;
15209 case POINTER_PLUS_EXPR
:
15211 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_UNDEFINED (type
))
15213 /* With the presence of negative values it is hard
15214 to say something. */
15215 sub_strict_overflow_p
= false;
15216 if (!tree_expr_nonnegative_warnv_p (op0
,
15217 &sub_strict_overflow_p
)
15218 || !tree_expr_nonnegative_warnv_p (op1
,
15219 &sub_strict_overflow_p
))
15221 /* One of operands must be positive and the other non-negative. */
15222 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15223 overflows, on a twos-complement machine the sum of two
15224 nonnegative numbers can never be zero. */
15225 return (tree_expr_nonzero_warnv_p (op0
,
15227 || tree_expr_nonzero_warnv_p (op1
,
15228 strict_overflow_p
));
15233 if (TYPE_OVERFLOW_UNDEFINED (type
))
15235 if (tree_expr_nonzero_warnv_p (op0
,
15237 && tree_expr_nonzero_warnv_p (op1
,
15238 strict_overflow_p
))
15240 *strict_overflow_p
= true;
15247 sub_strict_overflow_p
= false;
15248 if (tree_expr_nonzero_warnv_p (op0
,
15249 &sub_strict_overflow_p
)
15250 && tree_expr_nonzero_warnv_p (op1
,
15251 &sub_strict_overflow_p
))
15253 if (sub_strict_overflow_p
)
15254 *strict_overflow_p
= true;
15259 sub_strict_overflow_p
= false;
15260 if (tree_expr_nonzero_warnv_p (op0
,
15261 &sub_strict_overflow_p
))
15263 if (sub_strict_overflow_p
)
15264 *strict_overflow_p
= true;
15266 /* When both operands are nonzero, then MAX must be too. */
15267 if (tree_expr_nonzero_warnv_p (op1
,
15268 strict_overflow_p
))
15271 /* MAX where operand 0 is positive is positive. */
15272 return tree_expr_nonnegative_warnv_p (op0
,
15273 strict_overflow_p
);
15275 /* MAX where operand 1 is positive is positive. */
15276 else if (tree_expr_nonzero_warnv_p (op1
,
15277 &sub_strict_overflow_p
)
15278 && tree_expr_nonnegative_warnv_p (op1
,
15279 &sub_strict_overflow_p
))
15281 if (sub_strict_overflow_p
)
15282 *strict_overflow_p
= true;
15288 return (tree_expr_nonzero_warnv_p (op1
,
15290 || tree_expr_nonzero_warnv_p (op0
,
15291 strict_overflow_p
));
15300 /* Return true when T is an address and is known to be nonzero.
15301 For floating point we further ensure that T is not denormal.
15302 Similar logic is present in nonzero_address in rtlanal.h.
15304 If the return value is based on the assumption that signed overflow
15305 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15306 change *STRICT_OVERFLOW_P. */
15309 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15311 bool sub_strict_overflow_p
;
15312 switch (TREE_CODE (t
))
15315 return !integer_zerop (t
);
15319 tree base
= TREE_OPERAND (t
, 0);
15321 if (!DECL_P (base
))
15322 base
= get_base_address (base
);
15327 /* For objects in symbol table check if we know they are non-zero.
15328 Don't do anything for variables and functions before symtab is built;
15329 it is quite possible that they will be declared weak later. */
15330 if (DECL_P (base
) && decl_in_symtab_p (base
))
15332 struct symtab_node
*symbol
;
15334 symbol
= symtab_node::get_create (base
);
15336 return symbol
->nonzero_address ();
15341 /* Function local objects are never NULL. */
15343 && (DECL_CONTEXT (base
)
15344 && TREE_CODE (DECL_CONTEXT (base
)) == FUNCTION_DECL
15345 && auto_var_in_fn_p (base
, DECL_CONTEXT (base
))))
15348 /* Constants are never weak. */
15349 if (CONSTANT_CLASS_P (base
))
15356 sub_strict_overflow_p
= false;
15357 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15358 &sub_strict_overflow_p
)
15359 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
15360 &sub_strict_overflow_p
))
15362 if (sub_strict_overflow_p
)
15363 *strict_overflow_p
= true;
15374 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15375 attempt to fold the expression to a constant without modifying TYPE,
15378 If the expression could be simplified to a constant, then return
15379 the constant. If the expression would not be simplified to a
15380 constant, then return NULL_TREE. */
15383 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
15385 tree tem
= fold_binary (code
, type
, op0
, op1
);
15386 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15389 /* Given the components of a unary expression CODE, TYPE and OP0,
15390 attempt to fold the expression to a constant without modifying
15393 If the expression could be simplified to a constant, then return
15394 the constant. If the expression would not be simplified to a
15395 constant, then return NULL_TREE. */
15398 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
15400 tree tem
= fold_unary (code
, type
, op0
);
15401 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15404 /* If EXP represents referencing an element in a constant string
15405 (either via pointer arithmetic or array indexing), return the
15406 tree representing the value accessed, otherwise return NULL. */
15409 fold_read_from_constant_string (tree exp
)
15411 if ((TREE_CODE (exp
) == INDIRECT_REF
15412 || TREE_CODE (exp
) == ARRAY_REF
)
15413 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
15415 tree exp1
= TREE_OPERAND (exp
, 0);
15418 location_t loc
= EXPR_LOCATION (exp
);
15420 if (TREE_CODE (exp
) == INDIRECT_REF
)
15421 string
= string_constant (exp1
, &index
);
15424 tree low_bound
= array_ref_low_bound (exp
);
15425 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
15427 /* Optimize the special-case of a zero lower bound.
15429 We convert the low_bound to sizetype to avoid some problems
15430 with constant folding. (E.g. suppose the lower bound is 1,
15431 and its mode is QI. Without the conversion,l (ARRAY
15432 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15433 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15434 if (! integer_zerop (low_bound
))
15435 index
= size_diffop_loc (loc
, index
,
15436 fold_convert_loc (loc
, sizetype
, low_bound
));
15442 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
15443 && TREE_CODE (string
) == STRING_CST
15444 && TREE_CODE (index
) == INTEGER_CST
15445 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
15446 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
15448 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
15449 return build_int_cst_type (TREE_TYPE (exp
),
15450 (TREE_STRING_POINTER (string
)
15451 [TREE_INT_CST_LOW (index
)]));
15456 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15457 an integer constant, real, or fixed-point constant.
15459 TYPE is the type of the result. */
15462 fold_negate_const (tree arg0
, tree type
)
15464 tree t
= NULL_TREE
;
15466 switch (TREE_CODE (arg0
))
15471 wide_int val
= wi::neg (arg0
, &overflow
);
15472 t
= force_fit_type (type
, val
, 1,
15473 (overflow
| TREE_OVERFLOW (arg0
))
15474 && !TYPE_UNSIGNED (type
));
15479 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15484 FIXED_VALUE_TYPE f
;
15485 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
15486 &(TREE_FIXED_CST (arg0
)), NULL
,
15487 TYPE_SATURATING (type
));
15488 t
= build_fixed (type
, f
);
15489 /* Propagate overflow flags. */
15490 if (overflow_p
| TREE_OVERFLOW (arg0
))
15491 TREE_OVERFLOW (t
) = 1;
15496 gcc_unreachable ();
15502 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15503 an integer constant or real constant.
15505 TYPE is the type of the result. */
15508 fold_abs_const (tree arg0
, tree type
)
15510 tree t
= NULL_TREE
;
15512 switch (TREE_CODE (arg0
))
15516 /* If the value is unsigned or non-negative, then the absolute value
15517 is the same as the ordinary value. */
15518 if (!wi::neg_p (arg0
, TYPE_SIGN (type
)))
15521 /* If the value is negative, then the absolute value is
15526 wide_int val
= wi::neg (arg0
, &overflow
);
15527 t
= force_fit_type (type
, val
, -1,
15528 overflow
| TREE_OVERFLOW (arg0
));
15534 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
15535 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15541 gcc_unreachable ();
15547 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15548 constant. TYPE is the type of the result. */
15551 fold_not_const (const_tree arg0
, tree type
)
15553 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
15555 return force_fit_type (type
, wi::bit_not (arg0
), 0, TREE_OVERFLOW (arg0
));
15558 /* Given CODE, a relational operator, the target type, TYPE and two
15559 constant operands OP0 and OP1, return the result of the
15560 relational operation. If the result is not a compile time
15561 constant, then return NULL_TREE. */
15564 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
15566 int result
, invert
;
15568 /* From here on, the only cases we handle are when the result is
15569 known to be a constant. */
15571 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
15573 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
15574 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
15576 /* Handle the cases where either operand is a NaN. */
15577 if (real_isnan (c0
) || real_isnan (c1
))
15587 case UNORDERED_EXPR
:
15601 if (flag_trapping_math
)
15607 gcc_unreachable ();
15610 return constant_boolean_node (result
, type
);
15613 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
15616 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
15618 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
15619 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
15620 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
15623 /* Handle equality/inequality of complex constants. */
15624 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
15626 tree rcond
= fold_relational_const (code
, type
,
15627 TREE_REALPART (op0
),
15628 TREE_REALPART (op1
));
15629 tree icond
= fold_relational_const (code
, type
,
15630 TREE_IMAGPART (op0
),
15631 TREE_IMAGPART (op1
));
15632 if (code
== EQ_EXPR
)
15633 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
15634 else if (code
== NE_EXPR
)
15635 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
15640 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
15642 unsigned count
= VECTOR_CST_NELTS (op0
);
15643 tree
*elts
= XALLOCAVEC (tree
, count
);
15644 gcc_assert (VECTOR_CST_NELTS (op1
) == count
15645 && TYPE_VECTOR_SUBPARTS (type
) == count
);
15647 for (unsigned i
= 0; i
< count
; i
++)
15649 tree elem_type
= TREE_TYPE (type
);
15650 tree elem0
= VECTOR_CST_ELT (op0
, i
);
15651 tree elem1
= VECTOR_CST_ELT (op1
, i
);
15653 tree tem
= fold_relational_const (code
, elem_type
,
15656 if (tem
== NULL_TREE
)
15659 elts
[i
] = build_int_cst (elem_type
, integer_zerop (tem
) ? 0 : -1);
15662 return build_vector (type
, elts
);
15665 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15667 To compute GT, swap the arguments and do LT.
15668 To compute GE, do LT and invert the result.
15669 To compute LE, swap the arguments, do LT and invert the result.
15670 To compute NE, do EQ and invert the result.
15672 Therefore, the code below must handle only EQ and LT. */
15674 if (code
== LE_EXPR
|| code
== GT_EXPR
)
15679 code
= swap_tree_comparison (code
);
15682 /* Note that it is safe to invert for real values here because we
15683 have already handled the one case that it matters. */
15686 if (code
== NE_EXPR
|| code
== GE_EXPR
)
15689 code
= invert_tree_comparison (code
, false);
15692 /* Compute a result for LT or EQ if args permit;
15693 Otherwise return T. */
15694 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
15696 if (code
== EQ_EXPR
)
15697 result
= tree_int_cst_equal (op0
, op1
);
15699 result
= tree_int_cst_lt (op0
, op1
);
15706 return constant_boolean_node (result
, type
);
15709 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15710 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15714 fold_build_cleanup_point_expr (tree type
, tree expr
)
15716 /* If the expression does not have side effects then we don't have to wrap
15717 it with a cleanup point expression. */
15718 if (!TREE_SIDE_EFFECTS (expr
))
15721 /* If the expression is a return, check to see if the expression inside the
15722 return has no side effects or the right hand side of the modify expression
15723 inside the return. If either don't have side effects set we don't need to
15724 wrap the expression in a cleanup point expression. Note we don't check the
15725 left hand side of the modify because it should always be a return decl. */
15726 if (TREE_CODE (expr
) == RETURN_EXPR
)
15728 tree op
= TREE_OPERAND (expr
, 0);
15729 if (!op
|| !TREE_SIDE_EFFECTS (op
))
15731 op
= TREE_OPERAND (op
, 1);
15732 if (!TREE_SIDE_EFFECTS (op
))
15736 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
15739 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15740 of an indirection through OP0, or NULL_TREE if no simplification is
15744 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
15750 subtype
= TREE_TYPE (sub
);
15751 if (!POINTER_TYPE_P (subtype
))
15754 if (TREE_CODE (sub
) == ADDR_EXPR
)
15756 tree op
= TREE_OPERAND (sub
, 0);
15757 tree optype
= TREE_TYPE (op
);
15758 /* *&CONST_DECL -> to the value of the const decl. */
15759 if (TREE_CODE (op
) == CONST_DECL
)
15760 return DECL_INITIAL (op
);
15761 /* *&p => p; make sure to handle *&"str"[cst] here. */
15762 if (type
== optype
)
15764 tree fop
= fold_read_from_constant_string (op
);
15770 /* *(foo *)&fooarray => fooarray[0] */
15771 else if (TREE_CODE (optype
) == ARRAY_TYPE
15772 && type
== TREE_TYPE (optype
)
15773 && (!in_gimple_form
15774 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15776 tree type_domain
= TYPE_DOMAIN (optype
);
15777 tree min_val
= size_zero_node
;
15778 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15779 min_val
= TYPE_MIN_VALUE (type_domain
);
15781 && TREE_CODE (min_val
) != INTEGER_CST
)
15783 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
15784 NULL_TREE
, NULL_TREE
);
15786 /* *(foo *)&complexfoo => __real__ complexfoo */
15787 else if (TREE_CODE (optype
) == COMPLEX_TYPE
15788 && type
== TREE_TYPE (optype
))
15789 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
15790 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15791 else if (TREE_CODE (optype
) == VECTOR_TYPE
15792 && type
== TREE_TYPE (optype
))
15794 tree part_width
= TYPE_SIZE (type
);
15795 tree index
= bitsize_int (0);
15796 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
15800 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
15801 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
15803 tree op00
= TREE_OPERAND (sub
, 0);
15804 tree op01
= TREE_OPERAND (sub
, 1);
15807 if (TREE_CODE (op00
) == ADDR_EXPR
)
15810 op00
= TREE_OPERAND (op00
, 0);
15811 op00type
= TREE_TYPE (op00
);
15813 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15814 if (TREE_CODE (op00type
) == VECTOR_TYPE
15815 && type
== TREE_TYPE (op00type
))
15817 HOST_WIDE_INT offset
= tree_to_shwi (op01
);
15818 tree part_width
= TYPE_SIZE (type
);
15819 unsigned HOST_WIDE_INT part_widthi
= tree_to_shwi (part_width
)/BITS_PER_UNIT
;
15820 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
15821 tree index
= bitsize_int (indexi
);
15823 if (offset
/ part_widthi
< TYPE_VECTOR_SUBPARTS (op00type
))
15824 return fold_build3_loc (loc
,
15825 BIT_FIELD_REF
, type
, op00
,
15826 part_width
, index
);
15829 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15830 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
15831 && type
== TREE_TYPE (op00type
))
15833 tree size
= TYPE_SIZE_UNIT (type
);
15834 if (tree_int_cst_equal (size
, op01
))
15835 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
15837 /* ((foo *)&fooarray)[1] => fooarray[1] */
15838 else if (TREE_CODE (op00type
) == ARRAY_TYPE
15839 && type
== TREE_TYPE (op00type
))
15841 tree type_domain
= TYPE_DOMAIN (op00type
);
15842 tree min_val
= size_zero_node
;
15843 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15844 min_val
= TYPE_MIN_VALUE (type_domain
);
15845 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
15846 TYPE_SIZE_UNIT (type
));
15847 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
15848 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
15849 NULL_TREE
, NULL_TREE
);
15854 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15855 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
15856 && type
== TREE_TYPE (TREE_TYPE (subtype
))
15857 && (!in_gimple_form
15858 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15861 tree min_val
= size_zero_node
;
15862 sub
= build_fold_indirect_ref_loc (loc
, sub
);
15863 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
15864 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15865 min_val
= TYPE_MIN_VALUE (type_domain
);
15867 && TREE_CODE (min_val
) != INTEGER_CST
)
15869 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
15876 /* Builds an expression for an indirection through T, simplifying some
15880 build_fold_indirect_ref_loc (location_t loc
, tree t
)
15882 tree type
= TREE_TYPE (TREE_TYPE (t
));
15883 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
15888 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
15891 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15894 fold_indirect_ref_loc (location_t loc
, tree t
)
15896 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
15904 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15905 whose result is ignored. The type of the returned tree need not be
15906 the same as the original expression. */
15909 fold_ignored_result (tree t
)
15911 if (!TREE_SIDE_EFFECTS (t
))
15912 return integer_zero_node
;
15915 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
15918 t
= TREE_OPERAND (t
, 0);
15922 case tcc_comparison
:
15923 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15924 t
= TREE_OPERAND (t
, 0);
15925 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
15926 t
= TREE_OPERAND (t
, 1);
15931 case tcc_expression
:
15932 switch (TREE_CODE (t
))
15934 case COMPOUND_EXPR
:
15935 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15937 t
= TREE_OPERAND (t
, 0);
15941 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
15942 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
15944 t
= TREE_OPERAND (t
, 0);
15957 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15960 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
15962 tree div
= NULL_TREE
;
15967 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15968 have to do anything. Only do this when we are not given a const,
15969 because in that case, this check is more expensive than just
15971 if (TREE_CODE (value
) != INTEGER_CST
)
15973 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15975 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
15979 /* If divisor is a power of two, simplify this to bit manipulation. */
15980 if (divisor
== (divisor
& -divisor
))
15982 if (TREE_CODE (value
) == INTEGER_CST
)
15984 wide_int val
= value
;
15987 if ((val
& (divisor
- 1)) == 0)
15990 overflow_p
= TREE_OVERFLOW (value
);
15991 val
+= divisor
- 1;
15992 val
&= - (int) divisor
;
15996 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
16002 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
16003 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
16004 t
= build_int_cst (TREE_TYPE (value
), - (int) divisor
);
16005 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16011 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16012 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
16013 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16019 /* Likewise, but round down. */
16022 round_down_loc (location_t loc
, tree value
, int divisor
)
16024 tree div
= NULL_TREE
;
16026 gcc_assert (divisor
> 0);
16030 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16031 have to do anything. Only do this when we are not given a const,
16032 because in that case, this check is more expensive than just
16034 if (TREE_CODE (value
) != INTEGER_CST
)
16036 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16038 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16042 /* If divisor is a power of two, simplify this to bit manipulation. */
16043 if (divisor
== (divisor
& -divisor
))
16047 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16048 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16053 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16054 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
16055 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16061 /* Returns the pointer to the base of the object addressed by EXP and
16062 extracts the information about the offset of the access, storing it
16063 to PBITPOS and POFFSET. */
16066 split_address_to_core_and_offset (tree exp
,
16067 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
16071 int unsignedp
, volatilep
;
16072 HOST_WIDE_INT bitsize
;
16073 location_t loc
= EXPR_LOCATION (exp
);
16075 if (TREE_CODE (exp
) == ADDR_EXPR
)
16077 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
16078 poffset
, &mode
, &unsignedp
, &volatilep
,
16080 core
= build_fold_addr_expr_loc (loc
, core
);
16086 *poffset
= NULL_TREE
;
16092 /* Returns true if addresses of E1 and E2 differ by a constant, false
16093 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16096 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
16099 HOST_WIDE_INT bitpos1
, bitpos2
;
16100 tree toffset1
, toffset2
, tdiff
, type
;
16102 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
16103 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
16105 if (bitpos1
% BITS_PER_UNIT
!= 0
16106 || bitpos2
% BITS_PER_UNIT
!= 0
16107 || !operand_equal_p (core1
, core2
, 0))
16110 if (toffset1
&& toffset2
)
16112 type
= TREE_TYPE (toffset1
);
16113 if (type
!= TREE_TYPE (toffset2
))
16114 toffset2
= fold_convert (type
, toffset2
);
16116 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
16117 if (!cst_and_fits_in_hwi (tdiff
))
16120 *diff
= int_cst_value (tdiff
);
16122 else if (toffset1
|| toffset2
)
16124 /* If only one of the offsets is non-constant, the difference cannot
16131 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
16135 /* Simplify the floating point expression EXP when the sign of the
16136 result is not significant. Return NULL_TREE if no simplification
16140 fold_strip_sign_ops (tree exp
)
16143 location_t loc
= EXPR_LOCATION (exp
);
16145 switch (TREE_CODE (exp
))
16149 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16150 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
16154 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp
)))
16156 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16157 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16158 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
16159 return fold_build2_loc (loc
, TREE_CODE (exp
), TREE_TYPE (exp
),
16160 arg0
? arg0
: TREE_OPERAND (exp
, 0),
16161 arg1
? arg1
: TREE_OPERAND (exp
, 1));
16164 case COMPOUND_EXPR
:
16165 arg0
= TREE_OPERAND (exp
, 0);
16166 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16168 return fold_build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (exp
), arg0
, arg1
);
16172 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16173 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 2));
16175 return fold_build3_loc (loc
,
16176 COND_EXPR
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
16177 arg0
? arg0
: TREE_OPERAND (exp
, 1),
16178 arg1
? arg1
: TREE_OPERAND (exp
, 2));
16183 const enum built_in_function fcode
= builtin_mathfn_code (exp
);
16186 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
16187 /* Strip copysign function call, return the 1st argument. */
16188 arg0
= CALL_EXPR_ARG (exp
, 0);
16189 arg1
= CALL_EXPR_ARG (exp
, 1);
16190 return omit_one_operand_loc (loc
, TREE_TYPE (exp
), arg0
, arg1
);
16193 /* Strip sign ops from the argument of "odd" math functions. */
16194 if (negate_mathfn_p (fcode
))
16196 arg0
= fold_strip_sign_ops (CALL_EXPR_ARG (exp
, 0));
16198 return build_call_expr_loc (loc
, get_callee_fndecl (exp
), 1, arg0
);
16211 /* Return OFF converted to a pointer offset type suitable as offset for
16212 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16214 convert_to_ptrofftype_loc (location_t loc
, tree off
)
16216 return fold_convert_loc (loc
, sizetype
, off
);
16219 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16221 fold_build_pointer_plus_loc (location_t loc
, tree ptr
, tree off
)
16223 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
16224 ptr
, convert_to_ptrofftype_loc (loc
, off
));
16227 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16229 fold_build_pointer_plus_hwi_loc (location_t loc
, tree ptr
, HOST_WIDE_INT off
)
16231 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
16232 ptr
, size_int (off
));