1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
49 #include "stor-layout.h"
51 #include "tree-iterator.h"
57 #include "diagnostic-core.h"
59 #include "langhooks.h"
66 #include "hard-reg-set.h"
69 #include "basic-block.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
73 #include "gimple-expr.h"
78 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
81 #include "plugin-api.h"
84 #include "generic-match.h"
87 /* Nonzero if we are folding constants inside an initializer; zero
89 int folding_initializer
= 0;
91 /* The following constants represent a bit based encoding of GCC's
92 comparison operators. This encoding simplifies transformations
93 on relational comparison operators, such as AND and OR. */
94 enum comparison_code
{
113 static bool negate_mathfn_p (enum built_in_function
);
114 static bool negate_expr_p (tree
);
115 static tree
negate_expr (tree
);
116 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
117 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
118 static tree
const_binop (enum tree_code
, tree
, tree
);
119 static enum comparison_code
comparison_to_compcode (enum tree_code
);
120 static enum tree_code
compcode_to_comparison (enum comparison_code
);
121 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
122 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
123 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
124 static tree
pedantic_omit_one_operand_loc (location_t
, tree
, tree
, tree
);
125 static tree
distribute_bit_expr (location_t
, enum tree_code
, tree
, tree
, tree
);
126 static tree
make_bit_field_ref (location_t
, tree
, tree
,
127 HOST_WIDE_INT
, HOST_WIDE_INT
, int);
128 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
130 static tree
decode_field_reference (location_t
, tree
, HOST_WIDE_INT
*,
132 machine_mode
*, int *, int *,
134 static tree
sign_bit_p (tree
, const_tree
);
135 static int simple_operand_p (const_tree
);
136 static bool simple_operand_p_2 (tree
);
137 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
138 static tree
range_predecessor (tree
);
139 static tree
range_successor (tree
);
140 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
141 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
142 static tree
unextend (tree
, int, int, tree
);
143 static tree
optimize_minmax_comparison (location_t
, enum tree_code
,
145 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
146 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
147 static tree
fold_binary_op_with_conditional_arg (location_t
,
148 enum tree_code
, tree
,
151 static tree
fold_mathfn_compare (location_t
,
152 enum built_in_function
, enum tree_code
,
154 static tree
fold_inf_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
155 static tree
fold_div_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
156 static bool reorder_operands_p (const_tree
, const_tree
);
157 static tree
fold_negate_const (tree
, tree
);
158 static tree
fold_not_const (const_tree
, tree
);
159 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
160 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
162 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
163 Otherwise, return LOC. */
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION; otherwise
   return LOC (restating the comment that precedes this definition).
   NOTE(review): the extraction dropped this function's return-type line
   (presumably "static location_t" -- confirm against upstream
   fold-const.c) and its braces; this fragment does not compile as-is.  */
166 expr_location_or (tree t
, location_t loc
)
/* Cache T's own source location.  */
168 location_t tloc
= EXPR_LOCATION (t
);
/* Fall back to LOC only when T carries no location of its own.  */
169 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
172 /* Similar to protected_set_expr_location, but never modify x in place,
173 if location can and needs to be set, unshare it. */
/* Set LOC on X without modifying a shared node in place (see the comment
   above this definition).
   NOTE(review): interior source lines 183-184 were dropped by the
   extraction -- presumably the actual unsharing step (copying X before
   the SET_EXPR_LOCATION below) and the function's return; confirm
   against upstream fold-const.c before relying on this fragment.  */
176 protected_set_expr_location_unshare (tree x
, location_t loc
)
/* Only touch nodes that can carry a location, whose location actually
   differs from LOC, and that are not one of the tree codes excluded
   below.  */
178 if (CAN_HAVE_LOCATION_P (x
)
179 && EXPR_LOCATION (x
) != loc
180 && !(TREE_CODE (x
) == SAVE_EXPR
181 || TREE_CODE (x
) == TARGET_EXPR
182 || TREE_CODE (x
) == BIND_EXPR
))
185 SET_EXPR_LOCATION (x
, loc
);
190 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
191 division and returns the quotient. Otherwise returns
/* If ARG2 divides ARG1 with zero remainder, carry out the exact division
   and return the quotient as a tree (see the comment above).
   NOTE(review): the declaration of QUO, the closing of the
   wi::multiple_of_p call (which computes the quotient into QUO), and the
   failure-path return were dropped by the extraction; this fragment is
   incomplete.  */
195 div_if_zero_remainder (const_tree arg1
, const_tree arg2
)
/* wi::multiple_of_p tests exact divisibility in infinite precision.  */
199 if (wi::multiple_of_p (wi::to_widest (arg1
), wi::to_widest (arg2
),
/* On success, rebuild the quotient as a constant of ARG1's type.  */
201 return wide_int_to_tree (TREE_TYPE (arg1
), quo
);
206 /* This is nonzero if we should defer warnings about undefined
207 overflow. This facility exists because these warnings are a
208 special case. The code to estimate loop iterations does not want
209 to issue any warnings, since it works with expressions which do not
210 occur in user code. Various bits of cleanup code call fold(), but
211 only use the result if it has certain characteristics (e.g., is a
212 constant); that code only wants to issue a warning if the result is
215 static int fold_deferring_overflow_warnings
;
217 /* If a warning about undefined overflow is deferred, this is the
218 warning. Note that this may cause us to turn two warnings into
219 one, but that is fine since it is sufficient to only give one
220 warning per expression. */
222 static const char* fold_deferred_overflow_warning
;
224 /* If a warning about undefined overflow is deferred, this is the
225 level at which the warning should be emitted. */
227 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
229 /* Start deferring overflow warnings. We could use a stack here to
230 permit nested calls, but at present it is not necessary. */
/* Start deferring undefined-overflow warnings by bumping the nesting
   counter; fold_undefer_overflow_warnings decrements it again.  */
233 fold_defer_overflow_warnings (void)
235 ++fold_deferring_overflow_warnings
;
238 /* Stop deferring overflow warnings. If there is a pending warning,
239 and ISSUE is true, then issue the warning if appropriate. STMT is
240 the statement with which the warning should be associated (used for
241 location information); STMT may be NULL. CODE is the level of the
242 warning--a warn_strict_overflow_code value. This function will use
243 the smaller of CODE and the deferred code when deciding whether to
244 issue the warning. CODE may be zero to mean to always use the
/* Stop deferring overflow warnings; if ISSUE and a warning is pending,
   emit it at STMT's location (or input_location when STMT has none),
   using the smaller of CODE and the deferred code (see the comment
   above this definition).
   NOTE(review): the extraction dropped several interior lines here --
   the declarations of WARNMSG and LOCUS, the early "return;" statements
   after the guard conditions, and the braces -- so control flow below
   must be read against upstream fold-const.c.  */
248 fold_undefer_overflow_warnings (bool issue
, const_gimple stmt
, int code
)
/* Unbalanced undefer is a caller bug.  */
253 gcc_assert (fold_deferring_overflow_warnings
> 0);
254 --fold_deferring_overflow_warnings
;
/* Still inside a nested defer: just record the strictest pending code
   and keep deferring.  */
255 if (fold_deferring_overflow_warnings
> 0)
257 if (fold_deferred_overflow_warning
!= NULL
259 && code
< (int) fold_deferred_overflow_code
)
260 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
/* Take ownership of the pending message and clear the slot.  */
264 warnmsg
= fold_deferred_overflow_warning
;
265 fold_deferred_overflow_warning
= NULL
;
/* Nothing to emit if the caller does not want the warning or none was
   recorded.  */
267 if (!issue
|| warnmsg
== NULL
)
/* Honor per-statement warning suppression.  */
270 if (gimple_no_warning_p (stmt
))
273 /* Use the smallest code level when deciding to issue the
   warning.  */
275 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
276 code
= fold_deferred_overflow_code
;
278 if (!issue_strict_overflow_warning (code
))
/* No statement: fall back to the global input location.  */
282 locus
= input_location
;
284 locus
= gimple_location (stmt
);
285 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
288 /* Stop deferring overflow warnings, ignoring any deferred
/* Stop deferring overflow warnings and discard any pending one, by
   undeferring with ISSUE == false.  */
292 fold_undefer_and_ignore_overflow_warnings (void)
294 fold_undefer_overflow_warnings (false, NULL
, 0);
297 /* Whether we are deferring overflow warnings. */
/* Return whether overflow warnings are currently being deferred,
   i.e. the defer nesting counter is positive.  */
300 fold_deferring_overflow_warnings_p (void)
302 return fold_deferring_overflow_warnings
> 0;
305 /* This is called when we fold something based on the fact that signed
306 overflow is undefined. */
/* Record or emit a warning that a fold relied on signed overflow being
   undefined.  GMSGID is the (untranslated) message; WC its severity.
   If warnings are being deferred, remember only the most severe one;
   otherwise warn immediately when -Wstrict-overflow is active.
   NOTE(review): braces around the deferred-update arm were dropped by
   the extraction.  */
309 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
311 if (fold_deferring_overflow_warnings
> 0)
/* Keep the first message, or replace it when WC is stricter (lower)
   than what is already recorded.  */
313 if (fold_deferred_overflow_warning
== NULL
314 || wc
< fold_deferred_overflow_code
)
316 fold_deferred_overflow_warning
= gmsgid
;
317 fold_deferred_overflow_code
= wc
;
/* Not deferring: emit right away if this severity warrants it.  */
320 else if (issue_strict_overflow_warning (wc
))
321 warning (OPT_Wstrict_overflow
, gmsgid
);
324 /* Return true if the built-in mathematical function specified by CODE
325 is odd, i.e. -f(x) == f(-x). */
/* Return true if built-in CODE is an odd math function, i.e.
   -f(x) == f(-x) (see the comment above this definition).
   NOTE(review): the extraction dropped the "switch" line, the "return
   true;" for the first group of cases (interior lines 355-356), and the
   default/"return false" tail (lines 362-368); read structure against
   upstream fold-const.c.  */
328 negate_mathfn_p (enum built_in_function code
)
/* These functions are odd for all rounding modes.  */
332 CASE_FLT_FN (BUILT_IN_ASIN
):
333 CASE_FLT_FN (BUILT_IN_ASINH
):
334 CASE_FLT_FN (BUILT_IN_ATAN
):
335 CASE_FLT_FN (BUILT_IN_ATANH
):
336 CASE_FLT_FN (BUILT_IN_CASIN
):
337 CASE_FLT_FN (BUILT_IN_CASINH
):
338 CASE_FLT_FN (BUILT_IN_CATAN
):
339 CASE_FLT_FN (BUILT_IN_CATANH
):
340 CASE_FLT_FN (BUILT_IN_CBRT
):
341 CASE_FLT_FN (BUILT_IN_CPROJ
):
342 CASE_FLT_FN (BUILT_IN_CSIN
):
343 CASE_FLT_FN (BUILT_IN_CSINH
):
344 CASE_FLT_FN (BUILT_IN_CTAN
):
345 CASE_FLT_FN (BUILT_IN_CTANH
):
346 CASE_FLT_FN (BUILT_IN_ERF
):
347 CASE_FLT_FN (BUILT_IN_LLROUND
):
348 CASE_FLT_FN (BUILT_IN_LROUND
):
349 CASE_FLT_FN (BUILT_IN_ROUND
):
350 CASE_FLT_FN (BUILT_IN_SIN
):
351 CASE_FLT_FN (BUILT_IN_SINH
):
352 CASE_FLT_FN (BUILT_IN_TAN
):
353 CASE_FLT_FN (BUILT_IN_TANH
):
354 CASE_FLT_FN (BUILT_IN_TRUNC
):
/* The rint family is only odd when we do not honor the dynamic
   rounding mode, hence the flag_rounding_math test below.  */
357 CASE_FLT_FN (BUILT_IN_LLRINT
):
358 CASE_FLT_FN (BUILT_IN_LRINT
):
359 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
360 CASE_FLT_FN (BUILT_IN_RINT
):
361 return !flag_rounding_math
;
369 /* Check whether we may negate an integer constant T without causing
/* Return whether the INTEGER_CST T may be negated without overflow
   (see the comment above this definition).
   NOTE(review): the statement under the TYPE_UNSIGNED test (interior
   lines 381-382) was dropped by the extraction -- presumably the
   unsigned-type early return; confirm against upstream fold-const.c.  */
373 may_negate_without_overflow_p (const_tree t
)
/* Only integer constants are meaningful here.  */
377 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
379 type
= TREE_TYPE (t
);
380 if (TYPE_UNSIGNED (type
))
/* For signed types, negation overflows exactly when T is the most
   negative value, i.e. only the sign bit is set.  */
383 return !wi::only_sign_bit_p (t
);
386 /* Determine whether an expression T can be cheaply negated using
387 the function negate_expr without introducing undefined overflow. */
390 negate_expr_p (tree t
)
397 type
= TREE_TYPE (t
);
400 switch (TREE_CODE (t
))
403 if (TYPE_OVERFLOW_WRAPS (type
))
406 /* Check that -CST will not overflow type. */
407 return may_negate_without_overflow_p (t
);
409 return (INTEGRAL_TYPE_P (type
)
410 && TYPE_OVERFLOW_WRAPS (type
));
417 /* We want to canonicalize to positive real constants. Pretend
418 that only negative ones can be easily negated. */
419 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
422 return negate_expr_p (TREE_REALPART (t
))
423 && negate_expr_p (TREE_IMAGPART (t
));
427 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
430 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
432 for (i
= 0; i
< count
; i
++)
433 if (!negate_expr_p (VECTOR_CST_ELT (t
, i
)))
440 return negate_expr_p (TREE_OPERAND (t
, 0))
441 && negate_expr_p (TREE_OPERAND (t
, 1));
444 return negate_expr_p (TREE_OPERAND (t
, 0));
447 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
448 || HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
450 /* -(A + B) -> (-B) - A. */
451 if (negate_expr_p (TREE_OPERAND (t
, 1))
452 && reorder_operands_p (TREE_OPERAND (t
, 0),
453 TREE_OPERAND (t
, 1)))
455 /* -(A + B) -> (-A) - B. */
456 return negate_expr_p (TREE_OPERAND (t
, 0));
459 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
460 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
461 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
462 && reorder_operands_p (TREE_OPERAND (t
, 0),
463 TREE_OPERAND (t
, 1));
466 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
472 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t
))))
473 return negate_expr_p (TREE_OPERAND (t
, 1))
474 || negate_expr_p (TREE_OPERAND (t
, 0));
480 /* In general we can't negate A / B, because if A is INT_MIN and
481 B is 1, we may turn this into INT_MIN / -1 which is undefined
482 and actually traps on some architectures. But if overflow is
483 undefined, we can negate, because - (INT_MIN / 1) is an
485 if (INTEGRAL_TYPE_P (TREE_TYPE (t
)))
487 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
489 /* If overflow is undefined then we have to be careful because
490 we ask whether it's ok to associate the negate with the
491 division which is not ok for example for
492 -((a - b) / c) where (-(a - b)) / c may invoke undefined
493 overflow because of negating INT_MIN. So do not use
494 negate_expr_p here but open-code the two important cases. */
495 if (TREE_CODE (TREE_OPERAND (t
, 0)) == NEGATE_EXPR
496 || (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
497 && may_negate_without_overflow_p (TREE_OPERAND (t
, 0))))
500 else if (negate_expr_p (TREE_OPERAND (t
, 0)))
502 return negate_expr_p (TREE_OPERAND (t
, 1));
505 /* Negate -((double)float) as (double)(-float). */
506 if (TREE_CODE (type
) == REAL_TYPE
)
508 tree tem
= strip_float_extensions (t
);
510 return negate_expr_p (tem
);
515 /* Negate -f(x) as f(-x). */
516 if (negate_mathfn_p (builtin_mathfn_code (t
)))
517 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
521 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
522 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
524 tree op1
= TREE_OPERAND (t
, 1);
525 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
536 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
537 simplification is possible.
538 If negate_expr_p would return true for T, NULL_TREE will never be
returned.
542 fold_negate_expr (location_t loc
, tree t
)
544 tree type
= TREE_TYPE (t
);
547 switch (TREE_CODE (t
))
549 /* Convert - (~A) to A + 1. */
551 if (INTEGRAL_TYPE_P (type
))
552 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
553 build_one_cst (type
));
557 tem
= fold_negate_const (t
, type
);
558 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
559 || !TYPE_OVERFLOW_TRAPS (type
))
564 tem
= fold_negate_const (t
, type
);
565 /* Two's complement FP formats, such as c4x, may overflow. */
566 if (!TREE_OVERFLOW (tem
) || !flag_trapping_math
)
571 tem
= fold_negate_const (t
, type
);
576 tree rpart
= negate_expr (TREE_REALPART (t
));
577 tree ipart
= negate_expr (TREE_IMAGPART (t
));
579 if ((TREE_CODE (rpart
) == REAL_CST
580 && TREE_CODE (ipart
) == REAL_CST
)
581 || (TREE_CODE (rpart
) == INTEGER_CST
582 && TREE_CODE (ipart
) == INTEGER_CST
))
583 return build_complex (type
, rpart
, ipart
);
589 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
590 tree
*elts
= XALLOCAVEC (tree
, count
);
592 for (i
= 0; i
< count
; i
++)
594 elts
[i
] = fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
595 if (elts
[i
] == NULL_TREE
)
599 return build_vector (type
, elts
);
603 if (negate_expr_p (t
))
604 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
605 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
606 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
610 if (negate_expr_p (t
))
611 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
612 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
616 return TREE_OPERAND (t
, 0);
619 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
620 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
622 /* -(A + B) -> (-B) - A. */
623 if (negate_expr_p (TREE_OPERAND (t
, 1))
624 && reorder_operands_p (TREE_OPERAND (t
, 0),
625 TREE_OPERAND (t
, 1)))
627 tem
= negate_expr (TREE_OPERAND (t
, 1));
628 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
629 tem
, TREE_OPERAND (t
, 0));
632 /* -(A + B) -> (-A) - B. */
633 if (negate_expr_p (TREE_OPERAND (t
, 0)))
635 tem
= negate_expr (TREE_OPERAND (t
, 0));
636 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
637 tem
, TREE_OPERAND (t
, 1));
643 /* - (A - B) -> B - A */
644 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
645 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
646 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
647 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
648 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
652 if (TYPE_UNSIGNED (type
))
658 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
)))
660 tem
= TREE_OPERAND (t
, 1);
661 if (negate_expr_p (tem
))
662 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
663 TREE_OPERAND (t
, 0), negate_expr (tem
));
664 tem
= TREE_OPERAND (t
, 0);
665 if (negate_expr_p (tem
))
666 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
667 negate_expr (tem
), TREE_OPERAND (t
, 1));
674 /* In general we can't negate A / B, because if A is INT_MIN and
675 B is 1, we may turn this into INT_MIN / -1 which is undefined
676 and actually traps on some architectures. But if overflow is
677 undefined, we can negate, because - (INT_MIN / 1) is an
679 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
681 const char * const warnmsg
= G_("assuming signed overflow does not "
682 "occur when negating a division");
683 tem
= TREE_OPERAND (t
, 1);
684 if (negate_expr_p (tem
))
686 if (INTEGRAL_TYPE_P (type
)
687 && (TREE_CODE (tem
) != INTEGER_CST
688 || integer_onep (tem
)))
689 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
690 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
691 TREE_OPERAND (t
, 0), negate_expr (tem
));
693 /* If overflow is undefined then we have to be careful because
694 we ask whether it's ok to associate the negate with the
695 division which is not ok for example for
696 -((a - b) / c) where (-(a - b)) / c may invoke undefined
697 overflow because of negating INT_MIN. So do not use
698 negate_expr_p here but open-code the two important cases. */
699 tem
= TREE_OPERAND (t
, 0);
700 if ((INTEGRAL_TYPE_P (type
)
701 && (TREE_CODE (tem
) == NEGATE_EXPR
702 || (TREE_CODE (tem
) == INTEGER_CST
703 && may_negate_without_overflow_p (tem
))))
704 || !INTEGRAL_TYPE_P (type
))
705 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
706 negate_expr (tem
), TREE_OPERAND (t
, 1));
711 /* Convert -((double)float) into (double)(-float). */
712 if (TREE_CODE (type
) == REAL_TYPE
)
714 tem
= strip_float_extensions (t
);
715 if (tem
!= t
&& negate_expr_p (tem
))
716 return fold_convert_loc (loc
, type
, negate_expr (tem
));
721 /* Negate -f(x) as f(-x). */
722 if (negate_mathfn_p (builtin_mathfn_code (t
))
723 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
727 fndecl
= get_callee_fndecl (t
);
728 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
729 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
734 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
735 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
737 tree op1
= TREE_OPERAND (t
, 1);
738 if (wi::eq_p (op1
, TYPE_PRECISION (type
) - 1))
740 tree ntype
= TYPE_UNSIGNED (type
)
741 ? signed_type_for (type
)
742 : unsigned_type_for (type
);
743 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
744 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
745 return fold_convert_loc (loc
, type
, temp
);
757 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
758 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
NULL_TREE is returned.
770 loc
= EXPR_LOCATION (t
);
771 type
= TREE_TYPE (t
);
774 tem
= fold_negate_expr (loc
, t
);
776 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
777 return fold_convert_loc (loc
, type
, tem
);
780 /* Split a tree IN into a constant, literal and variable parts that could be
781 combined with CODE to make IN. "constant" means an expression with
782 TREE_CONSTANT but that isn't an actual constant. CODE must be a
783 commutative arithmetic operation. Store the constant part into *CONP,
784 the literal in *LITP and return the variable part. If a part isn't
785 present, set it to null. If the tree does not decompose in this way,
786 return the entire tree as the variable part and the other parts as null.
788 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
789 case, we negate an operand that was subtracted. Except if it is a
790 literal for which we use *MINUS_LITP instead.
792 If NEGATE_P is true, we are negating all of IN, again except a literal
793 for which we use *MINUS_LITP instead.
795 If IN is itself a literal or constant, return it as appropriate.
797 Note that we do not guarantee that any of the three values will be the
798 same type as IN, but they will have the same signedness and mode. */
801 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
802 tree
*minus_litp
, int negate_p
)
810 /* Strip any conversions that don't change the machine mode or signedness. */
811 STRIP_SIGN_NOPS (in
);
813 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
814 || TREE_CODE (in
) == FIXED_CST
)
816 else if (TREE_CODE (in
) == code
817 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
818 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
819 /* We can associate addition and subtraction together (even
820 though the C standard doesn't say so) for integers because
821 the value is not affected. For reals, the value might be
822 affected, so we can't. */
823 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
824 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
826 tree op0
= TREE_OPERAND (in
, 0);
827 tree op1
= TREE_OPERAND (in
, 1);
828 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
829 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
831 /* First see if either of the operands is a literal, then a constant. */
832 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
833 || TREE_CODE (op0
) == FIXED_CST
)
834 *litp
= op0
, op0
= 0;
835 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
836 || TREE_CODE (op1
) == FIXED_CST
)
837 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
839 if (op0
!= 0 && TREE_CONSTANT (op0
))
840 *conp
= op0
, op0
= 0;
841 else if (op1
!= 0 && TREE_CONSTANT (op1
))
842 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
844 /* If we haven't dealt with either operand, this is not a case we can
845 decompose. Otherwise, VAR is either of the ones remaining, if any. */
846 if (op0
!= 0 && op1
!= 0)
851 var
= op1
, neg_var_p
= neg1_p
;
853 /* Now do any needed negations. */
855 *minus_litp
= *litp
, *litp
= 0;
857 *conp
= negate_expr (*conp
);
859 var
= negate_expr (var
);
861 else if (TREE_CODE (in
) == BIT_NOT_EXPR
862 && code
== PLUS_EXPR
)
864 /* -X - 1 is folded to ~X, undo that here. */
865 *minus_litp
= build_one_cst (TREE_TYPE (in
));
866 var
= negate_expr (TREE_OPERAND (in
, 0));
868 else if (TREE_CONSTANT (in
))
876 *minus_litp
= *litp
, *litp
= 0;
877 else if (*minus_litp
)
878 *litp
= *minus_litp
, *minus_litp
= 0;
879 *conp
= negate_expr (*conp
);
880 var
= negate_expr (var
);
886 /* Re-associate trees split by the above function. T1 and T2 are
887 either expressions to associate or null. Return the new
888 expression, if any. LOC is the location of the new expression. If
889 we build an operation, do it in TYPE and with CODE. */
/* Re-associate trees split by split_tree: combine T1 and T2 (either may
   be null) with CODE in TYPE at location LOC (see the comment above).
   NOTE(review): interior lines 893-898 were dropped by the extraction --
   presumably the null-operand early returns for T1/T2 and the opening
   brace; braces around the arms below are also missing.  Read structure
   against upstream fold-const.c.  */
892 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
899 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
900 try to fold this since we will have infinite recursion. But do
901 deal with any NEGATE_EXPRs. */
902 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
903 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
905 if (code
== PLUS_EXPR
)
/* t1 == -X:  (-X) + t2  ==>  t2 - X.  Built with build2_loc, not
   fold_build2_loc, to avoid re-entering fold (see comment above).  */
907 if (TREE_CODE (t1
) == NEGATE_EXPR
)
908 return build2_loc (loc
, MINUS_EXPR
, type
,
909 fold_convert_loc (loc
, type
, t2
),
910 fold_convert_loc (loc
, type
,
911 TREE_OPERAND (t1
, 0)));
/* t2 == -X:  t1 + (-X)  ==>  t1 - X.  */
912 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
913 return build2_loc (loc
, MINUS_EXPR
, type
,
914 fold_convert_loc (loc
, type
, t1
),
915 fold_convert_loc (loc
, type
,
916 TREE_OPERAND (t2
, 0)));
/* t1 + 0  ==>  t1.  */
917 else if (integer_zerop (t2
))
918 return fold_convert_loc (loc
, type
, t1
);
920 else if (code
== MINUS_EXPR
)
/* t1 - 0  ==>  t1.  */
922 if (integer_zerop (t2
))
923 return fold_convert_loc (loc
, type
, t1
);
/* Otherwise build the operation without folding (recursion guard).  */
926 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
927 fold_convert_loc (loc
, type
, t2
));
/* Safe to fold: neither operand re-triggers the association.  */
930 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
931 fold_convert_loc (loc
, type
, t2
));
934 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
935 for use in int_const_binop, size_binop and size_diffop. */
/* Check whether TYPE1 and TYPE2 are equivalent integer (or pointer)
   types for use in int_const_binop, size_binop and size_diffop (see the
   comment above this definition).
   NOTE(review): interior lines 941, 943-956 were dropped by the
   extraction -- presumably the "return false" bodies of the two guards
   below plus CODE-specific handling (CODE is otherwise unused in what
   remains); confirm against upstream fold-const.c.  */
938 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
/* Both types must be integral or pointer types.  */
940 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
942 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
/* Equivalent means same signedness, precision and machine mode.  */
957 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
958 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
959 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
963 /* Combine two integer constants ARG1 and ARG2 under operation CODE
964 to produce a new constant. Return NULL_TREE if we don't know how
965 to evaluate CODE at compile-time. */
968 int_const_binop_1 (enum tree_code code
, const_tree arg1
, const_tree parg2
,
973 tree type
= TREE_TYPE (arg1
);
974 signop sign
= TYPE_SIGN (type
);
975 bool overflow
= false;
977 wide_int arg2
= wide_int::from (parg2
, TYPE_PRECISION (type
),
978 TYPE_SIGN (TREE_TYPE (parg2
)));
983 res
= wi::bit_or (arg1
, arg2
);
987 res
= wi::bit_xor (arg1
, arg2
);
991 res
= wi::bit_and (arg1
, arg2
);
996 if (wi::neg_p (arg2
))
999 if (code
== RSHIFT_EXPR
)
1005 if (code
== RSHIFT_EXPR
)
1006 /* It's unclear from the C standard whether shifts can overflow.
1007 The following code ignores overflow; perhaps a C standard
1008 interpretation ruling is needed. */
1009 res
= wi::rshift (arg1
, arg2
, sign
);
1011 res
= wi::lshift (arg1
, arg2
);
1016 if (wi::neg_p (arg2
))
1019 if (code
== RROTATE_EXPR
)
1020 code
= LROTATE_EXPR
;
1022 code
= RROTATE_EXPR
;
1025 if (code
== RROTATE_EXPR
)
1026 res
= wi::rrotate (arg1
, arg2
);
1028 res
= wi::lrotate (arg1
, arg2
);
1032 res
= wi::add (arg1
, arg2
, sign
, &overflow
);
1036 res
= wi::sub (arg1
, arg2
, sign
, &overflow
);
1040 res
= wi::mul (arg1
, arg2
, sign
, &overflow
);
1043 case MULT_HIGHPART_EXPR
:
1044 res
= wi::mul_high (arg1
, arg2
, sign
);
1047 case TRUNC_DIV_EXPR
:
1048 case EXACT_DIV_EXPR
:
1051 res
= wi::div_trunc (arg1
, arg2
, sign
, &overflow
);
1054 case FLOOR_DIV_EXPR
:
1057 res
= wi::div_floor (arg1
, arg2
, sign
, &overflow
);
1063 res
= wi::div_ceil (arg1
, arg2
, sign
, &overflow
);
1066 case ROUND_DIV_EXPR
:
1069 res
= wi::div_round (arg1
, arg2
, sign
, &overflow
);
1072 case TRUNC_MOD_EXPR
:
1075 res
= wi::mod_trunc (arg1
, arg2
, sign
, &overflow
);
1078 case FLOOR_MOD_EXPR
:
1081 res
= wi::mod_floor (arg1
, arg2
, sign
, &overflow
);
1087 res
= wi::mod_ceil (arg1
, arg2
, sign
, &overflow
);
1090 case ROUND_MOD_EXPR
:
1093 res
= wi::mod_round (arg1
, arg2
, sign
, &overflow
);
1097 res
= wi::min (arg1
, arg2
, sign
);
1101 res
= wi::max (arg1
, arg2
, sign
);
1108 t
= force_fit_type (type
, res
, overflowable
,
1109 (((sign
== SIGNED
|| overflowable
== -1)
1111 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (parg2
)));
/* Combine integer constants ARG1 and ARG2 under CODE; thin wrapper over
   int_const_binop_1 with overflowable == 1 (its last argument).  */
1117 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
1119 return int_const_binop_1 (code
, arg1
, arg2
, 1);
1122 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1123 constant. We assume ARG1 and ARG2 have the same data type, or at least
1124 are the same kind of constant and the same machine mode. Return zero if
1125 combining the constants is not allowed in the current operating mode. */
1128 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1130 /* Sanity check for the recursive cases. */
1137 if (TREE_CODE (arg1
) == INTEGER_CST
)
1138 return int_const_binop (code
, arg1
, arg2
);
1140 if (TREE_CODE (arg1
) == REAL_CST
)
1145 REAL_VALUE_TYPE value
;
1146 REAL_VALUE_TYPE result
;
1150 /* The following codes are handled by real_arithmetic. */
1165 d1
= TREE_REAL_CST (arg1
);
1166 d2
= TREE_REAL_CST (arg2
);
1168 type
= TREE_TYPE (arg1
);
1169 mode
= TYPE_MODE (type
);
1171 /* Don't perform operation if we honor signaling NaNs and
1172 either operand is a NaN. */
1173 if (HONOR_SNANS (mode
)
1174 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1177 /* Don't perform operation if it would raise a division
1178 by zero exception. */
1179 if (code
== RDIV_EXPR
1180 && REAL_VALUES_EQUAL (d2
, dconst0
)
1181 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1184 /* If either operand is a NaN, just return it. Otherwise, set up
1185 for floating-point trap; we return an overflow. */
1186 if (REAL_VALUE_ISNAN (d1
))
1188 else if (REAL_VALUE_ISNAN (d2
))
1191 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1192 real_convert (&result
, mode
, &value
);
1194 /* Don't constant fold this floating point operation if
1195 the result has overflowed and flag_trapping_math. */
1196 if (flag_trapping_math
1197 && MODE_HAS_INFINITIES (mode
)
1198 && REAL_VALUE_ISINF (result
)
1199 && !REAL_VALUE_ISINF (d1
)
1200 && !REAL_VALUE_ISINF (d2
))
1203 /* Don't constant fold this floating point operation if the
1204 result may dependent upon the run-time rounding mode and
1205 flag_rounding_math is set, or if GCC's software emulation
1206 is unable to accurately represent the result. */
1207 if ((flag_rounding_math
1208 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1209 && (inexact
|| !real_identical (&result
, &value
)))
1212 t
= build_real (type
, result
);
1214 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1218 if (TREE_CODE (arg1
) == FIXED_CST
)
1220 FIXED_VALUE_TYPE f1
;
1221 FIXED_VALUE_TYPE f2
;
1222 FIXED_VALUE_TYPE result
;
1227 /* The following codes are handled by fixed_arithmetic. */
1233 case TRUNC_DIV_EXPR
:
1234 f2
= TREE_FIXED_CST (arg2
);
1241 f2
.data
.high
= w2
.elt (1);
1242 f2
.data
.low
= w2
.elt (0);
1251 f1
= TREE_FIXED_CST (arg1
);
1252 type
= TREE_TYPE (arg1
);
1253 sat_p
= TYPE_SATURATING (type
);
1254 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1255 t
= build_fixed (type
, result
);
1256 /* Propagate overflow flags. */
1257 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1258 TREE_OVERFLOW (t
) = 1;
1262 if (TREE_CODE (arg1
) == COMPLEX_CST
)
1264 tree type
= TREE_TYPE (arg1
);
1265 tree r1
= TREE_REALPART (arg1
);
1266 tree i1
= TREE_IMAGPART (arg1
);
1267 tree r2
= TREE_REALPART (arg2
);
1268 tree i2
= TREE_IMAGPART (arg2
);
1275 real
= const_binop (code
, r1
, r2
);
1276 imag
= const_binop (code
, i1
, i2
);
1280 if (COMPLEX_FLOAT_TYPE_P (type
))
1281 return do_mpc_arg2 (arg1
, arg2
, type
,
1282 /* do_nonfinite= */ folding_initializer
,
1285 real
= const_binop (MINUS_EXPR
,
1286 const_binop (MULT_EXPR
, r1
, r2
),
1287 const_binop (MULT_EXPR
, i1
, i2
));
1288 imag
= const_binop (PLUS_EXPR
,
1289 const_binop (MULT_EXPR
, r1
, i2
),
1290 const_binop (MULT_EXPR
, i1
, r2
));
1294 if (COMPLEX_FLOAT_TYPE_P (type
))
1295 return do_mpc_arg2 (arg1
, arg2
, type
,
1296 /* do_nonfinite= */ folding_initializer
,
1299 case TRUNC_DIV_EXPR
:
1301 case FLOOR_DIV_EXPR
:
1302 case ROUND_DIV_EXPR
:
1303 if (flag_complex_method
== 0)
1305 /* Keep this algorithm in sync with
1306 tree-complex.c:expand_complex_div_straight().
1308 Expand complex division to scalars, straightforward algorithm.
1309 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1313 = const_binop (PLUS_EXPR
,
1314 const_binop (MULT_EXPR
, r2
, r2
),
1315 const_binop (MULT_EXPR
, i2
, i2
));
1317 = const_binop (PLUS_EXPR
,
1318 const_binop (MULT_EXPR
, r1
, r2
),
1319 const_binop (MULT_EXPR
, i1
, i2
));
1321 = const_binop (MINUS_EXPR
,
1322 const_binop (MULT_EXPR
, i1
, r2
),
1323 const_binop (MULT_EXPR
, r1
, i2
));
1325 real
= const_binop (code
, t1
, magsquared
);
1326 imag
= const_binop (code
, t2
, magsquared
);
1330 /* Keep this algorithm in sync with
1331 tree-complex.c:expand_complex_div_wide().
1333 Expand complex division to scalars, modified algorithm to minimize
1334 overflow with wide input ranges. */
1335 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1336 fold_abs_const (r2
, TREE_TYPE (type
)),
1337 fold_abs_const (i2
, TREE_TYPE (type
)));
1339 if (integer_nonzerop (compare
))
1341 /* In the TRUE branch, we compute
1343 div = (br * ratio) + bi;
1344 tr = (ar * ratio) + ai;
1345 ti = (ai * ratio) - ar;
1348 tree ratio
= const_binop (code
, r2
, i2
);
1349 tree div
= const_binop (PLUS_EXPR
, i2
,
1350 const_binop (MULT_EXPR
, r2
, ratio
));
1351 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1352 real
= const_binop (PLUS_EXPR
, real
, i1
);
1353 real
= const_binop (code
, real
, div
);
1355 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1356 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1357 imag
= const_binop (code
, imag
, div
);
1361 /* In the FALSE branch, we compute
1363 divisor = (d * ratio) + c;
1364 tr = (b * ratio) + a;
1365 ti = b - (a * ratio);
1368 tree ratio
= const_binop (code
, i2
, r2
);
1369 tree div
= const_binop (PLUS_EXPR
, r2
,
1370 const_binop (MULT_EXPR
, i2
, ratio
));
1372 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1373 real
= const_binop (PLUS_EXPR
, real
, r1
);
1374 real
= const_binop (code
, real
, div
);
1376 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1377 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1378 imag
= const_binop (code
, imag
, div
);
1388 return build_complex (type
, real
, imag
);
1391 if (TREE_CODE (arg1
) == VECTOR_CST
1392 && TREE_CODE (arg2
) == VECTOR_CST
)
1394 tree type
= TREE_TYPE (arg1
);
1395 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1396 tree
*elts
= XALLOCAVEC (tree
, count
);
1398 for (i
= 0; i
< count
; i
++)
1400 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1401 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1403 elts
[i
] = const_binop (code
, elem1
, elem2
);
1405 /* It is possible that const_binop cannot handle the given
1406 code and return NULL_TREE */
1407 if (elts
[i
] == NULL_TREE
)
1411 return build_vector (type
, elts
);
1414 /* Shifts allow a scalar offset for a vector. */
1415 if (TREE_CODE (arg1
) == VECTOR_CST
1416 && TREE_CODE (arg2
) == INTEGER_CST
)
1418 tree type
= TREE_TYPE (arg1
);
1419 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1420 tree
*elts
= XALLOCAVEC (tree
, count
);
1422 if (code
== VEC_RSHIFT_EXPR
)
1424 if (!tree_fits_uhwi_p (arg2
))
1427 unsigned HOST_WIDE_INT shiftc
= tree_to_uhwi (arg2
);
1428 unsigned HOST_WIDE_INT outerc
= tree_to_uhwi (TYPE_SIZE (type
));
1429 unsigned HOST_WIDE_INT innerc
1430 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type
)));
1431 if (shiftc
>= outerc
|| (shiftc
% innerc
) != 0)
1433 int offset
= shiftc
/ innerc
;
1434 /* The direction of VEC_RSHIFT_EXPR is endian dependent.
1435 For reductions, if !BYTES_BIG_ENDIAN then compiler picks first
1436 vector element, but last element if BYTES_BIG_ENDIAN. */
1437 if (BYTES_BIG_ENDIAN
)
1439 tree zero
= build_zero_cst (TREE_TYPE (type
));
1440 for (i
= 0; i
< count
; i
++)
1442 if (i
+ offset
< 0 || i
+ offset
>= count
)
1445 elts
[i
] = VECTOR_CST_ELT (arg1
, i
+ offset
);
1449 for (i
= 0; i
< count
; i
++)
1451 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1453 elts
[i
] = const_binop (code
, elem1
, arg2
);
1455 /* It is possible that const_binop cannot handle the given
1456 code and return NULL_TREE */
1457 if (elts
[i
] == NULL_TREE
)
1461 return build_vector (type
, elts
);
1466 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1467 indicates which particular sizetype to create. */
1470 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1472 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1475 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1476 is a tree code. The type of the result is taken from the operands.
1477 Both must be equivalent integer types, ala int_binop_types_match_p.
1478 If the operands are constant, so is the result. */
1481 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1483 tree type
= TREE_TYPE (arg0
);
1485 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1486 return error_mark_node
;
1488 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1491 /* Handle the special case of two integer constants faster. */
1492 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1494 /* And some specific cases even faster than that. */
1495 if (code
== PLUS_EXPR
)
1497 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1499 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1502 else if (code
== MINUS_EXPR
)
1504 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1507 else if (code
== MULT_EXPR
)
1509 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1513 /* Handle general case of two integer constants. For sizetype
1514 constant calculations we always want to know about overflow,
1515 even in the unsigned case. */
1516 return int_const_binop_1 (code
, arg0
, arg1
, -1);
1519 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1522 /* Given two values, either both of sizetype or both of bitsizetype,
1523 compute the difference between the two values. Return the value
1524 in signed type corresponding to the type of the operands. */
1527 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1529 tree type
= TREE_TYPE (arg0
);
1532 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1535 /* If the type is already signed, just do the simple thing. */
1536 if (!TYPE_UNSIGNED (type
))
1537 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1539 if (type
== sizetype
)
1541 else if (type
== bitsizetype
)
1542 ctype
= sbitsizetype
;
1544 ctype
= signed_type_for (type
);
1546 /* If either operand is not a constant, do the conversions to the signed
1547 type and subtract. The hardware will do the right thing with any
1548 overflow in the subtraction. */
1549 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1550 return size_binop_loc (loc
, MINUS_EXPR
,
1551 fold_convert_loc (loc
, ctype
, arg0
),
1552 fold_convert_loc (loc
, ctype
, arg1
));
1554 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1555 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1556 overflow) and negate (which can't either). Special-case a result
1557 of zero while we're here. */
1558 if (tree_int_cst_equal (arg0
, arg1
))
1559 return build_int_cst (ctype
, 0);
1560 else if (tree_int_cst_lt (arg1
, arg0
))
1561 return fold_convert_loc (loc
, ctype
,
1562 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1564 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1565 fold_convert_loc (loc
, ctype
,
1566 size_binop_loc (loc
,
1571 /* A subroutine of fold_convert_const handling conversions of an
1572 INTEGER_CST to another integer type. */
1575 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1577 /* Given an integer constant, make new constant with new type,
1578 appropriately sign-extended or truncated. Use widest_int
1579 so that any extension is done according ARG1's type. */
1580 return force_fit_type (type
, wi::to_widest (arg1
),
1581 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1582 TREE_OVERFLOW (arg1
));
1585 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1586 to an integer type. */
/* NOTE(review): this region is a fragmented extraction -- the
   declarations of R, VAL and T, the remaining switch cases, braces and
   the final return are elided from this view, so only comments are
   added here; every visible token is left untouched.  */
1589 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
/* Set when the value cannot be represented in TYPE; merged into the
   result's TREE_OVERFLOW at the bottom.  */
1591 bool overflow
= false;
1594 /* The following code implements the floating point to integer
1595 conversion rules required by the Java Language Specification,
1596 that IEEE NaNs are mapped to zero and values that overflow
1597 the target precision saturate, i.e. values greater than
1598 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1599 are mapped to INT_MIN. These semantics are allowed by the
1600 C and C++ standards that simply state that the behavior of
1601 FP-to-integer conversion is unspecified upon overflow. */
/* Work on a copy so the constant tree node itself is not modified.  */
1605 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
/* Truncation toward zero; other rounding codes would be handled by the
   elided remainder of this switch -- confirm against the full file.  */
1609 case FIX_TRUNC_EXPR
:
1610 real_trunc (&r
, VOIDmode
, &x
);
1617 /* If R is NaN, return zero and show we have an overflow. */
1618 if (REAL_VALUE_ISNAN (r
))
1621 val
= wi::zero (TYPE_PRECISION (type
));
1624 /* See if R is less than the lower bound or greater than the
/* Saturate to TYPE's minimum value when R is below it.  */
1629 tree lt
= TYPE_MIN_VALUE (type
);
1630 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1631 if (REAL_VALUES_LESS (r
, l
))
/* Likewise saturate to TYPE's maximum value when R is above it.  */
1640 tree ut
= TYPE_MAX_VALUE (type
);
1643 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1644 if (REAL_VALUES_LESS (u
, r
))
/* In range: convert R to an integer of TYPE's precision, letting
   real_to_integer report any overflow.  */
1653 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
/* Build the result, merging our overflow flag with ARG1's.  */
1655 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1659 /* A subroutine of fold_convert_const handling conversions of a
1660 FIXED_CST to an integer type. */
1663 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1666 double_int temp
, temp_trunc
;
1669 /* Right shift FIXED_CST to temp by fbit. */
1670 temp
= TREE_FIXED_CST (arg1
).data
;
1671 mode
= TREE_FIXED_CST (arg1
).mode
;
1672 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
1674 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
1675 HOST_BITS_PER_DOUBLE_INT
,
1676 SIGNED_FIXED_POINT_MODE_P (mode
));
1678 /* Left shift temp to temp_trunc by fbit. */
1679 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
1680 HOST_BITS_PER_DOUBLE_INT
,
1681 SIGNED_FIXED_POINT_MODE_P (mode
));
1685 temp
= double_int_zero
;
1686 temp_trunc
= double_int_zero
;
1689 /* If FIXED_CST is negative, we need to round the value toward 0.
1690 By checking if the fractional bits are not zero to add 1 to temp. */
1691 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1692 && temp_trunc
.is_negative ()
1693 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
1694 temp
+= double_int_one
;
1696 /* Given a fixed-point constant, make new constant with new type,
1697 appropriately sign-extended or truncated. */
1698 t
= force_fit_type (type
, temp
, -1,
1699 (temp
.is_negative ()
1700 && (TYPE_UNSIGNED (type
)
1701 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1702 | TREE_OVERFLOW (arg1
));
1707 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1708 to another floating point type. */
1711 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1713 REAL_VALUE_TYPE value
;
1716 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1717 t
= build_real (type
, value
);
1719 /* If converting an infinity or NAN to a representation that doesn't
1720 have one, set the overflow bit so that we can produce some kind of
1721 error message at the appropriate point if necessary. It's not the
1722 most user-friendly message, but it's better than nothing. */
1723 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
1724 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
1725 TREE_OVERFLOW (t
) = 1;
1726 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
1727 && !MODE_HAS_NANS (TYPE_MODE (type
)))
1728 TREE_OVERFLOW (t
) = 1;
1729 /* Regular overflow, conversion produced an infinity in a mode that
1730 can't represent them. */
1731 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
1732 && REAL_VALUE_ISINF (value
)
1733 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
1734 TREE_OVERFLOW (t
) = 1;
1736 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1740 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1741 to a floating point type. */
1744 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
1746 REAL_VALUE_TYPE value
;
1749 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
1750 t
= build_real (type
, value
);
1752 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1756 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1757 to another fixed-point type. */
1760 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
1762 FIXED_VALUE_TYPE value
;
1766 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
1767 TYPE_SATURATING (type
));
1768 t
= build_fixed (type
, value
);
1770 /* Propagate overflow flags. */
1771 if (overflow_p
| TREE_OVERFLOW (arg1
))
1772 TREE_OVERFLOW (t
) = 1;
1776 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1777 to a fixed-point type. */
1780 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
1782 FIXED_VALUE_TYPE value
;
1787 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
1789 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
1790 if (TREE_INT_CST_NUNITS (arg1
) == 1)
1791 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? (HOST_WIDE_INT
) -1 : 0;
1793 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
1795 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
), di
,
1796 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
1797 TYPE_SATURATING (type
));
1798 t
= build_fixed (type
, value
);
1800 /* Propagate overflow flags. */
1801 if (overflow_p
| TREE_OVERFLOW (arg1
))
1802 TREE_OVERFLOW (t
) = 1;
1806 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1807 to a fixed-point type. */
1810 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
1812 FIXED_VALUE_TYPE value
;
1816 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
1817 &TREE_REAL_CST (arg1
),
1818 TYPE_SATURATING (type
));
1819 t
= build_fixed (type
, value
);
1821 /* Propagate overflow flags. */
1822 if (overflow_p
| TREE_OVERFLOW (arg1
))
1823 TREE_OVERFLOW (t
) = 1;
1827 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1828 type TYPE. If no simplification can be done return NULL_TREE. */
1831 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
1833 if (TREE_TYPE (arg1
) == type
)
1836 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
1837 || TREE_CODE (type
) == OFFSET_TYPE
)
1839 if (TREE_CODE (arg1
) == INTEGER_CST
)
1840 return fold_convert_const_int_from_int (type
, arg1
);
1841 else if (TREE_CODE (arg1
) == REAL_CST
)
1842 return fold_convert_const_int_from_real (code
, type
, arg1
);
1843 else if (TREE_CODE (arg1
) == FIXED_CST
)
1844 return fold_convert_const_int_from_fixed (type
, arg1
);
1846 else if (TREE_CODE (type
) == REAL_TYPE
)
1848 if (TREE_CODE (arg1
) == INTEGER_CST
)
1849 return build_real_from_int_cst (type
, arg1
);
1850 else if (TREE_CODE (arg1
) == REAL_CST
)
1851 return fold_convert_const_real_from_real (type
, arg1
);
1852 else if (TREE_CODE (arg1
) == FIXED_CST
)
1853 return fold_convert_const_real_from_fixed (type
, arg1
);
1855 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
1857 if (TREE_CODE (arg1
) == FIXED_CST
)
1858 return fold_convert_const_fixed_from_fixed (type
, arg1
);
1859 else if (TREE_CODE (arg1
) == INTEGER_CST
)
1860 return fold_convert_const_fixed_from_int (type
, arg1
);
1861 else if (TREE_CODE (arg1
) == REAL_CST
)
1862 return fold_convert_const_fixed_from_real (type
, arg1
);
1867 /* Construct a vector of zero elements of vector type TYPE. */
1870 build_zero_vector (tree type
)
1874 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
1875 return build_vector_from_val (type
, t
);
1878 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1881 fold_convertible_p (const_tree type
, const_tree arg
)
1883 tree orig
= TREE_TYPE (arg
);
1888 if (TREE_CODE (arg
) == ERROR_MARK
1889 || TREE_CODE (type
) == ERROR_MARK
1890 || TREE_CODE (orig
) == ERROR_MARK
)
1893 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
1896 switch (TREE_CODE (type
))
1898 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1899 case POINTER_TYPE
: case REFERENCE_TYPE
:
1901 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1902 || TREE_CODE (orig
) == OFFSET_TYPE
)
1904 return (TREE_CODE (orig
) == VECTOR_TYPE
1905 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1908 case FIXED_POINT_TYPE
:
1912 return TREE_CODE (type
) == TREE_CODE (orig
);
1919 /* Convert expression ARG to type TYPE. Used by the middle-end for
1920 simple conversions in preference to calling the front-end's convert. */
1923 fold_convert_loc (location_t loc
, tree type
, tree arg
)
1925 tree orig
= TREE_TYPE (arg
);
1931 if (TREE_CODE (arg
) == ERROR_MARK
1932 || TREE_CODE (type
) == ERROR_MARK
1933 || TREE_CODE (orig
) == ERROR_MARK
)
1934 return error_mark_node
;
1936 switch (TREE_CODE (type
))
1939 case REFERENCE_TYPE
:
1940 /* Handle conversions between pointers to different address spaces. */
1941 if (POINTER_TYPE_P (orig
)
1942 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
1943 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
1944 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
1947 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1949 if (TREE_CODE (arg
) == INTEGER_CST
)
1951 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
1952 if (tem
!= NULL_TREE
)
1955 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1956 || TREE_CODE (orig
) == OFFSET_TYPE
)
1957 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1958 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
1959 return fold_convert_loc (loc
, type
,
1960 fold_build1_loc (loc
, REALPART_EXPR
,
1961 TREE_TYPE (orig
), arg
));
1962 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
1963 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1964 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1967 if (TREE_CODE (arg
) == INTEGER_CST
)
1969 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
1970 if (tem
!= NULL_TREE
)
1973 else if (TREE_CODE (arg
) == REAL_CST
)
1975 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
1976 if (tem
!= NULL_TREE
)
1979 else if (TREE_CODE (arg
) == FIXED_CST
)
1981 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
1982 if (tem
!= NULL_TREE
)
1986 switch (TREE_CODE (orig
))
1989 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
1990 case POINTER_TYPE
: case REFERENCE_TYPE
:
1991 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
1994 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1996 case FIXED_POINT_TYPE
:
1997 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2000 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2001 return fold_convert_loc (loc
, type
, tem
);
2007 case FIXED_POINT_TYPE
:
2008 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2009 || TREE_CODE (arg
) == REAL_CST
)
2011 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2012 if (tem
!= NULL_TREE
)
2013 goto fold_convert_exit
;
2016 switch (TREE_CODE (orig
))
2018 case FIXED_POINT_TYPE
:
2023 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2026 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2027 return fold_convert_loc (loc
, type
, tem
);
2034 switch (TREE_CODE (orig
))
2037 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2038 case POINTER_TYPE
: case REFERENCE_TYPE
:
2040 case FIXED_POINT_TYPE
:
2041 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2042 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2043 fold_convert_loc (loc
, TREE_TYPE (type
),
2044 integer_zero_node
));
2049 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2051 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2052 TREE_OPERAND (arg
, 0));
2053 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2054 TREE_OPERAND (arg
, 1));
2055 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2058 arg
= save_expr (arg
);
2059 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2060 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2061 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2062 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2063 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2071 if (integer_zerop (arg
))
2072 return build_zero_vector (type
);
2073 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2074 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2075 || TREE_CODE (orig
) == VECTOR_TYPE
);
2076 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2079 tem
= fold_ignored_result (arg
);
2080 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2083 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2084 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2088 protected_set_expr_location_unshare (tem
, loc
);
2092 /* Return false if expr can be assumed not to be an lvalue, true
2096 maybe_lvalue_p (const_tree x
)
2098 /* We only need to wrap lvalue tree codes. */
2099 switch (TREE_CODE (x
))
2112 case ARRAY_RANGE_REF
:
2118 case PREINCREMENT_EXPR
:
2119 case PREDECREMENT_EXPR
:
2121 case TRY_CATCH_EXPR
:
2122 case WITH_CLEANUP_EXPR
:
2131 /* Assume the worst for front-end tree codes. */
2132 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2140 /* Return an expr equal to X but certainly not valid as an lvalue. */
2143 non_lvalue_loc (location_t loc
, tree x
)
2145 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2150 if (! maybe_lvalue_p (x
))
2152 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2155 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2156 Zero means allow extended lvalues. */
/* NOTE(review): read by pedantic_non_lvalue_loc below; presumably set
   by the front end -- confirm where it is written.  */
2158 int pedantic_lvalues
;
2160 /* When pedantic, return an expr equal to X but certainly not valid as a
2161 pedantic lvalue. Otherwise, return X. */
2164 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2166 if (pedantic_lvalues
)
2167 return non_lvalue_loc (loc
, x
);
2169 return protected_set_expr_location_unshare (x
, loc
);
2172 /* Given a tree comparison code, return the code that is the logical inverse.
2173 It is generally not safe to do this for floating-point comparisons, except
2174 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2175 ERROR_MARK in this case. */
2178 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2180 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2181 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2191 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2193 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2195 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2197 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2211 return UNORDERED_EXPR
;
2212 case UNORDERED_EXPR
:
2213 return ORDERED_EXPR
;
2219 /* Similar, but return the comparison that results if the operands are
2220 swapped. This is safe for floating-point. */
/* NOTE(review): the body of this function is almost entirely elided
   from this extraction -- only the signature and one case label are
   visible -- so no code changes are attempted here.  */
2223 swap_tree_comparison (enum tree_code code
)
/* UNORDERED_EXPR is symmetric in its operands, so it presumably maps
   to itself here -- confirm against the full file.  */
2230 case UNORDERED_EXPR
:
2256 /* Convert a comparison tree code from an enum tree_code representation
2257 into a compcode bit-based encoding. This function is the inverse of
2258 compcode_to_comparison. */
2260 static enum comparison_code
2261 comparison_to_compcode (enum tree_code code
)
2278 return COMPCODE_ORD
;
2279 case UNORDERED_EXPR
:
2280 return COMPCODE_UNORD
;
2282 return COMPCODE_UNLT
;
2284 return COMPCODE_UNEQ
;
2286 return COMPCODE_UNLE
;
2288 return COMPCODE_UNGT
;
2290 return COMPCODE_LTGT
;
2292 return COMPCODE_UNGE
;
2298 /* Convert a compcode bit-based encoding of a comparison operator back
2299 to GCC's enum tree_code representation. This function is the
2300 inverse of comparison_to_compcode. */
/* NOTE(review): most of this function's switch is elided from this
   extraction; only the ORDERED/UNORDERED arms are visible, so no code
   changes are attempted here.  */
2302 static enum tree_code
2303 compcode_to_comparison (enum comparison_code code
)
/* COMPCODE_ORD maps back to ORDERED_EXPR...  */
2320 return ORDERED_EXPR
;
/* ...and COMPCODE_UNORD back to UNORDERED_EXPR.  */
2321 case COMPCODE_UNORD
:
2322 return UNORDERED_EXPR
;
2340 /* Return a tree for the comparison which is the combination of
2341 doing the AND or OR (depending on CODE) of the two operations LCODE
2342 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2343 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2344 if this makes the transformation invalid. */
2347 combine_comparisons (location_t loc
,
2348 enum tree_code code
, enum tree_code lcode
,
2349 enum tree_code rcode
, tree truth_type
,
2350 tree ll_arg
, tree lr_arg
)
2352 bool honor_nans
= HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg
)));
2353 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2354 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2359 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2360 compcode
= lcompcode
& rcompcode
;
2363 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2364 compcode
= lcompcode
| rcompcode
;
2373 /* Eliminate unordered comparisons, as well as LTGT and ORD
2374 which are not used unless the mode has NaNs. */
2375 compcode
&= ~COMPCODE_UNORD
;
2376 if (compcode
== COMPCODE_LTGT
)
2377 compcode
= COMPCODE_NE
;
2378 else if (compcode
== COMPCODE_ORD
)
2379 compcode
= COMPCODE_TRUE
;
2381 else if (flag_trapping_math
)
2383 /* Check that the original operation and the optimized ones will trap
2384 under the same condition. */
2385 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2386 && (lcompcode
!= COMPCODE_EQ
)
2387 && (lcompcode
!= COMPCODE_ORD
);
2388 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2389 && (rcompcode
!= COMPCODE_EQ
)
2390 && (rcompcode
!= COMPCODE_ORD
);
2391 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2392 && (compcode
!= COMPCODE_EQ
)
2393 && (compcode
!= COMPCODE_ORD
);
2395 /* In a short-circuited boolean expression the LHS might be
2396 such that the RHS, if evaluated, will never trap. For
2397 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2398 if neither x nor y is NaN. (This is a mixed blessing: for
2399 example, the expression above will never trap, hence
2400 optimizing it to x < y would be invalid). */
2401 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2402 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2405 /* If the comparison was short-circuited, and only the RHS
2406 trapped, we may now generate a spurious trap. */
2408 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2411 /* If we changed the conditions that cause a trap, we lose. */
2412 if ((ltrap
|| rtrap
) != trap
)
2416 if (compcode
== COMPCODE_TRUE
)
2417 return constant_boolean_node (true, truth_type
);
2418 else if (compcode
== COMPCODE_FALSE
)
2419 return constant_boolean_node (false, truth_type
);
2422 enum tree_code tcode
;
2424 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2425 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2429 /* Return nonzero if two operands (typically of the same tree node)
2430 are necessarily equal. If either argument has side-effects this
2431 function returns zero. FLAGS modifies behavior as follows:
2433 If OEP_ONLY_CONST is set, only return nonzero for constants.
2434 This function tests whether the operands are indistinguishable;
2435 it does not test whether they are equal using C's == operation.
2436 The distinction is important for IEEE floating point, because
2437 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2438 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2440 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2441 even though it may hold multiple values during a function.
2442 This is because a GCC tree node guarantees that nothing else is
2443 executed between the evaluation of its "operands" (which may often
2444 be evaluated in arbitrary order). Hence if the operands themselves
2445 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2446 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2447 unset means assuming isochronic (or instantaneous) tree equivalence.
2448 Unless comparing arbitrary expression trees, such as from different
2449 statements, this flag can usually be left unset.
2451 If OEP_PURE_SAME is set, then pure functions with identical arguments
2452 are considered the same. It is used when the caller has other ways
2453 to ensure that global memory is unchanged in between. */
2456 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2458 /* If either is ERROR_MARK, they aren't equal. */
2459 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2460 || TREE_TYPE (arg0
) == error_mark_node
2461 || TREE_TYPE (arg1
) == error_mark_node
)
2464 /* Similar, if either does not have a type (like a released SSA name),
2465 they aren't equal. */
2466 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2469 /* Check equality of integer constants before bailing out due to
2470 precision differences. */
2471 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2472 return tree_int_cst_equal (arg0
, arg1
);
2474 /* If both types don't have the same signedness, then we can't consider
2475 them equal. We must check this before the STRIP_NOPS calls
2476 because they may change the signedness of the arguments. As pointers
2477 strictly don't have a signedness, require either two pointers or
2478 two non-pointers as well. */
2479 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2480 || POINTER_TYPE_P (TREE_TYPE (arg0
)) != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2483 /* We cannot consider pointers to different address space equal. */
2484 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && POINTER_TYPE_P (TREE_TYPE (arg1
))
2485 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2486 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2489 /* If both types don't have the same precision, then it is not safe
2491 if (element_precision (TREE_TYPE (arg0
))
2492 != element_precision (TREE_TYPE (arg1
)))
2498 /* In case both args are comparisons but with different comparison
2499 code, try to swap the comparison operands of one arg to produce
2500 a match and compare that variant. */
2501 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2502 && COMPARISON_CLASS_P (arg0
)
2503 && COMPARISON_CLASS_P (arg1
))
2505 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2507 if (TREE_CODE (arg0
) == swap_code
)
2508 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2509 TREE_OPERAND (arg1
, 1), flags
)
2510 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2511 TREE_OPERAND (arg1
, 0), flags
);
2514 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2515 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2516 && !(CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
)))
2519 /* This is needed for conversions and for COMPONENT_REF.
2520 Might as well play it safe and always test this. */
2521 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2522 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2523 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2526 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2527 We don't care about side effects in that case because the SAVE_EXPR
2528 takes care of that for us. In all other cases, two expressions are
2529 equal if they have no side effects. If we have two identical
2530 expressions with side effects that should be treated the same due
2531 to the only side effects being identical SAVE_EXPR's, that will
2532 be detected in the recursive calls below.
2533 If we are taking an invariant address of two identical objects
2534 they are necessarily equal as well. */
2535 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2536 && (TREE_CODE (arg0
) == SAVE_EXPR
2537 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2538 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2541 /* Next handle constant cases, those for which we can return 1 even
2542 if ONLY_CONST is set. */
2543 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2544 switch (TREE_CODE (arg0
))
2547 return tree_int_cst_equal (arg0
, arg1
);
2550 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2551 TREE_FIXED_CST (arg1
));
2554 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2555 TREE_REAL_CST (arg1
)))
2559 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
))))
2561 /* If we do not distinguish between signed and unsigned zero,
2562 consider them equal. */
2563 if (real_zerop (arg0
) && real_zerop (arg1
))
2572 if (VECTOR_CST_NELTS (arg0
) != VECTOR_CST_NELTS (arg1
))
2575 for (i
= 0; i
< VECTOR_CST_NELTS (arg0
); ++i
)
2577 if (!operand_equal_p (VECTOR_CST_ELT (arg0
, i
),
2578 VECTOR_CST_ELT (arg1
, i
), flags
))
2585 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2587 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2591 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2592 && ! memcmp (TREE_STRING_POINTER (arg0
),
2593 TREE_STRING_POINTER (arg1
),
2594 TREE_STRING_LENGTH (arg0
)));
2597 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2598 TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
)
2599 ? OEP_CONSTANT_ADDRESS_OF
: 0);
2604 if (flags
& OEP_ONLY_CONST
)
2607 /* Define macros to test an operand from arg0 and arg1 for equality and a
2608 variant that allows null and views null as being different from any
2609 non-null value. In the latter case, if either is null, the both
2610 must be; otherwise, do the normal comparison. */
2611 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2612 TREE_OPERAND (arg1, N), flags)
2614 #define OP_SAME_WITH_NULL(N) \
2615 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2616 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2618 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2621 /* Two conversions are equal only if signedness and modes match. */
2622 switch (TREE_CODE (arg0
))
2625 case FIX_TRUNC_EXPR
:
2626 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2627 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2637 case tcc_comparison
:
2639 if (OP_SAME (0) && OP_SAME (1))
2642 /* For commutative ops, allow the other order. */
2643 return (commutative_tree_code (TREE_CODE (arg0
))
2644 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2645 TREE_OPERAND (arg1
, 1), flags
)
2646 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2647 TREE_OPERAND (arg1
, 0), flags
));
2650 /* If either of the pointer (or reference) expressions we are
2651 dereferencing contain a side effect, these cannot be equal,
2652 but their addresses can be. */
2653 if ((flags
& OEP_CONSTANT_ADDRESS_OF
) == 0
2654 && (TREE_SIDE_EFFECTS (arg0
)
2655 || TREE_SIDE_EFFECTS (arg1
)))
2658 switch (TREE_CODE (arg0
))
2661 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2668 case TARGET_MEM_REF
:
2669 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2670 /* Require equal extra operands and then fall through to MEM_REF
2671 handling of the two common operands. */
2672 if (!OP_SAME_WITH_NULL (2)
2673 || !OP_SAME_WITH_NULL (3)
2674 || !OP_SAME_WITH_NULL (4))
2678 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2679 /* Require equal access sizes, and similar pointer types.
2680 We can have incomplete types for array references of
2681 variable-sized arrays from the Fortran frontend
2682 though. Also verify the types are compatible. */
2683 return ((TYPE_SIZE (TREE_TYPE (arg0
)) == TYPE_SIZE (TREE_TYPE (arg1
))
2684 || (TYPE_SIZE (TREE_TYPE (arg0
))
2685 && TYPE_SIZE (TREE_TYPE (arg1
))
2686 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2687 TYPE_SIZE (TREE_TYPE (arg1
)), flags
)))
2688 && types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
))
2689 && alias_ptr_types_compatible_p
2690 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
2691 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
2692 && OP_SAME (0) && OP_SAME (1));
2695 case ARRAY_RANGE_REF
:
2696 /* Operands 2 and 3 may be null.
2697 Compare the array index by value if it is constant first as we
2698 may have different types but same value here. */
2701 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2702 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
2703 TREE_OPERAND (arg1
, 1))
2705 && OP_SAME_WITH_NULL (2)
2706 && OP_SAME_WITH_NULL (3));
2709 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2710 may be NULL when we're called to compare MEM_EXPRs. */
2711 if (!OP_SAME_WITH_NULL (0)
2714 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2715 return OP_SAME_WITH_NULL (2);
2720 flags
&= ~OEP_CONSTANT_ADDRESS_OF
;
2721 return OP_SAME (1) && OP_SAME (2);
2727 case tcc_expression
:
2728 switch (TREE_CODE (arg0
))
2731 case TRUTH_NOT_EXPR
:
2734 case TRUTH_ANDIF_EXPR
:
2735 case TRUTH_ORIF_EXPR
:
2736 return OP_SAME (0) && OP_SAME (1);
2739 case WIDEN_MULT_PLUS_EXPR
:
2740 case WIDEN_MULT_MINUS_EXPR
:
2743 /* The multiplcation operands are commutative. */
2746 case TRUTH_AND_EXPR
:
2748 case TRUTH_XOR_EXPR
:
2749 if (OP_SAME (0) && OP_SAME (1))
2752 /* Otherwise take into account this is a commutative operation. */
2753 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2754 TREE_OPERAND (arg1
, 1), flags
)
2755 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2756 TREE_OPERAND (arg1
, 0), flags
));
2761 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2768 switch (TREE_CODE (arg0
))
2771 /* If the CALL_EXPRs call different functions, then they
2772 clearly can not be equal. */
2773 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
2778 unsigned int cef
= call_expr_flags (arg0
);
2779 if (flags
& OEP_PURE_SAME
)
2780 cef
&= ECF_CONST
| ECF_PURE
;
2787 /* Now see if all the arguments are the same. */
2789 const_call_expr_arg_iterator iter0
, iter1
;
2791 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
2792 a1
= first_const_call_expr_arg (arg1
, &iter1
);
2794 a0
= next_const_call_expr_arg (&iter0
),
2795 a1
= next_const_call_expr_arg (&iter1
))
2796 if (! operand_equal_p (a0
, a1
, flags
))
2799 /* If we get here and both argument lists are exhausted
2800 then the CALL_EXPRs are equal. */
2801 return ! (a0
|| a1
);
2807 case tcc_declaration
:
2808 /* Consider __builtin_sqrt equal to sqrt. */
2809 return (TREE_CODE (arg0
) == FUNCTION_DECL
2810 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2811 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2812 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
2819 #undef OP_SAME_WITH_NULL
2822 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2823 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2825 When in doubt, return 0. */
2828 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
2830 int unsignedp1
, unsignedpo
;
2831 tree primarg0
, primarg1
, primother
;
2832 unsigned int correct_width
;
2834 if (operand_equal_p (arg0
, arg1
, 0))
2837 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
2838 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
2841 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2842 and see if the inner values are the same. This removes any
2843 signedness comparison, which doesn't matter here. */
2844 primarg0
= arg0
, primarg1
= arg1
;
2845 STRIP_NOPS (primarg0
);
2846 STRIP_NOPS (primarg1
);
2847 if (operand_equal_p (primarg0
, primarg1
, 0))
2850 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2851 actual comparison operand, ARG0.
2853 First throw away any conversions to wider types
2854 already present in the operands. */
2856 primarg1
= get_narrower (arg1
, &unsignedp1
);
2857 primother
= get_narrower (other
, &unsignedpo
);
2859 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
2860 if (unsignedp1
== unsignedpo
2861 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
2862 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
2864 tree type
= TREE_TYPE (arg0
);
2866 /* Make sure shorter operand is extended the right way
2867 to match the longer operand. */
2868 primarg1
= fold_convert (signed_or_unsigned_type_for
2869 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
2871 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
2878 /* See if ARG is an expression that is either a comparison or is performing
2879 arithmetic on comparisons. The comparisons must only be comparing
2880 two different values, which will be stored in *CVAL1 and *CVAL2; if
2881 they are nonzero it means that some operands have already been found.
2882 No variables may be used anywhere else in the expression except in the
2883 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2884 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2886 If this is true, return 1. Otherwise, return zero. */
2889 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
2891 enum tree_code code
= TREE_CODE (arg
);
2892 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
2894 /* We can handle some of the tcc_expression cases here. */
2895 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2897 else if (tclass
== tcc_expression
2898 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
2899 || code
== COMPOUND_EXPR
))
2900 tclass
= tcc_binary
;
2902 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
2903 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
2905 /* If we've already found a CVAL1 or CVAL2, this expression is
2906 two complex to handle. */
2907 if (*cval1
|| *cval2
)
2917 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
2920 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
2921 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2922 cval1
, cval2
, save_p
));
2927 case tcc_expression
:
2928 if (code
== COND_EXPR
)
2929 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
2930 cval1
, cval2
, save_p
)
2931 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2932 cval1
, cval2
, save_p
)
2933 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
2934 cval1
, cval2
, save_p
));
2937 case tcc_comparison
:
2938 /* First see if we can handle the first operand, then the second. For
2939 the second operand, we know *CVAL1 can't be zero. It must be that
2940 one side of the comparison is each of the values; test for the
2941 case where this isn't true by failing if the two operands
2944 if (operand_equal_p (TREE_OPERAND (arg
, 0),
2945 TREE_OPERAND (arg
, 1), 0))
2949 *cval1
= TREE_OPERAND (arg
, 0);
2950 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
2952 else if (*cval2
== 0)
2953 *cval2
= TREE_OPERAND (arg
, 0);
2954 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
2959 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
2961 else if (*cval2
== 0)
2962 *cval2
= TREE_OPERAND (arg
, 1);
2963 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
2975 /* ARG is a tree that is known to contain just arithmetic operations and
2976 comparisons. Evaluate the operations in the tree substituting NEW0 for
2977 any occurrence of OLD0 as an operand of a comparison and likewise for
2981 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
2982 tree old1
, tree new1
)
2984 tree type
= TREE_TYPE (arg
);
2985 enum tree_code code
= TREE_CODE (arg
);
2986 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
2988 /* We can handle some of the tcc_expression cases here. */
2989 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2991 else if (tclass
== tcc_expression
2992 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2993 tclass
= tcc_binary
;
2998 return fold_build1_loc (loc
, code
, type
,
2999 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3000 old0
, new0
, old1
, new1
));
3003 return fold_build2_loc (loc
, code
, type
,
3004 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3005 old0
, new0
, old1
, new1
),
3006 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3007 old0
, new0
, old1
, new1
));
3009 case tcc_expression
:
3013 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
3017 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
3021 return fold_build3_loc (loc
, code
, type
,
3022 eval_subst (loc
, TREE_OPERAND (arg
, 0),
3023 old0
, new0
, old1
, new1
),
3024 eval_subst (loc
, TREE_OPERAND (arg
, 1),
3025 old0
, new0
, old1
, new1
),
3026 eval_subst (loc
, TREE_OPERAND (arg
, 2),
3027 old0
, new0
, old1
, new1
));
3031 /* Fall through - ??? */
3033 case tcc_comparison
:
3035 tree arg0
= TREE_OPERAND (arg
, 0);
3036 tree arg1
= TREE_OPERAND (arg
, 1);
3038 /* We need to check both for exact equality and tree equality. The
3039 former will be true if the operand has a side-effect. In that
3040 case, we know the operand occurred exactly once. */
3042 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3044 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3047 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3049 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
3052 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
3060 /* Return a tree for the case when the result of an expression is RESULT
3061 converted to TYPE and OMITTED was previously an operand of the expression
3062 but is now not needed (e.g., we folded OMITTED * 0).
3064 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3065 the conversion of RESULT to TYPE. */
3068 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
3070 tree t
= fold_convert_loc (loc
, type
, result
);
3072 /* If the resulting operand is an empty statement, just return the omitted
3073 statement casted to void. */
3074 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
3075 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3076 fold_ignored_result (omitted
));
3078 if (TREE_SIDE_EFFECTS (omitted
))
3079 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3080 fold_ignored_result (omitted
), t
);
3082 return non_lvalue_loc (loc
, t
);
3085 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3088 pedantic_omit_one_operand_loc (location_t loc
, tree type
, tree result
,
3091 tree t
= fold_convert_loc (loc
, type
, result
);
3093 /* If the resulting operand is an empty statement, just return the omitted
3094 statement casted to void. */
3095 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
3096 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3097 fold_ignored_result (omitted
));
3099 if (TREE_SIDE_EFFECTS (omitted
))
3100 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3101 fold_ignored_result (omitted
), t
);
3103 return pedantic_non_lvalue_loc (loc
, t
);
3106 /* Return a tree for the case when the result of an expression is RESULT
3107 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3108 of the expression but are now not needed.
3110 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3111 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3112 evaluated before OMITTED2. Otherwise, if neither has side effects,
3113 just do the conversion of RESULT to TYPE. */
3116 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3117 tree omitted1
, tree omitted2
)
3119 tree t
= fold_convert_loc (loc
, type
, result
);
3121 if (TREE_SIDE_EFFECTS (omitted2
))
3122 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3123 if (TREE_SIDE_EFFECTS (omitted1
))
3124 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3126 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3130 /* Return a simplified tree node for the truth-negation of ARG. This
3131 never alters ARG itself. We assume that ARG is an operation that
3132 returns a truth value (0 or 1).
3134 FIXME: one would think we would fold the result, but it causes
3135 problems with the dominator optimizer. */
3138 fold_truth_not_expr (location_t loc
, tree arg
)
3140 tree type
= TREE_TYPE (arg
);
3141 enum tree_code code
= TREE_CODE (arg
);
3142 location_t loc1
, loc2
;
3144 /* If this is a comparison, we can simply invert it, except for
3145 floating-point non-equality comparisons, in which case we just
3146 enclose a TRUTH_NOT_EXPR around what we have. */
3148 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3150 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3151 if (FLOAT_TYPE_P (op_type
)
3152 && flag_trapping_math
3153 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3154 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3157 code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (op_type
)));
3158 if (code
== ERROR_MARK
)
3161 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3162 TREE_OPERAND (arg
, 1));
3168 return constant_boolean_node (integer_zerop (arg
), type
);
3170 case TRUTH_AND_EXPR
:
3171 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3172 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3173 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3174 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3175 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3178 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3179 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3180 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3181 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3182 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3184 case TRUTH_XOR_EXPR
:
3185 /* Here we can invert either operand. We invert the first operand
3186 unless the second operand is a TRUTH_NOT_EXPR in which case our
3187 result is the XOR of the first operand with the inside of the
3188 negation of the second operand. */
3190 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3191 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3192 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3194 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3195 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3196 TREE_OPERAND (arg
, 1));
3198 case TRUTH_ANDIF_EXPR
:
3199 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3200 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3201 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3202 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3203 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3205 case TRUTH_ORIF_EXPR
:
3206 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3207 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3208 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3209 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3210 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3212 case TRUTH_NOT_EXPR
:
3213 return TREE_OPERAND (arg
, 0);
3217 tree arg1
= TREE_OPERAND (arg
, 1);
3218 tree arg2
= TREE_OPERAND (arg
, 2);
3220 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3221 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3223 /* A COND_EXPR may have a throw as one operand, which
3224 then has void type. Just leave void operands
3226 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3227 VOID_TYPE_P (TREE_TYPE (arg1
))
3228 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3229 VOID_TYPE_P (TREE_TYPE (arg2
))
3230 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3234 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3235 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3236 TREE_OPERAND (arg
, 0),
3237 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3239 case NON_LVALUE_EXPR
:
3240 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3241 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3244 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3245 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3247 /* ... fall through ... */
3250 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3251 return build1_loc (loc
, TREE_CODE (arg
), type
,
3252 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3255 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3257 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3260 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3262 case CLEANUP_POINT_EXPR
:
3263 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3264 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3265 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3272 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3273 assume that ARG is an operation that returns a truth value (0 or 1
3274 for scalars, 0 or -1 for vectors). Return the folded expression if
3275 folding is successful. Otherwise, return NULL_TREE. */
3278 fold_invert_truthvalue (location_t loc
, tree arg
)
3280 tree type
= TREE_TYPE (arg
);
3281 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
3287 /* Return a simplified tree node for the truth-negation of ARG. This
3288 never alters ARG itself. We assume that ARG is an operation that
3289 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3292 invert_truthvalue_loc (location_t loc
, tree arg
)
3294 if (TREE_CODE (arg
) == ERROR_MARK
)
3297 tree type
= TREE_TYPE (arg
);
3298 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
3304 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3305 operands are another bit-wise operation with a common input. If so,
3306 distribute the bit operations to save an operation and possibly two if
3307 constants are involved. For example, convert
3308 (A | B) & (A | C) into A | (B & C)
3309 Further simplification will occur if B and C are constants.
3311 If this optimization cannot be done, 0 will be returned. */
3314 distribute_bit_expr (location_t loc
, enum tree_code code
, tree type
,
3315 tree arg0
, tree arg1
)
3320 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3321 || TREE_CODE (arg0
) == code
3322 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3323 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
3326 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3328 common
= TREE_OPERAND (arg0
, 0);
3329 left
= TREE_OPERAND (arg0
, 1);
3330 right
= TREE_OPERAND (arg1
, 1);
3332 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3334 common
= TREE_OPERAND (arg0
, 0);
3335 left
= TREE_OPERAND (arg0
, 1);
3336 right
= TREE_OPERAND (arg1
, 0);
3338 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3340 common
= TREE_OPERAND (arg0
, 1);
3341 left
= TREE_OPERAND (arg0
, 0);
3342 right
= TREE_OPERAND (arg1
, 1);
3344 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3346 common
= TREE_OPERAND (arg0
, 1);
3347 left
= TREE_OPERAND (arg0
, 0);
3348 right
= TREE_OPERAND (arg1
, 0);
3353 common
= fold_convert_loc (loc
, type
, common
);
3354 left
= fold_convert_loc (loc
, type
, left
);
3355 right
= fold_convert_loc (loc
, type
, right
);
3356 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, common
,
3357 fold_build2_loc (loc
, code
, type
, left
, right
));
3360 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3361 with code CODE. This optimization is unsafe. */
3363 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3364 tree arg0
, tree arg1
)
3366 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3367 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3369 /* (A / C) +- (B / C) -> (A +- B) / C. */
3371 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3372 TREE_OPERAND (arg1
, 1), 0))
3373 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3374 fold_build2_loc (loc
, code
, type
,
3375 TREE_OPERAND (arg0
, 0),
3376 TREE_OPERAND (arg1
, 0)),
3377 TREE_OPERAND (arg0
, 1));
3379 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3380 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3381 TREE_OPERAND (arg1
, 0), 0)
3382 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3383 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3385 REAL_VALUE_TYPE r0
, r1
;
3386 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3387 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3389 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3391 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3392 real_arithmetic (&r0
, code
, &r0
, &r1
);
3393 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3394 TREE_OPERAND (arg0
, 0),
3395 build_real (type
, r0
));
3401 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3402 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3405 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3406 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
, int unsignedp
)
3408 tree result
, bftype
;
3412 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3413 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3414 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3415 && tree_fits_shwi_p (size
)
3416 && tree_to_shwi (size
) == bitsize
)
3417 return fold_convert_loc (loc
, type
, inner
);
3421 if (TYPE_PRECISION (bftype
) != bitsize
3422 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3423 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3425 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3426 size_int (bitsize
), bitsize_int (bitpos
));
3429 result
= fold_convert_loc (loc
, type
, result
);
3434 /* Optimize a bit-field compare.
3436 There are two cases: First is a compare against a constant and the
3437 second is a comparison of two items where the fields are at the same
3438 bit position relative to the start of a chunk (byte, halfword, word)
3439 large enough to contain it. In these cases we can avoid the shift
3440 implicit in bitfield extractions.
3442 For constants, we emit a compare of the shifted constant with the
3443 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3444 compared. For two fields at the same position, we do the ANDs with the
3445 similar mask and compare the result of the ANDs.
3447 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3448 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3449 are the left and right operands of the comparison, respectively.
3451 If the optimization described above can be done, we return the resulting
3452 tree. Otherwise we return zero. */
3455 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3456 tree compare_type
, tree lhs
, tree rhs
)
3458 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3459 tree type
= TREE_TYPE (lhs
);
3461 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3462 machine_mode lmode
, rmode
, nmode
;
3463 int lunsignedp
, runsignedp
;
3464 int lvolatilep
= 0, rvolatilep
= 0;
3465 tree linner
, rinner
= NULL_TREE
;
3469 /* Get all the information about the extractions being done. If the bit size
3470 if the same as the size of the underlying object, we aren't doing an
3471 extraction at all and so can do nothing. We also don't want to
3472 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3473 then will no longer be able to replace it. */
3474 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3475 &lunsignedp
, &lvolatilep
, false);
3476 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3477 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
|| lvolatilep
)
3482 /* If this is not a constant, we can only do something if bit positions,
3483 sizes, and signedness are the same. */
3484 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3485 &runsignedp
, &rvolatilep
, false);
3487 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3488 || lunsignedp
!= runsignedp
|| offset
!= 0
3489 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
|| rvolatilep
)
3493 /* See if we can find a mode to refer to this field. We should be able to,
3494 but fail if we can't. */
3495 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3496 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3497 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3498 TYPE_ALIGN (TREE_TYPE (rinner
))),
3500 if (nmode
== VOIDmode
)
3503 /* Set signed and unsigned types of the precision of this mode for the
3505 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3507 /* Compute the bit position and size for the new reference and our offset
3508 within it. If the new reference is the same size as the original, we
3509 won't optimize anything, so return zero. */
3510 nbitsize
= GET_MODE_BITSIZE (nmode
);
3511 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3513 if (nbitsize
== lbitsize
)
3516 if (BYTES_BIG_ENDIAN
)
3517 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3519 /* Make the mask to be used against the extracted field. */
3520 mask
= build_int_cst_type (unsigned_type
, -1);
3521 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3522 mask
= const_binop (RSHIFT_EXPR
, mask
,
3523 size_int (nbitsize
- lbitsize
- lbitpos
));
3526 /* If not comparing with constant, just rework the comparison
3528 return fold_build2_loc (loc
, code
, compare_type
,
3529 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3530 make_bit_field_ref (loc
, linner
,
3535 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3536 make_bit_field_ref (loc
, rinner
,
3542 /* Otherwise, we are handling the constant case. See if the constant is too
3543 big for the field. Warn and return a tree of for 0 (false) if so. We do
3544 this not only for its own sake, but to avoid having to test for this
3545 error case below. If we didn't, we might generate wrong code.
3547 For unsigned fields, the constant shifted right by the field length should
3548 be all zero. For signed fields, the high-order bits should agree with
3553 if (wi::lrshift (rhs
, lbitsize
) != 0)
3555 warning (0, "comparison is always %d due to width of bit-field",
3557 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3562 wide_int tem
= wi::arshift (rhs
, lbitsize
- 1);
3563 if (tem
!= 0 && tem
!= -1)
3565 warning (0, "comparison is always %d due to width of bit-field",
3567 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3571 /* Single-bit compares should always be against zero. */
3572 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3574 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3575 rhs
= build_int_cst (type
, 0);
3578 /* Make a new bitfield reference, shift the constant over the
3579 appropriate number of bits and mask it with the computed mask
3580 (in case this was a signed field). If we changed it, make a new one. */
3581 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3583 rhs
= const_binop (BIT_AND_EXPR
,
3584 const_binop (LSHIFT_EXPR
,
3585 fold_convert_loc (loc
, unsigned_type
, rhs
),
3586 size_int (lbitpos
)),
3589 lhs
= build2_loc (loc
, code
, compare_type
,
3590 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3594 /* Subroutine for fold_truth_andor_1: decode a field reference.
3596 If EXP is a comparison reference, we return the innermost reference.
3598 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3599 set to the starting bit number.
3601 If the innermost field can be completely contained in a mode-sized
3602 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3604 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3605 otherwise it is not changed.
3607 *PUNSIGNEDP is set to the signedness of the field.
3609 *PMASK is set to the mask used. This is either contained in a
3610 BIT_AND_EXPR or derived from the width of the field.
3612 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3614 Return 0 if this is not a component reference or is one that we can't
3615 do anything with. */
3618 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3619 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
3620 int *punsignedp
, int *pvolatilep
,
3621 tree
*pmask
, tree
*pand_mask
)
3623 tree outer_type
= 0;
3625 tree mask
, inner
, offset
;
3627 unsigned int precision
;
3629 /* All the optimizations using this function assume integer fields.
3630 There are problems with FP fields since the type_for_size call
3631 below can fail for, e.g., XFmode. */
3632 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3635 /* We are interested in the bare arrangement of bits, so strip everything
3636 that doesn't affect the machine mode. However, record the type of the
3637 outermost expression if it may matter below. */
3638 if (CONVERT_EXPR_P (exp
)
3639 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3640 outer_type
= TREE_TYPE (exp
);
3643 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3645 and_mask
= TREE_OPERAND (exp
, 1);
3646 exp
= TREE_OPERAND (exp
, 0);
3647 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3648 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3652 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3653 punsignedp
, pvolatilep
, false);
3654 if ((inner
== exp
&& and_mask
== 0)
3655 || *pbitsize
< 0 || offset
!= 0
3656 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3659 /* If the number of bits in the reference is the same as the bitsize of
3660 the outer type, then the outer type gives the signedness. Otherwise
3661 (in case of a small bitfield) the signedness is unchanged. */
3662 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3663 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3665 /* Compute the mask to access the bitfield. */
3666 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3667 precision
= TYPE_PRECISION (unsigned_type
);
3669 mask
= build_int_cst_type (unsigned_type
, -1);
3671 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3672 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3674 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3676 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3677 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3680 *pand_mask
= and_mask
;
3684 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3685 bit positions and MASK is SIGNED. */
3688 all_ones_mask_p (const_tree mask
, unsigned int size
)
3690 tree type
= TREE_TYPE (mask
);
3691 unsigned int precision
= TYPE_PRECISION (type
);
3693 /* If this function returns true when the type of the mask is
3694 UNSIGNED, then there will be errors. In particular see
3695 gcc.c-torture/execute/990326-1.c. There does not appear to be
3696 any documentation paper trail as to why this is so. But the pre
3697 wide-int worked with that restriction and it has been preserved
3699 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
3702 return wi::mask (size
, false, precision
) == mask
;
3705 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3706 represents the sign bit of EXP's type. If EXP represents a sign
3707 or zero extension, also test VAL against the unextended type.
3708 The return value is the (sub)expression whose sign bit is VAL,
3709 or NULL_TREE otherwise. */
3712 sign_bit_p (tree exp
, const_tree val
)
3717 /* Tree EXP must have an integral type. */
3718 t
= TREE_TYPE (exp
);
3719 if (! INTEGRAL_TYPE_P (t
))
3722 /* Tree VAL must be an integer constant. */
3723 if (TREE_CODE (val
) != INTEGER_CST
3724 || TREE_OVERFLOW (val
))
3727 width
= TYPE_PRECISION (t
);
3728 if (wi::only_sign_bit_p (val
, width
))
3731 /* Handle extension from a narrower type. */
3732 if (TREE_CODE (exp
) == NOP_EXPR
3733 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3734 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3739 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3740 to be evaluated unconditionally. */
3743 simple_operand_p (const_tree exp
)
3745 /* Strip any conversions that don't change the machine mode. */
3748 return (CONSTANT_CLASS_P (exp
)
3749 || TREE_CODE (exp
) == SSA_NAME
3751 && ! TREE_ADDRESSABLE (exp
)
3752 && ! TREE_THIS_VOLATILE (exp
)
3753 && ! DECL_NONLOCAL (exp
)
3754 /* Don't regard global variables as simple. They may be
3755 allocated in ways unknown to the compiler (shared memory,
3756 #pragma weak, etc). */
3757 && ! TREE_PUBLIC (exp
)
3758 && ! DECL_EXTERNAL (exp
)
3759 /* Weakrefs are not safe to be read, since they can be NULL.
3760 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3761 have DECL_WEAK flag set. */
3762 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
3763 /* Loading a static variable is unduly expensive, but global
3764 registers aren't expensive. */
3765 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
3768 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3769 to be evaluated unconditionally.
3770 I addition to simple_operand_p, we assume that comparisons, conversions,
3771 and logic-not operations are simple, if their operands are simple, too. */
3774 simple_operand_p_2 (tree exp
)
3776 enum tree_code code
;
3778 if (TREE_SIDE_EFFECTS (exp
)
3779 || tree_could_trap_p (exp
))
3782 while (CONVERT_EXPR_P (exp
))
3783 exp
= TREE_OPERAND (exp
, 0);
3785 code
= TREE_CODE (exp
);
3787 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3788 return (simple_operand_p (TREE_OPERAND (exp
, 0))
3789 && simple_operand_p (TREE_OPERAND (exp
, 1)));
3791 if (code
== TRUTH_NOT_EXPR
)
3792 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
3794 return simple_operand_p (exp
);
3798 /* The following functions are subroutines to fold_range_test and allow it to
3799 try to change a logical combination of comparisons into a range test.
3802 X == 2 || X == 3 || X == 4 || X == 5
3806 (unsigned) (X - 2) <= 3
3808 We describe each set of comparisons as being either inside or outside
3809 a range, using a variable named like IN_P, and then describe the
3810 range with a lower and upper bound. If one of the bounds is omitted,
3811 it represents either the highest or lowest value of the type.
3813 In the comments below, we represent a range by two numbers in brackets
3814 preceded by a "+" to designate being inside that range, or a "-" to
3815 designate being outside that range, so the condition can be inverted by
3816 flipping the prefix. An omitted bound is represented by a "-". For
3817 example, "- [-, 10]" means being outside the range starting at the lowest
3818 possible value and ending at 10, in other words, being greater than 10.
3819 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3822 We set up things so that the missing bounds are handled in a consistent
3823 manner so neither a missing bound nor "true" and "false" need to be
3824 handled using a special case. */
3826 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3827 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3828 and UPPER1_P are nonzero if the respective argument is an upper bound
3829 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3830 must be specified for a comparison. ARG1 will be converted to ARG0's
3831 type if both are specified. */
3834 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
3835 tree arg1
, int upper1_p
)
3841 /* If neither arg represents infinity, do the normal operation.
3842 Else, if not a comparison, return infinity. Else handle the special
3843 comparison rules. Note that most of the cases below won't occur, but
3844 are handled for consistency. */
3846 if (arg0
!= 0 && arg1
!= 0)
3848 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
3849 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
3851 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
3854 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
3857 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3858 for neither. In real maths, we cannot assume open ended ranges are
3859 the same. But, this is computer arithmetic, where numbers are finite.
3860 We can therefore make the transformation of any unbounded range with
3861 the value Z, Z being greater than any representable number. This permits
3862 us to treat unbounded ranges as equal. */
3863 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
3864 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
3868 result
= sgn0
== sgn1
;
3871 result
= sgn0
!= sgn1
;
3874 result
= sgn0
< sgn1
;
3877 result
= sgn0
<= sgn1
;
3880 result
= sgn0
> sgn1
;
3883 result
= sgn0
>= sgn1
;
3889 return constant_boolean_node (result
, type
);
3892 /* Helper routine for make_range. Perform one step for it, return
3893 new expression if the loop should continue or NULL_TREE if it should
3897 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
3898 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
3899 bool *strict_overflow_p
)
3901 tree arg0_type
= TREE_TYPE (arg0
);
3902 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
3903 int in_p
= *p_in_p
, n_in_p
;
3907 case TRUTH_NOT_EXPR
:
3908 /* We can only do something if the range is testing for zero. */
3909 if (low
== NULL_TREE
|| high
== NULL_TREE
3910 || ! integer_zerop (low
) || ! integer_zerop (high
))
3915 case EQ_EXPR
: case NE_EXPR
:
3916 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
3917 /* We can only do something if the range is testing for zero
3918 and if the second operand is an integer constant. Note that
3919 saying something is "in" the range we make is done by
3920 complementing IN_P since it will set in the initial case of
3921 being not equal to zero; "out" is leaving it alone. */
3922 if (low
== NULL_TREE
|| high
== NULL_TREE
3923 || ! integer_zerop (low
) || ! integer_zerop (high
)
3924 || TREE_CODE (arg1
) != INTEGER_CST
)
3929 case NE_EXPR
: /* - [c, c] */
3932 case EQ_EXPR
: /* + [c, c] */
3933 in_p
= ! in_p
, low
= high
= arg1
;
3935 case GT_EXPR
: /* - [-, c] */
3936 low
= 0, high
= arg1
;
3938 case GE_EXPR
: /* + [c, -] */
3939 in_p
= ! in_p
, low
= arg1
, high
= 0;
3941 case LT_EXPR
: /* - [c, -] */
3942 low
= arg1
, high
= 0;
3944 case LE_EXPR
: /* + [-, c] */
3945 in_p
= ! in_p
, low
= 0, high
= arg1
;
3951 /* If this is an unsigned comparison, we also know that EXP is
3952 greater than or equal to zero. We base the range tests we make
3953 on that fact, so we record it here so we can parse existing
3954 range tests. We test arg0_type since often the return type
3955 of, e.g. EQ_EXPR, is boolean. */
3956 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
3958 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3960 build_int_cst (arg0_type
, 0),
3964 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
3966 /* If the high bound is missing, but we have a nonzero low
3967 bound, reverse the range so it goes from zero to the low bound
3969 if (high
== 0 && low
&& ! integer_zerop (low
))
3972 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
3973 build_int_cst (TREE_TYPE (low
), 1), 0);
3974 low
= build_int_cst (arg0_type
, 0);
3984 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3985 low and high are non-NULL, then normalize will DTRT. */
3986 if (!TYPE_UNSIGNED (arg0_type
)
3987 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
3989 if (low
== NULL_TREE
)
3990 low
= TYPE_MIN_VALUE (arg0_type
);
3991 if (high
== NULL_TREE
)
3992 high
= TYPE_MAX_VALUE (arg0_type
);
3995 /* (-x) IN [a,b] -> x in [-b, -a] */
3996 n_low
= range_binop (MINUS_EXPR
, exp_type
,
3997 build_int_cst (exp_type
, 0),
3999 n_high
= range_binop (MINUS_EXPR
, exp_type
,
4000 build_int_cst (exp_type
, 0),
4002 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
4008 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
4009 build_int_cst (exp_type
, 1));
4013 if (TREE_CODE (arg1
) != INTEGER_CST
)
4016 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4017 move a constant to the other side. */
4018 if (!TYPE_UNSIGNED (arg0_type
)
4019 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4022 /* If EXP is signed, any overflow in the computation is undefined,
4023 so we don't worry about it so long as our computations on
4024 the bounds don't overflow. For unsigned, overflow is defined
4025 and this is exactly the right thing. */
4026 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4027 arg0_type
, low
, 0, arg1
, 0);
4028 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
4029 arg0_type
, high
, 1, arg1
, 0);
4030 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
4031 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
4034 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
4035 *strict_overflow_p
= true;
4038 /* Check for an unsigned range which has wrapped around the maximum
4039 value thus making n_high < n_low, and normalize it. */
4040 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
4042 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
4043 build_int_cst (TREE_TYPE (n_high
), 1), 0);
4044 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
4045 build_int_cst (TREE_TYPE (n_low
), 1), 0);
4047 /* If the range is of the form +/- [ x+1, x ], we won't
4048 be able to normalize it. But then, it represents the
4049 whole range or the empty set, so make it
4051 if (tree_int_cst_equal (n_low
, low
)
4052 && tree_int_cst_equal (n_high
, high
))
4058 low
= n_low
, high
= n_high
;
4066 case NON_LVALUE_EXPR
:
4067 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4070 if (! INTEGRAL_TYPE_P (arg0_type
)
4071 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4072 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4075 n_low
= low
, n_high
= high
;
4078 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
4081 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
4083 /* If we're converting arg0 from an unsigned type, to exp,
4084 a signed type, we will be doing the comparison as unsigned.
4085 The tests above have already verified that LOW and HIGH
4088 So we have to ensure that we will handle large unsigned
4089 values the same way that the current signed bounds treat
4092 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4096 /* For fixed-point modes, we need to pass the saturating flag
4097 as the 2nd parameter. */
4098 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
4100 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
4101 TYPE_SATURATING (arg0_type
));
4104 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
4106 /* A range without an upper bound is, naturally, unbounded.
4107 Since convert would have cropped a very large value, use
4108 the max value for the destination type. */
4110 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4111 : TYPE_MAX_VALUE (arg0_type
);
4113 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4114 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
4115 fold_convert_loc (loc
, arg0_type
,
4117 build_int_cst (arg0_type
, 1));
4119 /* If the low bound is specified, "and" the range with the
4120 range for which the original unsigned value will be
4124 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4125 1, fold_convert_loc (loc
, arg0_type
,
4130 in_p
= (n_in_p
== in_p
);
4134 /* Otherwise, "or" the range with the range of the input
4135 that will be interpreted as negative. */
4136 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4137 1, fold_convert_loc (loc
, arg0_type
,
4142 in_p
= (in_p
!= n_in_p
);
4156 /* Given EXP, a logical expression, set the range it is testing into
4157 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4158 actually being tested. *PLOW and *PHIGH will be made of the same
4159 type as the returned expression. If EXP is not a comparison, we
4160 will most likely not be returning a useful value and range. Set
4161 *STRICT_OVERFLOW_P to true if the return value is only valid
4162 because signed overflow is undefined; otherwise, do not change
4163 *STRICT_OVERFLOW_P. */
4166 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4167 bool *strict_overflow_p
)
4169 enum tree_code code
;
4170 tree arg0
, arg1
= NULL_TREE
;
4171 tree exp_type
, nexp
;
4174 location_t loc
= EXPR_LOCATION (exp
);
4176 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4177 and see if we can refine the range. Some of the cases below may not
4178 happen, but it doesn't seem worth worrying about this. We "continue"
4179 the outer loop when we've changed something; otherwise we "break"
4180 the switch, which will "break" the while. */
4183 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4187 code
= TREE_CODE (exp
);
4188 exp_type
= TREE_TYPE (exp
);
4191 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4193 if (TREE_OPERAND_LENGTH (exp
) > 0)
4194 arg0
= TREE_OPERAND (exp
, 0);
4195 if (TREE_CODE_CLASS (code
) == tcc_binary
4196 || TREE_CODE_CLASS (code
) == tcc_comparison
4197 || (TREE_CODE_CLASS (code
) == tcc_expression
4198 && TREE_OPERAND_LENGTH (exp
) > 1))
4199 arg1
= TREE_OPERAND (exp
, 1);
4201 if (arg0
== NULL_TREE
)
4204 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4205 &high
, &in_p
, strict_overflow_p
);
4206 if (nexp
== NULL_TREE
)
4211 /* If EXP is a constant, we can evaluate whether this is true or false. */
4212 if (TREE_CODE (exp
) == INTEGER_CST
)
4214 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4216 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4222 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4226 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4227 type, TYPE, return an expression to test if EXP is in (or out of, depending
4228 on IN_P) the range. Return 0 if the test couldn't be created. */
4231 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4232 tree low
, tree high
)
4234 tree etype
= TREE_TYPE (exp
), value
;
4236 #ifdef HAVE_canonicalize_funcptr_for_compare
4237 /* Disable this optimization for function pointer expressions
4238 on targets that require function pointer canonicalization. */
4239 if (HAVE_canonicalize_funcptr_for_compare
4240 && TREE_CODE (etype
) == POINTER_TYPE
4241 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4247 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4249 return invert_truthvalue_loc (loc
, value
);
4254 if (low
== 0 && high
== 0)
4255 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
4258 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4259 fold_convert_loc (loc
, etype
, high
));
4262 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4263 fold_convert_loc (loc
, etype
, low
));
4265 if (operand_equal_p (low
, high
, 0))
4266 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4267 fold_convert_loc (loc
, etype
, low
));
4269 if (integer_zerop (low
))
4271 if (! TYPE_UNSIGNED (etype
))
4273 etype
= unsigned_type_for (etype
);
4274 high
= fold_convert_loc (loc
, etype
, high
);
4275 exp
= fold_convert_loc (loc
, etype
, exp
);
4277 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4280 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4281 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4283 int prec
= TYPE_PRECISION (etype
);
4285 if (wi::mask (prec
- 1, false, prec
) == high
)
4287 if (TYPE_UNSIGNED (etype
))
4289 tree signed_etype
= signed_type_for (etype
);
4290 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4292 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4294 etype
= signed_etype
;
4295 exp
= fold_convert_loc (loc
, etype
, exp
);
4297 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4298 build_int_cst (etype
, 0));
4302 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4303 This requires wrap-around arithmetics for the type of the expression.
4304 First make sure that arithmetics in this type is valid, then make sure
4305 that it wraps around. */
4306 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4307 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4308 TYPE_UNSIGNED (etype
));
4310 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4312 tree utype
, minv
, maxv
;
4314 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4315 for the type in question, as we rely on this here. */
4316 utype
= unsigned_type_for (etype
);
4317 maxv
= fold_convert_loc (loc
, utype
, TYPE_MAX_VALUE (etype
));
4318 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4319 build_int_cst (TREE_TYPE (maxv
), 1), 1);
4320 minv
= fold_convert_loc (loc
, utype
, TYPE_MIN_VALUE (etype
));
4322 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4329 high
= fold_convert_loc (loc
, etype
, high
);
4330 low
= fold_convert_loc (loc
, etype
, low
);
4331 exp
= fold_convert_loc (loc
, etype
, exp
);
4333 value
= const_binop (MINUS_EXPR
, high
, low
);
4336 if (POINTER_TYPE_P (etype
))
4338 if (value
!= 0 && !TREE_OVERFLOW (value
))
4340 low
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (low
), low
);
4341 return build_range_check (loc
, type
,
4342 fold_build_pointer_plus_loc (loc
, exp
, low
),
4343 1, build_int_cst (etype
, 0), value
);
4348 if (value
!= 0 && !TREE_OVERFLOW (value
))
4349 return build_range_check (loc
, type
,
4350 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4351 1, build_int_cst (etype
, 0), value
);
4356 /* Return the predecessor of VAL in its type, handling the infinite case. */
4359 range_predecessor (tree val
)
4361 tree type
= TREE_TYPE (val
);
4363 if (INTEGRAL_TYPE_P (type
)
4364 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4367 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
4368 build_int_cst (TREE_TYPE (val
), 1), 0);
4371 /* Return the successor of VAL in its type, handling the infinite case. */
4374 range_successor (tree val
)
4376 tree type
= TREE_TYPE (val
);
4378 if (INTEGRAL_TYPE_P (type
)
4379 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4382 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
4383 build_int_cst (TREE_TYPE (val
), 1), 0);
4386 /* Given two ranges, see if we can merge them into one. Return 1 if we
4387 can, 0 if we can't. Set the output range into the specified parameters. */
4390 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4391 tree high0
, int in1_p
, tree low1
, tree high1
)
4399 int lowequal
= ((low0
== 0 && low1
== 0)
4400 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4401 low0
, 0, low1
, 0)));
4402 int highequal
= ((high0
== 0 && high1
== 0)
4403 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4404 high0
, 1, high1
, 1)));
4406 /* Make range 0 be the range that starts first, or ends last if they
4407 start at the same value. Swap them if it isn't. */
4408 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4411 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4412 high1
, 1, high0
, 1))))
4414 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4415 tem
= low0
, low0
= low1
, low1
= tem
;
4416 tem
= high0
, high0
= high1
, high1
= tem
;
4419 /* Now flag two cases, whether the ranges are disjoint or whether the
4420 second range is totally subsumed in the first. Note that the tests
4421 below are simplified by the ones above. */
4422 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4423 high0
, 1, low1
, 0));
4424 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4425 high1
, 1, high0
, 1));
4427 /* We now have four cases, depending on whether we are including or
4428 excluding the two ranges. */
4431 /* If they don't overlap, the result is false. If the second range
4432 is a subset it is the result. Otherwise, the range is from the start
4433 of the second to the end of the first. */
4435 in_p
= 0, low
= high
= 0;
4437 in_p
= 1, low
= low1
, high
= high1
;
4439 in_p
= 1, low
= low1
, high
= high0
;
4442 else if (in0_p
&& ! in1_p
)
4444 /* If they don't overlap, the result is the first range. If they are
4445 equal, the result is false. If the second range is a subset of the
4446 first, and the ranges begin at the same place, we go from just after
4447 the end of the second range to the end of the first. If the second
4448 range is not a subset of the first, or if it is a subset and both
4449 ranges end at the same place, the range starts at the start of the
4450 first range and ends just before the second range.
4451 Otherwise, we can't describe this as a single range. */
4453 in_p
= 1, low
= low0
, high
= high0
;
4454 else if (lowequal
&& highequal
)
4455 in_p
= 0, low
= high
= 0;
4456 else if (subset
&& lowequal
)
4458 low
= range_successor (high1
);
4463 /* We are in the weird situation where high0 > high1 but
4464 high1 has no successor. Punt. */
4468 else if (! subset
|| highequal
)
4471 high
= range_predecessor (low1
);
4475 /* low0 < low1 but low1 has no predecessor. Punt. */
4483 else if (! in0_p
&& in1_p
)
4485 /* If they don't overlap, the result is the second range. If the second
4486 is a subset of the first, the result is false. Otherwise,
4487 the range starts just after the first range and ends at the
4488 end of the second. */
4490 in_p
= 1, low
= low1
, high
= high1
;
4491 else if (subset
|| highequal
)
4492 in_p
= 0, low
= high
= 0;
4495 low
= range_successor (high0
);
4500 /* high1 > high0 but high0 has no successor. Punt. */
4508 /* The case where we are excluding both ranges. Here the complex case
4509 is if they don't overlap. In that case, the only time we have a
4510 range is if they are adjacent. If the second is a subset of the
4511 first, the result is the first. Otherwise, the range to exclude
4512 starts at the beginning of the first range and ends at the end of the
4516 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4517 range_successor (high0
),
4519 in_p
= 0, low
= low0
, high
= high1
;
4522 /* Canonicalize - [min, x] into - [-, x]. */
4523 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4524 switch (TREE_CODE (TREE_TYPE (low0
)))
4527 if (TYPE_PRECISION (TREE_TYPE (low0
))
4528 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4532 if (tree_int_cst_equal (low0
,
4533 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4537 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4538 && integer_zerop (low0
))
4545 /* Canonicalize - [x, max] into - [x, -]. */
4546 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4547 switch (TREE_CODE (TREE_TYPE (high1
)))
4550 if (TYPE_PRECISION (TREE_TYPE (high1
))
4551 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4555 if (tree_int_cst_equal (high1
,
4556 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4560 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4561 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4563 build_int_cst (TREE_TYPE (high1
), 1),
4571 /* The ranges might be also adjacent between the maximum and
4572 minimum values of the given type. For
4573 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4574 return + [x + 1, y - 1]. */
4575 if (low0
== 0 && high1
== 0)
4577 low
= range_successor (high0
);
4578 high
= range_predecessor (low1
);
4579 if (low
== 0 || high
== 0)
4589 in_p
= 0, low
= low0
, high
= high0
;
4591 in_p
= 0, low
= low0
, high
= high1
;
4594 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4599 /* Subroutine of fold, looking inside expressions of the form
4600 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4601 of the COND_EXPR. This function is being used also to optimize
4602 A op B ? C : A, by reversing the comparison first.
4604 Return a folded expression whose code is not a COND_EXPR
4605 anymore, or NULL_TREE if no folding opportunity is found. */
4608 fold_cond_expr_with_comparison (location_t loc
, tree type
,
4609 tree arg0
, tree arg1
, tree arg2
)
4611 enum tree_code comp_code
= TREE_CODE (arg0
);
4612 tree arg00
= TREE_OPERAND (arg0
, 0);
4613 tree arg01
= TREE_OPERAND (arg0
, 1);
4614 tree arg1_type
= TREE_TYPE (arg1
);
4620 /* If we have A op 0 ? A : -A, consider applying the following
4623 A == 0? A : -A same as -A
4624 A != 0? A : -A same as A
4625 A >= 0? A : -A same as abs (A)
4626 A > 0? A : -A same as abs (A)
4627 A <= 0? A : -A same as -abs (A)
4628 A < 0? A : -A same as -abs (A)
4630 None of these transformations work for modes with signed
4631 zeros. If A is +/-0, the first two transformations will
4632 change the sign of the result (from +0 to -0, or vice
4633 versa). The last four will fix the sign of the result,
4634 even though the original expressions could be positive or
4635 negative, depending on the sign of A.
4637 Note that all these transformations are correct if A is
4638 NaN, since the two alternatives (A and -A) are also NaNs. */
4639 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4640 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4641 ? real_zerop (arg01
)
4642 : integer_zerop (arg01
))
4643 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4644 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4645 /* In the case that A is of the form X-Y, '-A' (arg2) may
4646 have already been folded to Y-X, check for that. */
4647 || (TREE_CODE (arg1
) == MINUS_EXPR
4648 && TREE_CODE (arg2
) == MINUS_EXPR
4649 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4650 TREE_OPERAND (arg2
, 1), 0)
4651 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4652 TREE_OPERAND (arg2
, 0), 0))))
4657 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
4658 return pedantic_non_lvalue_loc (loc
,
4659 fold_convert_loc (loc
, type
,
4660 negate_expr (tem
)));
4663 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4666 if (flag_trapping_math
)
4671 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4672 arg1
= fold_convert_loc (loc
, signed_type_for
4673 (TREE_TYPE (arg1
)), arg1
);
4674 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4675 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4678 if (flag_trapping_math
)
4682 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4683 arg1
= fold_convert_loc (loc
, signed_type_for
4684 (TREE_TYPE (arg1
)), arg1
);
4685 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4686 return negate_expr (fold_convert_loc (loc
, type
, tem
));
4688 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4692 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4693 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4694 both transformations are correct when A is NaN: A != 0
4695 is then true, and A == 0 is false. */
4697 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4698 && integer_zerop (arg01
) && integer_zerop (arg2
))
4700 if (comp_code
== NE_EXPR
)
4701 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4702 else if (comp_code
== EQ_EXPR
)
4703 return build_zero_cst (type
);
4706 /* Try some transformations of A op B ? A : B.
4708 A == B? A : B same as B
4709 A != B? A : B same as A
4710 A >= B? A : B same as max (A, B)
4711 A > B? A : B same as max (B, A)
4712 A <= B? A : B same as min (A, B)
4713 A < B? A : B same as min (B, A)
4715 As above, these transformations don't work in the presence
4716 of signed zeros. For example, if A and B are zeros of
4717 opposite sign, the first two transformations will change
4718 the sign of the result. In the last four, the original
4719 expressions give different results for (A=+0, B=-0) and
4720 (A=-0, B=+0), but the transformed expressions do not.
4722 The first two transformations are correct if either A or B
4723 is a NaN. In the first transformation, the condition will
4724 be false, and B will indeed be chosen. In the case of the
4725 second transformation, the condition A != B will be true,
4726 and A will be chosen.
4728 The conversions to max() and min() are not correct if B is
4729 a number and A is not. The conditions in the original
4730 expressions will be false, so all four give B. The min()
4731 and max() versions would give a NaN instead. */
4732 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4733 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4734 /* Avoid these transformations if the COND_EXPR may be used
4735 as an lvalue in the C++ front-end. PR c++/19199. */
4737 || VECTOR_TYPE_P (type
)
4738 || (strcmp (lang_hooks
.name
, "GNU C++") != 0
4739 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4740 || ! maybe_lvalue_p (arg1
)
4741 || ! maybe_lvalue_p (arg2
)))
4743 tree comp_op0
= arg00
;
4744 tree comp_op1
= arg01
;
4745 tree comp_type
= TREE_TYPE (comp_op0
);
4747 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4748 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
4758 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
4760 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4765 /* In C++ a ?: expression can be an lvalue, so put the
4766 operand which will be used if they are equal first
4767 so that we can convert this back to the
4768 corresponding COND_EXPR. */
4769 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4771 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4772 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4773 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
4774 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
4775 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
4776 comp_op1
, comp_op0
);
4777 return pedantic_non_lvalue_loc (loc
,
4778 fold_convert_loc (loc
, type
, tem
));
4785 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4787 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4788 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4789 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
4790 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
4791 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
4792 comp_op1
, comp_op0
);
4793 return pedantic_non_lvalue_loc (loc
,
4794 fold_convert_loc (loc
, type
, tem
));
4798 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4799 return pedantic_non_lvalue_loc (loc
,
4800 fold_convert_loc (loc
, type
, arg2
));
4803 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4804 return pedantic_non_lvalue_loc (loc
,
4805 fold_convert_loc (loc
, type
, arg1
));
4808 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4813 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4814 we might still be able to simplify this. For example,
4815 if C1 is one less or one more than C2, this might have started
4816 out as a MIN or MAX and been transformed by this function.
4817 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4819 if (INTEGRAL_TYPE_P (type
)
4820 && TREE_CODE (arg01
) == INTEGER_CST
4821 && TREE_CODE (arg2
) == INTEGER_CST
)
4825 if (TREE_CODE (arg1
) == INTEGER_CST
)
4827 /* We can replace A with C1 in this case. */
4828 arg1
= fold_convert_loc (loc
, type
, arg01
);
4829 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
4832 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4833 MIN_EXPR, to preserve the signedness of the comparison. */
4834 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4836 && operand_equal_p (arg01
,
4837 const_binop (PLUS_EXPR
, arg2
,
4838 build_int_cst (type
, 1)),
4841 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4842 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4844 return pedantic_non_lvalue_loc (loc
,
4845 fold_convert_loc (loc
, type
, tem
));
4850 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4852 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4854 && operand_equal_p (arg01
,
4855 const_binop (MINUS_EXPR
, arg2
,
4856 build_int_cst (type
, 1)),
4859 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4860 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4862 return pedantic_non_lvalue_loc (loc
,
4863 fold_convert_loc (loc
, type
, tem
));
4868 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4869 MAX_EXPR, to preserve the signedness of the comparison. */
4870 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4872 && operand_equal_p (arg01
,
4873 const_binop (MINUS_EXPR
, arg2
,
4874 build_int_cst (type
, 1)),
4877 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4878 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4880 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4885 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4886 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4888 && operand_equal_p (arg01
,
4889 const_binop (PLUS_EXPR
, arg2
,
4890 build_int_cst (type
, 1)),
4893 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4894 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4896 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
/* Default heuristic: turn a short-circuit AND/OR of two range tests into a
   non-short-circuit bitwise form only when branches are expensive enough
   (cost >= 2) for the current speed/size optimization setting.  Targets may
   override this macro.  */
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
4916 /* EXP is some logical combination of boolean tests. See if we can
4917 merge it into some range test. Return the new tree if so. */
4920 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
4923 int or_op
= (code
== TRUTH_ORIF_EXPR
4924 || code
== TRUTH_OR_EXPR
);
4925 int in0_p
, in1_p
, in_p
;
4926 tree low0
, low1
, low
, high0
, high1
, high
;
4927 bool strict_overflow_p
= false;
4929 const char * const warnmsg
= G_("assuming signed overflow does not occur "
4930 "when simplifying range test");
4932 if (!INTEGRAL_TYPE_P (type
))
4935 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
4936 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
4938 /* If this is an OR operation, invert both sides; we will invert
4939 again at the end. */
4941 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
4943 /* If both expressions are the same, if we can merge the ranges, and we
4944 can build the range test, return it or it inverted. If one of the
4945 ranges is always true or always false, consider it to be the same
4946 expression as the other. */
4947 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
4948 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
4950 && 0 != (tem
= (build_range_check (loc
, type
,
4952 : rhs
!= 0 ? rhs
: integer_zero_node
,
4955 if (strict_overflow_p
)
4956 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
4957 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
4960 /* On machines where the branch cost is expensive, if this is a
4961 short-circuited branch and the underlying object on both sides
4962 is the same, make a non-short-circuit operation. */
4963 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4964 && lhs
!= 0 && rhs
!= 0
4965 && (code
== TRUTH_ANDIF_EXPR
4966 || code
== TRUTH_ORIF_EXPR
)
4967 && operand_equal_p (lhs
, rhs
, 0))
4969 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4970 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4971 which cases we can't do this. */
4972 if (simple_operand_p (lhs
))
4973 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
4974 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4977 else if (!lang_hooks
.decls
.global_bindings_p ()
4978 && !CONTAINS_PLACEHOLDER_P (lhs
))
4980 tree common
= save_expr (lhs
);
4982 if (0 != (lhs
= build_range_check (loc
, type
, common
,
4983 or_op
? ! in0_p
: in0_p
,
4985 && (0 != (rhs
= build_range_check (loc
, type
, common
,
4986 or_op
? ! in1_p
: in1_p
,
4989 if (strict_overflow_p
)
4990 fold_overflow_warning (warnmsg
,
4991 WARN_STRICT_OVERFLOW_COMPARISON
);
4992 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
4993 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
5002 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5003 bit value. Arrange things so the extra bits will be set to zero if and
5004 only if C is signed-extended to its full width. If MASK is nonzero,
5005 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5008 unextend (tree c
, int p
, int unsignedp
, tree mask
)
5010 tree type
= TREE_TYPE (c
);
5011 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
5014 if (p
== modesize
|| unsignedp
)
5017 /* We work by getting just the sign bit into the low-order bit, then
5018 into the high-order bit, then sign-extend. We then XOR that value
5020 temp
= build_int_cst (TREE_TYPE (c
), wi::extract_uhwi (c
, p
- 1, 1));
5022 /* We must use a signed type in order to get an arithmetic right shift.
5023 However, we must also avoid introducing accidental overflows, so that
5024 a subsequent call to integer_zerop will work. Hence we must
5025 do the type conversion here. At this point, the constant is either
5026 zero or one, and the conversion to a signed type can never overflow.
5027 We could get an overflow if this conversion is done anywhere else. */
5028 if (TYPE_UNSIGNED (type
))
5029 temp
= fold_convert (signed_type_for (type
), temp
);
5031 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
5032 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
5034 temp
= const_binop (BIT_AND_EXPR
, temp
,
5035 fold_convert (TREE_TYPE (c
), mask
));
5036 /* If necessary, convert the type back to match the type of C. */
5037 if (TYPE_UNSIGNED (type
))
5038 temp
= fold_convert (type
, temp
);
5040 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
5043 /* For an expression that has the form
5047 we can drop one of the inner expressions and simplify to
5051 LOC is the location of the resulting expression. OP is the inner
5052 logical operation; the left-hand side in the examples above, while CMPOP
5053 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5054 removing a condition that guards another, as in
5055 (A != NULL && A->...) || A == NULL
5056 which we must not transform. If RHS_ONLY is true, only eliminate the
5057 right-most operand of the inner logical operation. */
5060 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
5063 tree type
= TREE_TYPE (cmpop
);
5064 enum tree_code code
= TREE_CODE (cmpop
);
5065 enum tree_code truthop_code
= TREE_CODE (op
);
5066 tree lhs
= TREE_OPERAND (op
, 0);
5067 tree rhs
= TREE_OPERAND (op
, 1);
5068 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
5069 enum tree_code rhs_code
= TREE_CODE (rhs
);
5070 enum tree_code lhs_code
= TREE_CODE (lhs
);
5071 enum tree_code inv_code
;
5073 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
5076 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
5079 if (rhs_code
== truthop_code
)
5081 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
5082 if (newrhs
!= NULL_TREE
)
5085 rhs_code
= TREE_CODE (rhs
);
5088 if (lhs_code
== truthop_code
&& !rhs_only
)
5090 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
5091 if (newlhs
!= NULL_TREE
)
5094 lhs_code
= TREE_CODE (lhs
);
5098 inv_code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (type
)));
5099 if (inv_code
== rhs_code
5100 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5101 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5103 if (!rhs_only
&& inv_code
== lhs_code
5104 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
5105 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
5107 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
5108 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5113 /* Find ways of folding logical expressions of LHS and RHS:
5114 Try to merge two comparisons to the same innermost item.
5115 Look for range tests like "ch >= '0' && ch <= '9'".
5116 Look for combinations of simple terms on machines with expensive branches
5117 and evaluate the RHS unconditionally.
5119 For example, if we have p->a == 2 && p->b == 4 and we can make an
5120 object large enough to span both A and B, we can do this with a comparison
5121 against the object ANDed with the a mask.
5123 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5124 operations to do this with one comparison.
5126 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5127 function and the one above.
5129 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5130 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5132 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5135 We return the simplified tree or 0 if no optimization is possible. */
5138 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
5141 /* If this is the "or" of two comparisons, we can do something if
5142 the comparisons are NE_EXPR. If this is the "and", we can do something
5143 if the comparisons are EQ_EXPR. I.e.,
5144 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5146 WANTED_CODE is this operation code. For single bit fields, we can
5147 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5148 comparison for one-bit fields. */
5150 enum tree_code wanted_code
;
5151 enum tree_code lcode
, rcode
;
5152 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5153 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5154 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5155 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5156 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5157 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5158 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5159 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5160 machine_mode lnmode
, rnmode
;
5161 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5162 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5163 tree l_const
, r_const
;
5164 tree lntype
, rntype
, result
;
5165 HOST_WIDE_INT first_bit
, end_bit
;
5168 /* Start by getting the comparison codes. Fail if anything is volatile.
5169 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5170 it were surrounded with a NE_EXPR. */
5172 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5175 lcode
= TREE_CODE (lhs
);
5176 rcode
= TREE_CODE (rhs
);
5178 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5180 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5181 build_int_cst (TREE_TYPE (lhs
), 0));
5185 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5187 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5188 build_int_cst (TREE_TYPE (rhs
), 0));
5192 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5193 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5196 ll_arg
= TREE_OPERAND (lhs
, 0);
5197 lr_arg
= TREE_OPERAND (lhs
, 1);
5198 rl_arg
= TREE_OPERAND (rhs
, 0);
5199 rr_arg
= TREE_OPERAND (rhs
, 1);
5201 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5202 if (simple_operand_p (ll_arg
)
5203 && simple_operand_p (lr_arg
))
5205 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5206 && operand_equal_p (lr_arg
, rr_arg
, 0))
5208 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5209 truth_type
, ll_arg
, lr_arg
);
5213 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5214 && operand_equal_p (lr_arg
, rl_arg
, 0))
5216 result
= combine_comparisons (loc
, code
, lcode
,
5217 swap_tree_comparison (rcode
),
5218 truth_type
, ll_arg
, lr_arg
);
5224 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5225 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5227 /* If the RHS can be evaluated unconditionally and its operands are
5228 simple, it wins to evaluate the RHS unconditionally on machines
5229 with expensive branches. In this case, this isn't a comparison
5230 that can be merged. */
5232 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5234 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5235 && simple_operand_p (rl_arg
)
5236 && simple_operand_p (rr_arg
))
5238 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5239 if (code
== TRUTH_OR_EXPR
5240 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5241 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5242 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5243 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5244 return build2_loc (loc
, NE_EXPR
, truth_type
,
5245 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5247 build_int_cst (TREE_TYPE (ll_arg
), 0));
5249 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5250 if (code
== TRUTH_AND_EXPR
5251 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5252 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5253 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5254 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5255 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5256 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5258 build_int_cst (TREE_TYPE (ll_arg
), 0));
5261 /* See if the comparisons can be merged. Then get all the parameters for
5264 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5265 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5269 ll_inner
= decode_field_reference (loc
, ll_arg
,
5270 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5271 &ll_unsignedp
, &volatilep
, &ll_mask
,
5273 lr_inner
= decode_field_reference (loc
, lr_arg
,
5274 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5275 &lr_unsignedp
, &volatilep
, &lr_mask
,
5277 rl_inner
= decode_field_reference (loc
, rl_arg
,
5278 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5279 &rl_unsignedp
, &volatilep
, &rl_mask
,
5281 rr_inner
= decode_field_reference (loc
, rr_arg
,
5282 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5283 &rr_unsignedp
, &volatilep
, &rr_mask
,
5286 /* It must be true that the inner operation on the lhs of each
5287 comparison must be the same if we are to be able to do anything.
5288 Then see if we have constants. If not, the same must be true for
5290 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5291 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5294 if (TREE_CODE (lr_arg
) == INTEGER_CST
5295 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5296 l_const
= lr_arg
, r_const
= rr_arg
;
5297 else if (lr_inner
== 0 || rr_inner
== 0
5298 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5301 l_const
= r_const
= 0;
5303 /* If either comparison code is not correct for our logical operation,
5304 fail. However, we can convert a one-bit comparison against zero into
5305 the opposite comparison against that bit being set in the field. */
5307 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5308 if (lcode
!= wanted_code
)
5310 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5312 /* Make the left operand unsigned, since we are only interested
5313 in the value of one bit. Otherwise we are doing the wrong
5322 /* This is analogous to the code for l_const above. */
5323 if (rcode
!= wanted_code
)
5325 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5334 /* See if we can find a mode that contains both fields being compared on
5335 the left. If we can't, fail. Otherwise, update all constants and masks
5336 to be relative to a field of that size. */
5337 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5338 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5339 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5340 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5342 if (lnmode
== VOIDmode
)
5345 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5346 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5347 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5348 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5350 if (BYTES_BIG_ENDIAN
)
5352 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5353 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5356 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5357 size_int (xll_bitpos
));
5358 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5359 size_int (xrl_bitpos
));
5363 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5364 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5365 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5366 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5367 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5370 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5372 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5377 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5378 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5379 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5380 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5381 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5384 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5386 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5390 /* If the right sides are not constant, do the same for it. Also,
5391 disallow this optimization if a size or signedness mismatch occurs
5392 between the left and right sides. */
5395 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5396 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5397 /* Make sure the two fields on the right
5398 correspond to the left without being swapped. */
5399 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5402 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5403 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5404 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5405 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5407 if (rnmode
== VOIDmode
)
5410 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5411 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5412 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5413 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5415 if (BYTES_BIG_ENDIAN
)
5417 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5418 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5421 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5423 size_int (xlr_bitpos
));
5424 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5426 size_int (xrr_bitpos
));
5428 /* Make a mask that corresponds to both fields being compared.
5429 Do this for both items being compared. If the operands are the
5430 same size and the bits being compared are in the same position
5431 then we can do this by masking both and comparing the masked
5433 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5434 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5435 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5437 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5438 ll_unsignedp
|| rl_unsignedp
);
5439 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5440 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5442 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5443 lr_unsignedp
|| rr_unsignedp
);
5444 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5445 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5447 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5450 /* There is still another way we can do something: If both pairs of
5451 fields being compared are adjacent, we may be able to make a wider
5452 field containing them both.
5454 Note that we still must mask the lhs/rhs expressions. Furthermore,
5455 the mask must be shifted to account for the shift done by
5456 make_bit_field_ref. */
5457 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5458 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5459 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5460 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5464 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5465 ll_bitsize
+ rl_bitsize
,
5466 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5467 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5468 lr_bitsize
+ rr_bitsize
,
5469 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5471 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5472 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5473 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5474 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5476 /* Convert to the smaller type before masking out unwanted bits. */
5478 if (lntype
!= rntype
)
5480 if (lnbitsize
> rnbitsize
)
5482 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5483 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5486 else if (lnbitsize
< rnbitsize
)
5488 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5489 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5494 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5495 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5497 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5498 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5500 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5506 /* Handle the case of comparisons with constants. If there is something in
5507 common between the masks, those bits of the constants must be the same.
5508 If not, the condition is always false. Test for this to avoid generating
5509 incorrect code below. */
5510 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5511 if (! integer_zerop (result
)
5512 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5513 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5515 if (wanted_code
== NE_EXPR
)
5517 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5518 return constant_boolean_node (true, truth_type
);
5522 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5523 return constant_boolean_node (false, truth_type
);
5527 /* Construct the expression we will return. First get the component
5528 reference we will make. Unless the mask is all ones the width of
5529 that field, perform the mask operation. Then compare with the
5531 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5532 ll_unsignedp
|| rl_unsignedp
);
5534 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5535 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5536 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5538 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5539 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
5542 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5546 optimize_minmax_comparison (location_t loc
, enum tree_code code
, tree type
,
5550 enum tree_code op_code
;
5553 int consts_equal
, consts_lt
;
5556 STRIP_SIGN_NOPS (arg0
);
5558 op_code
= TREE_CODE (arg0
);
5559 minmax_const
= TREE_OPERAND (arg0
, 1);
5560 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5561 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5562 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5563 inner
= TREE_OPERAND (arg0
, 0);
5565 /* If something does not permit us to optimize, return the original tree. */
5566 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5567 || TREE_CODE (comp_const
) != INTEGER_CST
5568 || TREE_OVERFLOW (comp_const
)
5569 || TREE_CODE (minmax_const
) != INTEGER_CST
5570 || TREE_OVERFLOW (minmax_const
))
5573 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5574 and GT_EXPR, doing the rest with recursive calls using logical
5578 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5581 = optimize_minmax_comparison (loc
,
5582 invert_tree_comparison (code
, false),
5585 return invert_truthvalue_loc (loc
, tem
);
5591 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5592 optimize_minmax_comparison
5593 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5594 optimize_minmax_comparison
5595 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
5598 if (op_code
== MAX_EXPR
&& consts_equal
)
5599 /* MAX (X, 0) == 0 -> X <= 0 */
5600 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5602 else if (op_code
== MAX_EXPR
&& consts_lt
)
5603 /* MAX (X, 0) == 5 -> X == 5 */
5604 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5606 else if (op_code
== MAX_EXPR
)
5607 /* MAX (X, 0) == -1 -> false */
5608 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5610 else if (consts_equal
)
5611 /* MIN (X, 0) == 0 -> X >= 0 */
5612 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5615 /* MIN (X, 0) == 5 -> false */
5616 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5619 /* MIN (X, 0) == -1 -> X == -1 */
5620 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5623 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5624 /* MAX (X, 0) > 0 -> X > 0
5625 MAX (X, 0) > 5 -> X > 5 */
5626 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5628 else if (op_code
== MAX_EXPR
)
5629 /* MAX (X, 0) > -1 -> true */
5630 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5632 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5633 /* MIN (X, 0) > 0 -> false
5634 MIN (X, 0) > 5 -> false */
5635 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5638 /* MIN (X, 0) > -1 -> X > -1 */
5639 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5646 /* T is an integer expression that is being multiplied, divided, or taken a
5647 modulus (CODE says which and what kind of divide or modulus) by a
5648 constant C. See if we can eliminate that operation by folding it with
5649 other operations already in T. WIDE_TYPE, if non-null, is a type that
5650 should be used for the computation if wider than our type.
5652 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5653 (X * 2) + (Y * 4). We must, however, be assured that either the original
5654 expression would not overflow or that overflow is undefined for the type
5655 in the language in question.
5657 If we return a non-null expression, it is an equivalent form of the
5658 original computation, but need not be in the original type.
5660 We set *STRICT_OVERFLOW_P to true if the return values depends on
5661 signed overflow being undefined. Otherwise we do not change
5662 *STRICT_OVERFLOW_P. */
5665 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5666 bool *strict_overflow_p
)
5668 /* To avoid exponential search depth, refuse to allow recursion past
5669 three levels. Beyond that (1) it's highly unlikely that we'll find
5670 something interesting and (2) we've probably processed it before
5671 when we built the inner expression. */
5680 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5687 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5688 bool *strict_overflow_p
)
5690 tree type
= TREE_TYPE (t
);
5691 enum tree_code tcode
= TREE_CODE (t
);
5692 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5693 > GET_MODE_SIZE (TYPE_MODE (type
)))
5694 ? wide_type
: type
);
5696 int same_p
= tcode
== code
;
5697 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5698 bool sub_strict_overflow_p
;
5700 /* Don't deal with constants of zero here; they confuse the code below. */
5701 if (integer_zerop (c
))
5704 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5705 op0
= TREE_OPERAND (t
, 0);
5707 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5708 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5710 /* Note that we need not handle conditional operations here since fold
5711 already handles those cases. So just do arithmetic here. */
5715 /* For a constant, we can always simplify if we are a multiply
5716 or (for divide and modulus) if it is a multiple of our constant. */
5717 if (code
== MULT_EXPR
5718 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
5719 return const_binop (code
, fold_convert (ctype
, t
),
5720 fold_convert (ctype
, c
));
5723 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5724 /* If op0 is an expression ... */
5725 if ((COMPARISON_CLASS_P (op0
)
5726 || UNARY_CLASS_P (op0
)
5727 || BINARY_CLASS_P (op0
)
5728 || VL_EXP_CLASS_P (op0
)
5729 || EXPRESSION_CLASS_P (op0
))
5730 /* ... and has wrapping overflow, and its type is smaller
5731 than ctype, then we cannot pass through as widening. */
5732 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
5733 && (TYPE_PRECISION (ctype
)
5734 > TYPE_PRECISION (TREE_TYPE (op0
))))
5735 /* ... or this is a truncation (t is narrower than op0),
5736 then we cannot pass through this narrowing. */
5737 || (TYPE_PRECISION (type
)
5738 < TYPE_PRECISION (TREE_TYPE (op0
)))
5739 /* ... or signedness changes for division or modulus,
5740 then we cannot pass through this conversion. */
5741 || (code
!= MULT_EXPR
5742 && (TYPE_UNSIGNED (ctype
)
5743 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
5744 /* ... or has undefined overflow while the converted to
5745 type has not, we cannot do the operation in the inner type
5746 as that would introduce undefined overflow. */
5747 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
5748 && !TYPE_OVERFLOW_UNDEFINED (type
))))
5751 /* Pass the constant down and see if we can make a simplification. If
5752 we can, replace this expression with the inner simplification for
5753 possible later conversion to our or some other type. */
5754 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5755 && TREE_CODE (t2
) == INTEGER_CST
5756 && !TREE_OVERFLOW (t2
)
5757 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5759 ? ctype
: NULL_TREE
,
5760 strict_overflow_p
))))
5765 /* If widening the type changes it from signed to unsigned, then we
5766 must avoid building ABS_EXPR itself as unsigned. */
5767 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5769 tree cstype
= (*signed_type_for
) (ctype
);
5770 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
5773 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5774 return fold_convert (ctype
, t1
);
5778 /* If the constant is negative, we cannot simplify this. */
5779 if (tree_int_cst_sgn (c
) == -1)
5783 /* For division and modulus, type can't be unsigned, as e.g.
5784 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5785 For signed types, even with wrapping overflow, this is fine. */
5786 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
5788 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
5790 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5793 case MIN_EXPR
: case MAX_EXPR
:
5794 /* If widening the type changes the signedness, then we can't perform
5795 this optimization as that changes the result. */
5796 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5799 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5800 sub_strict_overflow_p
= false;
5801 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5802 &sub_strict_overflow_p
)) != 0
5803 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
5804 &sub_strict_overflow_p
)) != 0)
5806 if (tree_int_cst_sgn (c
) < 0)
5807 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5808 if (sub_strict_overflow_p
)
5809 *strict_overflow_p
= true;
5810 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5811 fold_convert (ctype
, t2
));
5815 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5816 /* If the second operand is constant, this is a multiplication
5817 or floor division, by a power of two, so we can treat it that
5818 way unless the multiplier or divisor overflows. Signed
5819 left-shift overflow is implementation-defined rather than
5820 undefined in C90, so do not convert signed left shift into
5822 if (TREE_CODE (op1
) == INTEGER_CST
5823 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5824 /* const_binop may not detect overflow correctly,
5825 so check for it explicitly here. */
5826 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
5827 && 0 != (t1
= fold_convert (ctype
,
5828 const_binop (LSHIFT_EXPR
,
5831 && !TREE_OVERFLOW (t1
))
5832 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5833 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5835 fold_convert (ctype
, op0
),
5837 c
, code
, wide_type
, strict_overflow_p
);
5840 case PLUS_EXPR
: case MINUS_EXPR
:
5841 /* See if we can eliminate the operation on both sides. If we can, we
5842 can return a new PLUS or MINUS. If we can't, the only remaining
5843 cases where we can do anything are if the second operand is a
5845 sub_strict_overflow_p
= false;
5846 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5847 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5848 if (t1
!= 0 && t2
!= 0
5849 && (code
== MULT_EXPR
5850 /* If not multiplication, we can only do this if both operands
5851 are divisible by c. */
5852 || (multiple_of_p (ctype
, op0
, c
)
5853 && multiple_of_p (ctype
, op1
, c
))))
5855 if (sub_strict_overflow_p
)
5856 *strict_overflow_p
= true;
5857 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5858 fold_convert (ctype
, t2
));
5861 /* If this was a subtraction, negate OP1 and set it to be an addition.
5862 This simplifies the logic below. */
5863 if (tcode
== MINUS_EXPR
)
5865 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5866 /* If OP1 was not easily negatable, the constant may be OP0. */
5867 if (TREE_CODE (op0
) == INTEGER_CST
)
5878 if (TREE_CODE (op1
) != INTEGER_CST
)
5881 /* If either OP1 or C are negative, this optimization is not safe for
5882 some of the division and remainder types while for others we need
5883 to change the code. */
5884 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5886 if (code
== CEIL_DIV_EXPR
)
5887 code
= FLOOR_DIV_EXPR
;
5888 else if (code
== FLOOR_DIV_EXPR
)
5889 code
= CEIL_DIV_EXPR
;
5890 else if (code
!= MULT_EXPR
5891 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
5895 /* If it's a multiply or a division/modulus operation of a multiple
5896 of our constant, do the operation and verify it doesn't overflow. */
5897 if (code
== MULT_EXPR
5898 || wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
5900 op1
= const_binop (code
, fold_convert (ctype
, op1
),
5901 fold_convert (ctype
, c
));
5902 /* We allow the constant to overflow with wrapping semantics. */
5904 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
5910 /* If we have an unsigned type, we cannot widen the operation since it
5911 will change the result if the original computation overflowed. */
5912 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
5915 /* If we were able to eliminate our operation from the first side,
5916 apply our operation to the second side and reform the PLUS. */
5917 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
5918 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
5920 /* The last case is if we are a multiply. In that case, we can
5921 apply the distributive law to commute the multiply and addition
5922 if the multiplication of the constants doesn't overflow
5923 and overflow is defined. With undefined overflow
5924 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5925 if (code
== MULT_EXPR
&& TYPE_OVERFLOW_WRAPS (ctype
))
5926 return fold_build2 (tcode
, ctype
,
5927 fold_build2 (code
, ctype
,
5928 fold_convert (ctype
, op0
),
5929 fold_convert (ctype
, c
)),
5935 /* We have a special case here if we are doing something like
5936 (C * 8) % 4 since we know that's zero. */
5937 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
5938 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
5939 /* If the multiplication can overflow we cannot optimize this. */
5940 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
5941 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
5942 && wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
5944 *strict_overflow_p
= true;
5945 return omit_one_operand (type
, integer_zero_node
, op0
);
5948 /* ... fall through ... */
5950 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
5951 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
5952 /* If we can extract our operation from the LHS, do so and return a
5953 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5954 do something only if the second operand is a constant. */
5956 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5957 strict_overflow_p
)) != 0)
5958 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5959 fold_convert (ctype
, op1
));
5960 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
5961 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
5962 strict_overflow_p
)) != 0)
5963 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5964 fold_convert (ctype
, t1
));
5965 else if (TREE_CODE (op1
) != INTEGER_CST
)
5968 /* If these are the same operation types, we can associate them
5969 assuming no overflow. */
5972 bool overflow_p
= false;
5973 bool overflow_mul_p
;
5974 signop sign
= TYPE_SIGN (ctype
);
5975 wide_int mul
= wi::mul (op1
, c
, sign
, &overflow_mul_p
);
5976 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
5978 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
5981 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5982 wide_int_to_tree (ctype
, mul
));
5985 /* If these operations "cancel" each other, we have the main
5986 optimizations of this pass, which occur when either constant is a
5987 multiple of the other, in which case we replace this with either an
5988 operation or CODE or TCODE.
5990 If we have an unsigned type, we cannot do this since it will change
5991 the result if the original computation overflowed. */
5992 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
5993 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
5994 || (tcode
== MULT_EXPR
5995 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
5996 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
5997 && code
!= MULT_EXPR
)))
5999 if (wi::multiple_of_p (op1
, c
, TYPE_SIGN (type
)))
6001 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6002 *strict_overflow_p
= true;
6003 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6004 fold_convert (ctype
,
6005 const_binop (TRUNC_DIV_EXPR
,
6008 else if (wi::multiple_of_p (c
, op1
, TYPE_SIGN (type
)))
6010 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6011 *strict_overflow_p
= true;
6012 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6013 fold_convert (ctype
,
6014 const_binop (TRUNC_DIV_EXPR
,
6027 /* Return a node which has the indicated constant VALUE (either 0 or
6028 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6029 and is of the indicated TYPE. */
/* NOTE(review): this extract is fragmented -- the return-type line, braces
   and parts of some statements from the upstream fold-const.c appear to be
   missing from this view; verify against the original file before editing.  */
6032 constant_boolean_node (bool value
, tree type
)
/* Fast paths for the two most common boolean-like result types.  */
6034 if (type
== integer_type_node
)
6035 return value
? integer_one_node
: integer_zero_node
;
6036 else if (type
== boolean_type_node
)
6037 return value
? boolean_true_node
: boolean_false_node
;
/* For vector types, build a vector whose elements all hold the value
   (the element constant argument is among the lines lost in this extract).  */
6038 else if (TREE_CODE (type
) == VECTOR_TYPE
)
6039 return build_vector_from_val (type
,
6040 build_int_cst (TREE_TYPE (type
),
/* Fallback: convert a plain 0/1 integer constant to TYPE.  */
6043 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
6047 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6048 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6049 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6050 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6051 COND is the first argument to CODE; otherwise (as in the example
6052 given here), it is the second argument. TYPE is the type of the
6053 original expression. Return NULL_TREE if no simplification is
/* NOTE(review): fragmented extract -- return type, braces and several
   statements appear missing from this view; compare with upstream
   fold-const.c before relying on the exact control flow shown here.  */
6057 fold_binary_op_with_conditional_arg (location_t loc
,
6058 enum tree_code code
,
6059 tree type
, tree op0
, tree op1
,
6060 tree cond
, tree arg
, int cond_first_p
)
/* Pick the operand types according to which side COND occupies.  */
6062 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
6063 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
6064 tree test
, true_value
, false_value
;
6065 tree lhs
= NULL_TREE
;
6066 tree rhs
= NULL_TREE
;
6067 enum tree_code cond_code
= COND_EXPR
;
/* Case 1: COND is an explicit (vector) conditional -- pull its three
   operands apart.  */
6069 if (TREE_CODE (cond
) == COND_EXPR
6070 || TREE_CODE (cond
) == VEC_COND_EXPR
)
6072 test
= TREE_OPERAND (cond
, 0);
6073 true_value
= TREE_OPERAND (cond
, 1);
6074 false_value
= TREE_OPERAND (cond
, 2);
6075 /* If this operand throws an expression, then it does not make
6076 sense to try to perform a logical or arithmetic operation
6078 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
6080 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
/* Case 2 (the `(x < y)' form): treat COND itself as the test and use
   boolean constants for the two arms.  */
6085 tree testtype
= TREE_TYPE (cond
);
6087 true_value
= constant_boolean_node (true, testtype
);
6088 false_value
= constant_boolean_node (false, testtype
);
6091 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
6092 cond_code
= VEC_COND_EXPR
;
6094 /* This transformation is only worthwhile if we don't have to wrap ARG
6095 in a SAVE_EXPR and the operation can be simplified without recursing
6096 on at least one of the branches once its pushed inside the COND_EXPR. */
6097 if (!TREE_CONSTANT (arg
)
6098 && (TREE_SIDE_EFFECTS (arg
)
6099 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
6100 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6103 arg
= fold_convert_loc (loc
, arg_type
, arg
);
/* Build CODE applied to ARG and each arm, honoring operand order.  */
6106 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6108 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6110 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6114 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6116 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6118 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6121 /* Check that we have simplified at least one of the branches. */
6122 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6125 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
6129 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6131 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6132 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6133 ADDEND is the same as X.
6135 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6136 and finite. The problematic cases are when X is zero, and its mode
6137 has signed zeros. In the case of rounding towards -infinity,
6138 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6139 modes, X + 0 is not the same as X because -0 + 0 is 0. */
/* NOTE(review): fragmented extract -- the return-type line, braces and some
   early-return statements appear missing from this view; verify against the
   upstream fold-const.c before editing.  */
6142 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
/* Only +/-0.0 addends are candidates at all.  */
6144 if (!real_zerop (addend
))
6147 /* Don't allow the fold with -fsignaling-nans. */
6148 if (HONOR_SNANS (TYPE_MODE (type
)))
6151 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6152 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
6155 /* In a vector or complex, we would need to check the sign of all zeros. */
6156 if (TREE_CODE (addend
) != REAL_CST
)
6159 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6160 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6163 /* The mode has signed zeros, and we have to honor their sign.
6164 In this situation, there is only one case we can return true for.
6165 X - 0 is the same as X unless rounding towards -infinity is
6167 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
));
6170 /* Subroutine of fold() that checks comparisons of built-in math
6171 functions against real constants.
6173 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6174 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6175 is the type of the result and ARG0 and ARG1 are the operands of the
6176 comparison. ARG1 must be a TREE_REAL_CST.
6178 The function returns the constant folded tree if a simplification
6179 can be made, and NULL_TREE otherwise. */
/* NOTE(review): fragmented extract -- return type, local declarations
   (e.g. of C and C2), braces and some constant arguments appear missing
   from this view; verify against the upstream fold-const.c.  */
6182 fold_mathfn_compare (location_t loc
,
6183 enum built_in_function fcode
, enum tree_code code
,
6184 tree type
, tree arg0
, tree arg1
)
/* Only sqrt-family builtins are handled in this visible portion.  */
6188 if (BUILTIN_SQRT_P (fcode
))
6190 tree arg
= CALL_EXPR_ARG (arg0
, 0);
6191 machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
6193 c
= TREE_REAL_CST (arg1
);
/* sqrt(x) compared against a negative constant.  */
6194 if (REAL_VALUE_NEGATIVE (c
))
6196 /* sqrt(x) < y is always false, if y is negative. */
6197 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
6198 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6200 /* sqrt(x) > y is always true, if y is negative and we
6201 don't care about NaNs, i.e. negative values of x. */
6202 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
6203 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6205 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6206 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6207 build_real (TREE_TYPE (arg
), dconst0
));
/* sqrt(x) >/>= c: square the constant and compare x against c*c.  */
6209 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
6213 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6214 real_convert (&c2
, mode
, &c2
);
6216 if (REAL_VALUE_ISINF (c2
))
6218 /* sqrt(x) > y is x == +Inf, when y is very large. */
6219 if (HONOR_INFINITIES (mode
))
6220 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg
,
6221 build_real (TREE_TYPE (arg
), c2
));
6223 /* sqrt(x) > y is always false, when y is very large
6224 and we don't care about infinities. */
6225 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6228 /* sqrt(x) > c is the same as x > c*c. */
6229 return fold_build2_loc (loc
, code
, type
, arg
,
6230 build_real (TREE_TYPE (arg
), c2
));
/* sqrt(x) </<= c: same squaring trick, with extra NaN/Inf guards.  */
6232 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
6236 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6237 real_convert (&c2
, mode
, &c2
);
6239 if (REAL_VALUE_ISINF (c2
))
6241 /* sqrt(x) < y is always true, when y is a very large
6242 value and we don't care about NaNs or Infinities. */
6243 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
6244 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6246 /* sqrt(x) < y is x != +Inf when y is very large and we
6247 don't care about NaNs. */
6248 if (! HONOR_NANS (mode
))
6249 return fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6250 build_real (TREE_TYPE (arg
), c2
));
6252 /* sqrt(x) < y is x >= 0 when y is very large and we
6253 don't care about Infinities. */
6254 if (! HONOR_INFINITIES (mode
))
6255 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6256 build_real (TREE_TYPE (arg
), dconst0
));
6258 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6259 arg
= save_expr (arg
);
6260 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6261 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6262 build_real (TREE_TYPE (arg
),
6264 fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6265 build_real (TREE_TYPE (arg
),
6269 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6270 if (! HONOR_NANS (mode
))
6271 return fold_build2_loc (loc
, code
, type
, arg
,
6272 build_real (TREE_TYPE (arg
), c2
));
6274 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6275 arg
= save_expr (arg
);
6276 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6277 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6278 build_real (TREE_TYPE (arg
),
6280 fold_build2_loc (loc
, code
, type
, arg
,
6281 build_real (TREE_TYPE (arg
),
6289 /* Subroutine of fold() that optimizes comparisons against Infinities,
6290 either +Inf or -Inf.
6292 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6293 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6294 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6296 The function returns the constant folded tree if a simplification
6297 can be made, and NULL_TREE otherwise. */
/* NOTE(review): fragmented extract -- the return type, the switch on CODE
   and its case labels, and several braces appear missing from this view;
   the visible statements are the bodies of the per-comparison cases.
   Verify against the upstream fold-const.c.  */
6300 fold_inf_compare (location_t loc
, enum tree_code code
, tree type
,
6301 tree arg0
, tree arg1
)
6304 REAL_VALUE_TYPE max
;
6308 mode
= TYPE_MODE (TREE_TYPE (arg0
));
6310 /* For negative infinity swap the sense of the comparison. */
6311 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6313 code
= swap_tree_comparison (code
);
6318 /* x > +Inf is always false, if with ignore sNANs. */
6319 if (HONOR_SNANS (mode
))
6321 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6324 /* x <= +Inf is always true, if we don't case about NaNs. */
6325 if (! HONOR_NANS (mode
))
6326 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6328 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6329 arg0
= save_expr (arg0
);
6330 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg0
);
6334 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6335 real_maxval (&max
, neg
, mode
);
6336 return fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6337 arg0
, build_real (TREE_TYPE (arg0
), max
));
6340 /* x < +Inf is always equal to x <= DBL_MAX. */
6341 real_maxval (&max
, neg
, mode
);
6342 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6343 arg0
, build_real (TREE_TYPE (arg0
), max
));
6346 /* x != +Inf is always equal to !(x > DBL_MAX). */
6347 real_maxval (&max
, neg
, mode
);
6348 if (! HONOR_NANS (mode
))
6349 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6350 arg0
, build_real (TREE_TYPE (arg0
), max
));
6352 temp
= fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6353 arg0
, build_real (TREE_TYPE (arg0
), max
));
6354 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, temp
);
6363 /* Subroutine of fold() that optimizes comparisons of a division by
6364 a nonzero integer constant against an integer constant, i.e.
6367 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6368 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6369 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6371 The function returns the constant folded tree if a simplification
6372 can be made, and NULL_TREE otherwise. */
/* NOTE(review): fragmented extract -- the return type, a switch on CODE,
   case labels, braces and some declarations (e.g. of OVERFLOW) appear
   missing from this view.  The strategy that IS visible: compute the
   range [LO, HI] of ARG00 values for which ARG00 / ARG01 equals ARG1,
   then rewrite the comparison as a range check on ARG00.  Verify against
   the upstream fold-const.c before editing.  */
6375 fold_div_compare (location_t loc
,
6376 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6378 tree prod
, tmp
, hi
, lo
;
6379 tree arg00
= TREE_OPERAND (arg0
, 0);
6380 tree arg01
= TREE_OPERAND (arg0
, 1);
6381 signop sign
= TYPE_SIGN (TREE_TYPE (arg0
));
6382 bool neg_overflow
= false;
6385 /* We have to do this the hard way to detect unsigned overflow.
6386 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6387 wide_int val
= wi::mul (arg01
, arg1
, sign
, &overflow
);
6388 prod
= force_fit_type (TREE_TYPE (arg00
), val
, -1, overflow
);
6389 neg_overflow
= false;
/* Unsigned division: LO = prod, HI = prod + (arg01 - 1).  */
6391 if (sign
== UNSIGNED
)
6393 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6394 build_int_cst (TREE_TYPE (arg01
), 1));
6397 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6398 val
= wi::add (prod
, tmp
, sign
, &overflow
);
6399 hi
= force_fit_type (TREE_TYPE (arg00
), val
,
6400 -1, overflow
| TREE_OVERFLOW (prod
));
/* Signed division with a non-negative divisor: pick LO/HI by the sign
   of ARG1 (the visible assignments are the bodies of the sign cases).  */
6402 else if (tree_int_cst_sgn (arg01
) >= 0)
6404 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6405 build_int_cst (TREE_TYPE (arg01
), 1));
6406 switch (tree_int_cst_sgn (arg1
))
6409 neg_overflow
= true;
6410 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6415 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6420 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6430 /* A negative divisor reverses the relational operators. */
6431 code
= swap_tree_comparison (code
);
6433 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6434 build_int_cst (TREE_TYPE (arg01
), 1));
6435 switch (tree_int_cst_sgn (arg1
))
6438 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6443 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6448 neg_overflow
= true;
6449 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
/* The remaining visible statements are the bodies of the per-CODE cases:
   EQ uses an in-range check, NE an out-of-range check, and LT/LE/GT/GE
   compare against the appropriate bound, with overflowed bounds folding
   to constant results.  */
6461 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6462 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6463 if (TREE_OVERFLOW (hi
))
6464 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6465 if (TREE_OVERFLOW (lo
))
6466 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6467 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6470 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6471 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6472 if (TREE_OVERFLOW (hi
))
6473 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6474 if (TREE_OVERFLOW (lo
))
6475 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6476 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6479 if (TREE_OVERFLOW (lo
))
6481 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6482 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6484 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6487 if (TREE_OVERFLOW (hi
))
6489 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6490 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6492 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6495 if (TREE_OVERFLOW (hi
))
6497 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6498 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6500 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6503 if (TREE_OVERFLOW (lo
))
6505 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6506 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6508 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6518 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6519 equality/inequality test, then return a simplified form of the test
6520 using a sign testing. Otherwise return NULL. TYPE is the desired
/* NOTE(review): fragmented extract -- the return type, the result-type
   parameter, braces and the fall-through `return NULL_TREE' appear missing
   from this view; verify against the upstream fold-const.c.  */
6524 fold_single_bit_test_into_sign_test (location_t loc
,
6525 enum tree_code code
, tree arg0
, tree arg1
,
6528 /* If this is testing a single bit, we can optimize the test. */
6529 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6530 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6531 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6533 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6534 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6535 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6537 if (arg00
!= NULL_TREE
6538 /* This is only a win if casting to a signed type is cheap,
6539 i.e. when arg00's type is not a partial mode. */
6540 && TYPE_PRECISION (TREE_TYPE (arg00
))
6541 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00
))))
/* Cast to the signed counterpart and compare against zero.  */
6543 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6544 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6546 fold_convert_loc (loc
, stype
, arg00
),
6547 build_int_cst (stype
, 0));
6554 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6555 equality/inequality test, then return a simplified form of
6556 the test using shifts and logical operations. Otherwise return
6557 NULL. TYPE is the desired result type. */
/* NOTE(review): fragmented extract -- the return type, braces, the
   declarations of TEM/ONE/OPS_UNSIGNED, the #else/#endif of the
   LOAD_EXTEND_OP conditional, and the trailing `return inner'/`return
   NULL_TREE' appear missing from this view; verify against the upstream
   fold-const.c.  */
6560 fold_single_bit_test (location_t loc
, enum tree_code code
,
6561 tree arg0
, tree arg1
, tree result_type
)
6563 /* If this is testing a single bit, we can optimize the test. */
6564 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6565 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6566 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6568 tree inner
= TREE_OPERAND (arg0
, 0);
6569 tree type
= TREE_TYPE (arg0
);
6570 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6571 machine_mode operand_mode
= TYPE_MODE (type
);
6573 tree signed_type
, unsigned_type
, intermediate_type
;
6576 /* First, see if we can fold the single bit test into a sign-bit
6578 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6583 /* Otherwise we have (A & C) != 0 where C is a single bit,
6584 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6585 Similarly for (A & C) == 0. */
6587 /* If INNER is a right shift of a constant and it plus BITNUM does
6588 not overflow, adjust BITNUM and INNER. */
6589 if (TREE_CODE (inner
) == RSHIFT_EXPR
6590 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6591 && bitnum
< TYPE_PRECISION (type
)
6592 && wi::ltu_p (TREE_OPERAND (inner
, 1),
6593 TYPE_PRECISION (type
) - bitnum
))
6595 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
6596 inner
= TREE_OPERAND (inner
, 0);
6599 /* If we are going to be able to omit the AND below, we must do our
6600 operations as unsigned. If we must use the AND, we have a choice.
6601 Normally unsigned is faster, but for some machines signed is. */
6602 #ifdef LOAD_EXTEND_OP
6603 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6604 && !flag_syntax_only
) ? 0 : 1;
6609 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6610 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6611 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6612 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
/* Shift the tested bit down to position 0.  */
6615 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6616 inner
, size_int (bitnum
));
6618 one
= build_int_cst (intermediate_type
, 1);
/* For EQ, invert the bit so the result is 1 when the bit was clear.  */
6620 if (code
== EQ_EXPR
)
6621 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6623 /* Put the AND last so it can combine with more things. */
6624 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6626 /* Make sure to return the proper type. */
6627 inner
= fold_convert_loc (loc
, result_type
, inner
);
6634 /* Check whether we are allowed to reorder operands arg0 and arg1,
6635 such that the evaluation of arg1 occurs before arg0. */
/* NOTE(review): fragmented extract -- the return type (bool, presumably),
   braces and the early `return' statements following the first two `if's
   appear missing from this view; verify against upstream fold-const.c.  */
6638 reorder_operands_p (const_tree arg0
, const_tree arg1
)
/* Without -ffloat-store-style evaluation-order constraints, reordering
   is unrestricted (the return consumed here is among the lost lines).  */
6640 if (! flag_evaluation_order
)
6642 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
/* Otherwise, only side-effect-free operands may be reordered.  */
6644 return ! TREE_SIDE_EFFECTS (arg0
)
6645 && ! TREE_SIDE_EFFECTS (arg1
);
6648 /* Test whether it is preferable two swap two operands, ARG0 and
6649 ARG1, for example because ARG0 is an integer constant and ARG1
6650 isn't. If REORDER is true, only recommend swapping if we can
6651 evaluate the operands in reverse order. */
/* NOTE(review): fragmented extract -- the return type, braces and the
   `return 0;'/`return 1;' statements that follow each visible `if' appear
   missing from this view; verify against the upstream fold-const.c.  */
6654 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
/* Constants are kept on the right-hand side.  */
6656 if (CONSTANT_CLASS_P (arg1
))
6658 if (CONSTANT_CLASS_P (arg0
))
6661 STRIP_SIGN_NOPS (arg0
);
6662 STRIP_SIGN_NOPS (arg1
);
6664 if (TREE_CONSTANT (arg1
))
6666 if (TREE_CONSTANT (arg0
))
6669 if (reorder
&& flag_evaluation_order
6670 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6673 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6674 for commutative and comparison operators. Ensuring a canonical
6675 form allows the optimizers to find additional redundancies without
6676 having to explicitly check for both orderings. */
6677 if (TREE_CODE (arg0
) == SSA_NAME
6678 && TREE_CODE (arg1
) == SSA_NAME
6679 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6682 /* Put SSA_NAMEs last. */
6683 if (TREE_CODE (arg1
) == SSA_NAME
)
6685 if (TREE_CODE (arg0
) == SSA_NAME
)
6688 /* Put variables last. */
6697 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6698 ARG0 is extended to a wider type. */
/* NOTE(review): fragmented extract -- the return type, declarations of
   ARG1_UNW/MIN/MAX/ABOVE/BELOW, braces, the switch over CODE whose case
   bodies are the trailing omit_one_operand_loc calls, and several early
   returns appear missing from this view; verify against the upstream
   fold-const.c.  */
6701 fold_widened_comparison (location_t loc
, enum tree_code code
,
6702 tree type
, tree arg0
, tree arg1
)
6704 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6706 tree shorter_type
, outer_type
;
/* Nothing to do if ARG0 is not actually a widening of something.  */
6710 if (arg0_unw
== arg0
)
6712 shorter_type
= TREE_TYPE (arg0_unw
);
6714 #ifdef HAVE_canonicalize_funcptr_for_compare
6715 /* Disable this optimization if we're casting a function pointer
6716 type on targets that require function pointer canonicalization. */
6717 if (HAVE_canonicalize_funcptr_for_compare
6718 && TREE_CODE (shorter_type
) == POINTER_TYPE
6719 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6723 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6726 arg1_unw
= get_unwidened (arg1
, NULL_TREE
);
6728 /* If possible, express the comparison in the shorter mode. */
6729 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6730 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6731 && (TREE_TYPE (arg1_unw
) == shorter_type
6732 || ((TYPE_PRECISION (shorter_type
)
6733 >= TYPE_PRECISION (TREE_TYPE (arg1_unw
)))
6734 && (TYPE_UNSIGNED (shorter_type
)
6735 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw
))))
6736 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6737 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6738 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6739 && int_fits_type_p (arg1_unw
, shorter_type
))))
6740 return fold_build2_loc (loc
, code
, type
, arg0_unw
,
6741 fold_convert_loc (loc
, shorter_type
, arg1_unw
));
6743 if (TREE_CODE (arg1_unw
) != INTEGER_CST
6744 || TREE_CODE (shorter_type
) != INTEGER_TYPE
6745 || !int_fits_type_p (arg1_unw
, shorter_type
))
6748 /* If we are comparing with the integer that does not fit into the range
6749 of the shorter type, the result is known. */
6750 outer_type
= TREE_TYPE (arg1_unw
);
6751 min
= lower_bound_in_type (outer_type
, shorter_type
);
6752 max
= upper_bound_in_type (outer_type
, shorter_type
);
6754 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6756 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
/* The following returns are the bodies of the (missing) switch cases:
   a constant out-of-range of the shorter type makes the comparison's
   result a known 0 or 1, depending on CODE and on ABOVE/BELOW.  */
6763 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6768 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6774 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6776 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6781 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6783 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6792 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6793 ARG0 just the signedness is changed. */
6796 fold_sign_changed_comparison (location_t loc
, enum tree_code code
, tree type
,
6797 tree arg0
, tree arg1
)
6800 tree inner_type
, outer_type
;
6802 if (!CONVERT_EXPR_P (arg0
))
6805 outer_type
= TREE_TYPE (arg0
);
6806 arg0_inner
= TREE_OPERAND (arg0
, 0);
6807 inner_type
= TREE_TYPE (arg0_inner
);
6809 #ifdef HAVE_canonicalize_funcptr_for_compare
6810 /* Disable this optimization if we're casting a function pointer
6811 type on targets that require function pointer canonicalization. */
6812 if (HAVE_canonicalize_funcptr_for_compare
6813 && TREE_CODE (inner_type
) == POINTER_TYPE
6814 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
6818 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
6821 if (TREE_CODE (arg1
) != INTEGER_CST
6822 && !(CONVERT_EXPR_P (arg1
)
6823 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
6826 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
6831 if (POINTER_TYPE_P (inner_type
) != POINTER_TYPE_P (outer_type
))
6834 if (TREE_CODE (arg1
) == INTEGER_CST
)
6835 arg1
= force_fit_type (inner_type
, wi::to_widest (arg1
), 0,
6836 TREE_OVERFLOW (arg1
));
6838 arg1
= fold_convert_loc (loc
, inner_type
, arg1
);
6840 return fold_build2_loc (loc
, code
, type
, arg0_inner
, arg1
);
6844 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6845 means A >= Y && A != MAX, but in this case we know that
6846 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6849 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
6851 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
6853 if (TREE_CODE (bound
) == LT_EXPR
)
6854 a
= TREE_OPERAND (bound
, 0);
6855 else if (TREE_CODE (bound
) == GT_EXPR
)
6856 a
= TREE_OPERAND (bound
, 1);
6860 typea
= TREE_TYPE (a
);
6861 if (!INTEGRAL_TYPE_P (typea
)
6862 && !POINTER_TYPE_P (typea
))
6865 if (TREE_CODE (ineq
) == LT_EXPR
)
6867 a1
= TREE_OPERAND (ineq
, 1);
6868 y
= TREE_OPERAND (ineq
, 0);
6870 else if (TREE_CODE (ineq
) == GT_EXPR
)
6872 a1
= TREE_OPERAND (ineq
, 0);
6873 y
= TREE_OPERAND (ineq
, 1);
6878 if (TREE_TYPE (a1
) != typea
)
6881 if (POINTER_TYPE_P (typea
))
6883 /* Convert the pointer types into integer before taking the difference. */
6884 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
6885 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
6886 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
6889 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
6891 if (!diff
|| !integer_onep (diff
))
6894 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
6897 /* Fold a sum or difference of at least one multiplication.
6898 Returns the folded tree or NULL if no simplification could be made. */
6901 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
6902 tree arg0
, tree arg1
)
6904 tree arg00
, arg01
, arg10
, arg11
;
6905 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
6907 /* (A * C) +- (B * C) -> (A+-B) * C.
6908 (A * C) +- A -> A * (C+-1).
6909 We are most concerned about the case where C is a constant,
6910 but other combinations show up during loop reduction. Since
6911 it is not difficult, try all four possibilities. */
6913 if (TREE_CODE (arg0
) == MULT_EXPR
)
6915 arg00
= TREE_OPERAND (arg0
, 0);
6916 arg01
= TREE_OPERAND (arg0
, 1);
6918 else if (TREE_CODE (arg0
) == INTEGER_CST
)
6920 arg00
= build_one_cst (type
);
6925 /* We cannot generate constant 1 for fract. */
6926 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6929 arg01
= build_one_cst (type
);
6931 if (TREE_CODE (arg1
) == MULT_EXPR
)
6933 arg10
= TREE_OPERAND (arg1
, 0);
6934 arg11
= TREE_OPERAND (arg1
, 1);
6936 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6938 arg10
= build_one_cst (type
);
6939 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6940 the purpose of this canonicalization. */
6941 if (wi::neg_p (arg1
, TYPE_SIGN (TREE_TYPE (arg1
)))
6942 && negate_expr_p (arg1
)
6943 && code
== PLUS_EXPR
)
6945 arg11
= negate_expr (arg1
);
6953 /* We cannot generate constant 1 for fract. */
6954 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6957 arg11
= build_one_cst (type
);
6961 if (operand_equal_p (arg01
, arg11
, 0))
6962 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
6963 else if (operand_equal_p (arg00
, arg10
, 0))
6964 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
6965 else if (operand_equal_p (arg00
, arg11
, 0))
6966 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
6967 else if (operand_equal_p (arg01
, arg10
, 0))
6968 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
6970 /* No identical multiplicands; see if we can find a common
6971 power-of-two factor in non-power-of-two multiplies. This
6972 can help in multi-dimensional array access. */
6973 else if (tree_fits_shwi_p (arg01
)
6974 && tree_fits_shwi_p (arg11
))
6976 HOST_WIDE_INT int01
, int11
, tmp
;
6979 int01
= tree_to_shwi (arg01
);
6980 int11
= tree_to_shwi (arg11
);
6982 /* Move min of absolute values to int11. */
6983 if (absu_hwi (int01
) < absu_hwi (int11
))
6985 tmp
= int01
, int01
= int11
, int11
= tmp
;
6986 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
6993 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
6994 /* The remainder should not be a constant, otherwise we
6995 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6996 increased the number of multiplications necessary. */
6997 && TREE_CODE (arg10
) != INTEGER_CST
)
6999 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7000 build_int_cst (TREE_TYPE (arg00
),
7005 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7010 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7011 fold_build2_loc (loc
, code
, type
,
7012 fold_convert_loc (loc
, type
, alt0
),
7013 fold_convert_loc (loc
, type
, alt1
)),
7014 fold_convert_loc (loc
, type
, same
));
7019 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7020 specified by EXPR into the buffer PTR of length LEN bytes.
7021 Return the number of bytes placed in the buffer, or zero
7025 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7027 tree type
= TREE_TYPE (expr
);
7028 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7029 int byte
, offset
, word
, words
;
7030 unsigned char value
;
7032 if ((off
== -1 && total_bytes
> len
)
7033 || off
>= total_bytes
)
7037 words
= total_bytes
/ UNITS_PER_WORD
;
7039 for (byte
= 0; byte
< total_bytes
; byte
++)
7041 int bitpos
= byte
* BITS_PER_UNIT
;
7042 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7044 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7046 if (total_bytes
> UNITS_PER_WORD
)
7048 word
= byte
/ UNITS_PER_WORD
;
7049 if (WORDS_BIG_ENDIAN
)
7050 word
= (words
- 1) - word
;
7051 offset
= word
* UNITS_PER_WORD
;
7052 if (BYTES_BIG_ENDIAN
)
7053 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7055 offset
+= byte
% UNITS_PER_WORD
;
7058 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7060 && offset
- off
< len
)
7061 ptr
[offset
- off
] = value
;
7063 return MIN (len
, total_bytes
- off
);
7067 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7068 specified by EXPR into the buffer PTR of length LEN bytes.
7069 Return the number of bytes placed in the buffer, or zero
7073 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7075 tree type
= TREE_TYPE (expr
);
7076 machine_mode mode
= TYPE_MODE (type
);
7077 int total_bytes
= GET_MODE_SIZE (mode
);
7078 FIXED_VALUE_TYPE value
;
7079 tree i_value
, i_type
;
7081 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7084 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7086 if (NULL_TREE
== i_type
7087 || TYPE_PRECISION (i_type
) != total_bytes
)
7090 value
= TREE_FIXED_CST (expr
);
7091 i_value
= double_int_to_tree (i_type
, value
.data
);
7093 return native_encode_int (i_value
, ptr
, len
, off
);
7097 /* Subroutine of native_encode_expr. Encode the REAL_CST
7098 specified by EXPR into the buffer PTR of length LEN bytes.
7099 Return the number of bytes placed in the buffer, or zero
7103 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7105 tree type
= TREE_TYPE (expr
);
7106 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7107 int byte
, offset
, word
, words
, bitpos
;
7108 unsigned char value
;
7110 /* There are always 32 bits in each long, no matter the size of
7111 the hosts long. We handle floating point representations with
7115 if ((off
== -1 && total_bytes
> len
)
7116 || off
>= total_bytes
)
7120 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7122 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7124 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7125 bitpos
+= BITS_PER_UNIT
)
7127 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7128 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7130 if (UNITS_PER_WORD
< 4)
7132 word
= byte
/ UNITS_PER_WORD
;
7133 if (WORDS_BIG_ENDIAN
)
7134 word
= (words
- 1) - word
;
7135 offset
= word
* UNITS_PER_WORD
;
7136 if (BYTES_BIG_ENDIAN
)
7137 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7139 offset
+= byte
% UNITS_PER_WORD
;
7142 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7143 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7145 && offset
- off
< len
)
7146 ptr
[offset
- off
] = value
;
7148 return MIN (len
, total_bytes
- off
);
7151 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7152 specified by EXPR into the buffer PTR of length LEN bytes.
7153 Return the number of bytes placed in the buffer, or zero
7157 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7162 part
= TREE_REALPART (expr
);
7163 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7167 part
= TREE_IMAGPART (expr
);
7169 off
= MAX (0, off
- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part
))));
7170 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
, off
);
7174 return rsize
+ isize
;
7178 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7179 specified by EXPR into the buffer PTR of length LEN bytes.
7180 Return the number of bytes placed in the buffer, or zero
7184 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7191 count
= VECTOR_CST_NELTS (expr
);
7192 itype
= TREE_TYPE (TREE_TYPE (expr
));
7193 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7194 for (i
= 0; i
< count
; i
++)
7201 elem
= VECTOR_CST_ELT (expr
, i
);
7202 int res
= native_encode_expr (elem
, ptr
+offset
, len
-offset
, off
);
7203 if ((off
== -1 && res
!= size
)
7216 /* Subroutine of native_encode_expr. Encode the STRING_CST
7217 specified by EXPR into the buffer PTR of length LEN bytes.
7218 Return the number of bytes placed in the buffer, or zero
7222 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7224 tree type
= TREE_TYPE (expr
);
7225 HOST_WIDE_INT total_bytes
;
7227 if (TREE_CODE (type
) != ARRAY_TYPE
7228 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7229 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7230 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7232 total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (type
));
7233 if ((off
== -1 && total_bytes
> len
)
7234 || off
>= total_bytes
)
7238 if (TREE_STRING_LENGTH (expr
) - off
< MIN (total_bytes
, len
))
7241 if (off
< TREE_STRING_LENGTH (expr
))
7243 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7244 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7246 memset (ptr
+ written
, 0,
7247 MIN (total_bytes
- written
, len
- written
));
7250 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, MIN (total_bytes
, len
));
7251 return MIN (total_bytes
- off
, len
);
7255 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7256 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7257 buffer PTR of length LEN bytes. If OFF is not -1 then start
7258 the encoding at byte offset OFF and encode at most LEN bytes.
7259 Return the number of bytes placed in the buffer, or zero upon failure. */
7262 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7264 switch (TREE_CODE (expr
))
7267 return native_encode_int (expr
, ptr
, len
, off
);
7270 return native_encode_real (expr
, ptr
, len
, off
);
7273 return native_encode_fixed (expr
, ptr
, len
, off
);
7276 return native_encode_complex (expr
, ptr
, len
, off
);
7279 return native_encode_vector (expr
, ptr
, len
, off
);
7282 return native_encode_string (expr
, ptr
, len
, off
);
7290 /* Subroutine of native_interpret_expr. Interpret the contents of
7291 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7292 If the buffer cannot be interpreted, return NULL_TREE. */
7295 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7297 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7299 if (total_bytes
> len
7300 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7303 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
7305 return wide_int_to_tree (type
, result
);
7309 /* Subroutine of native_interpret_expr. Interpret the contents of
7310 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7311 If the buffer cannot be interpreted, return NULL_TREE. */
7314 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
7316 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7318 FIXED_VALUE_TYPE fixed_value
;
7320 if (total_bytes
> len
7321 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7324 result
= double_int::from_buffer (ptr
, total_bytes
);
7325 fixed_value
= fixed_from_double_int (result
, TYPE_MODE (type
));
7327 return build_fixed (type
, fixed_value
);
7331 /* Subroutine of native_interpret_expr. Interpret the contents of
7332 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7333 If the buffer cannot be interpreted, return NULL_TREE. */
7336 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7338 machine_mode mode
= TYPE_MODE (type
);
7339 int total_bytes
= GET_MODE_SIZE (mode
);
7340 int byte
, offset
, word
, words
, bitpos
;
7341 unsigned char value
;
7342 /* There are always 32 bits in each long, no matter the size of
7343 the hosts long. We handle floating point representations with
7348 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7349 if (total_bytes
> len
|| total_bytes
> 24)
7351 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7353 memset (tmp
, 0, sizeof (tmp
));
7354 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7355 bitpos
+= BITS_PER_UNIT
)
7357 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7358 if (UNITS_PER_WORD
< 4)
7360 word
= byte
/ UNITS_PER_WORD
;
7361 if (WORDS_BIG_ENDIAN
)
7362 word
= (words
- 1) - word
;
7363 offset
= word
* UNITS_PER_WORD
;
7364 if (BYTES_BIG_ENDIAN
)
7365 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7367 offset
+= byte
% UNITS_PER_WORD
;
7370 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7371 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7373 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7376 real_from_target (&r
, tmp
, mode
);
7377 return build_real (type
, r
);
7381 /* Subroutine of native_interpret_expr. Interpret the contents of
7382 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7383 If the buffer cannot be interpreted, return NULL_TREE. */
7386 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7388 tree etype
, rpart
, ipart
;
7391 etype
= TREE_TYPE (type
);
7392 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7395 rpart
= native_interpret_expr (etype
, ptr
, size
);
7398 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7401 return build_complex (type
, rpart
, ipart
);
7405 /* Subroutine of native_interpret_expr. Interpret the contents of
7406 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7407 If the buffer cannot be interpreted, return NULL_TREE. */
7410 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7416 etype
= TREE_TYPE (type
);
7417 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7418 count
= TYPE_VECTOR_SUBPARTS (type
);
7419 if (size
* count
> len
)
7422 elements
= XALLOCAVEC (tree
, count
);
7423 for (i
= count
- 1; i
>= 0; i
--)
7425 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7430 return build_vector (type
, elements
);
7434 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7435 the buffer PTR of length LEN as a constant of type TYPE. For
7436 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7437 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7438 return NULL_TREE. */
7441 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7443 switch (TREE_CODE (type
))
7449 case REFERENCE_TYPE
:
7450 return native_interpret_int (type
, ptr
, len
);
7453 return native_interpret_real (type
, ptr
, len
);
7455 case FIXED_POINT_TYPE
:
7456 return native_interpret_fixed (type
, ptr
, len
);
7459 return native_interpret_complex (type
, ptr
, len
);
7462 return native_interpret_vector (type
, ptr
, len
);
7469 /* Returns true if we can interpret the contents of a native encoding
7473 can_native_interpret_type_p (tree type
)
7475 switch (TREE_CODE (type
))
7481 case REFERENCE_TYPE
:
7482 case FIXED_POINT_TYPE
:
7492 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7493 TYPE at compile-time. If we're unable to perform the conversion
7494 return NULL_TREE. */
7497 fold_view_convert_expr (tree type
, tree expr
)
7499 /* We support up to 512-bit values (for V8DFmode). */
7500 unsigned char buffer
[64];
7503 /* Check that the host and target are sane. */
7504 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7507 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7511 return native_interpret_expr (type
, buffer
, len
);
7514 /* Build an expression for the address of T. Folds away INDIRECT_REF
7515 to avoid confusing the gimplify process. */
7518 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7520 /* The size of the object is not relevant when talking about its address. */
7521 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7522 t
= TREE_OPERAND (t
, 0);
7524 if (TREE_CODE (t
) == INDIRECT_REF
)
7526 t
= TREE_OPERAND (t
, 0);
7528 if (TREE_TYPE (t
) != ptrtype
)
7529 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7531 else if (TREE_CODE (t
) == MEM_REF
7532 && integer_zerop (TREE_OPERAND (t
, 1)))
7533 return TREE_OPERAND (t
, 0);
7534 else if (TREE_CODE (t
) == MEM_REF
7535 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7536 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7537 TREE_OPERAND (t
, 0),
7538 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7539 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7541 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7543 if (TREE_TYPE (t
) != ptrtype
)
7544 t
= fold_convert_loc (loc
, ptrtype
, t
);
7547 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7552 /* Build an expression for the address of T. */
7555 build_fold_addr_expr_loc (location_t loc
, tree t
)
7557 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7559 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7562 static bool vec_cst_ctor_to_array (tree
, tree
*);
7564 /* Fold a unary expression of code CODE and type TYPE with operand
7565 OP0. Return the folded expression if folding is successful.
7566 Otherwise, return NULL_TREE. */
7569 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7573 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7575 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7576 && TREE_CODE_LENGTH (code
) == 1);
7578 tem
= generic_simplify (loc
, code
, type
, op0
);
7585 if (CONVERT_EXPR_CODE_P (code
)
7586 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7588 /* Don't use STRIP_NOPS, because signedness of argument type
7590 STRIP_SIGN_NOPS (arg0
);
7594 /* Strip any conversions that don't change the mode. This
7595 is safe for every expression, except for a comparison
7596 expression because its signedness is derived from its
7599 Note that this is done as an internal manipulation within
7600 the constant folder, in order to find the simplest
7601 representation of the arguments so that their form can be
7602 studied. In any cases, the appropriate type conversions
7603 should be put back in the tree that will get out of the
7609 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7611 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7612 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7613 fold_build1_loc (loc
, code
, type
,
7614 fold_convert_loc (loc
, TREE_TYPE (op0
),
7615 TREE_OPERAND (arg0
, 1))));
7616 else if (TREE_CODE (arg0
) == COND_EXPR
)
7618 tree arg01
= TREE_OPERAND (arg0
, 1);
7619 tree arg02
= TREE_OPERAND (arg0
, 2);
7620 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7621 arg01
= fold_build1_loc (loc
, code
, type
,
7622 fold_convert_loc (loc
,
7623 TREE_TYPE (op0
), arg01
));
7624 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7625 arg02
= fold_build1_loc (loc
, code
, type
,
7626 fold_convert_loc (loc
,
7627 TREE_TYPE (op0
), arg02
));
7628 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7631 /* If this was a conversion, and all we did was to move into
7632 inside the COND_EXPR, bring it back out. But leave it if
7633 it is a conversion from integer to integer and the
7634 result precision is no wider than a word since such a
7635 conversion is cheap and may be optimized away by combine,
7636 while it couldn't if it were outside the COND_EXPR. Then return
7637 so we don't get into an infinite recursion loop taking the
7638 conversion out and then back in. */
7640 if ((CONVERT_EXPR_CODE_P (code
)
7641 || code
== NON_LVALUE_EXPR
)
7642 && TREE_CODE (tem
) == COND_EXPR
7643 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7644 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7645 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7646 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7647 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7648 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7649 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7651 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7652 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7653 || flag_syntax_only
))
7654 tem
= build1_loc (loc
, code
, type
,
7656 TREE_TYPE (TREE_OPERAND
7657 (TREE_OPERAND (tem
, 1), 0)),
7658 TREE_OPERAND (tem
, 0),
7659 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7660 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7668 case NON_LVALUE_EXPR
:
7669 if (!maybe_lvalue_p (op0
))
7670 return fold_convert_loc (loc
, type
, op0
);
7675 case FIX_TRUNC_EXPR
:
7676 if (COMPARISON_CLASS_P (op0
))
7678 /* If we have (type) (a CMP b) and type is an integral type, return
7679 new expression involving the new type. Canonicalize
7680 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7682 Do not fold the result as that would not simplify further, also
7683 folding again results in recursions. */
7684 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7685 return build2_loc (loc
, TREE_CODE (op0
), type
,
7686 TREE_OPERAND (op0
, 0),
7687 TREE_OPERAND (op0
, 1));
7688 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7689 && TREE_CODE (type
) != VECTOR_TYPE
)
7690 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7691 constant_boolean_node (true, type
),
7692 constant_boolean_node (false, type
));
7695 /* Handle (T *)&A.B.C for A being of type T and B and C
7696 living at offset zero. This occurs frequently in
7697 C++ upcasting and then accessing the base. */
7698 if (TREE_CODE (op0
) == ADDR_EXPR
7699 && POINTER_TYPE_P (type
)
7700 && handled_component_p (TREE_OPERAND (op0
, 0)))
7702 HOST_WIDE_INT bitsize
, bitpos
;
7705 int unsignedp
, volatilep
;
7706 tree base
= TREE_OPERAND (op0
, 0);
7707 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7708 &mode
, &unsignedp
, &volatilep
, false);
7709 /* If the reference was to a (constant) zero offset, we can use
7710 the address of the base if it has the same base type
7711 as the result type and the pointer type is unqualified. */
7712 if (! offset
&& bitpos
== 0
7713 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7714 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7715 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7716 return fold_convert_loc (loc
, type
,
7717 build_fold_addr_expr_loc (loc
, base
));
7720 if (TREE_CODE (op0
) == MODIFY_EXPR
7721 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7722 /* Detect assigning a bitfield. */
7723 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7725 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7727 /* Don't leave an assignment inside a conversion
7728 unless assigning a bitfield. */
7729 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7730 /* First do the assignment, then return converted constant. */
7731 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7732 TREE_NO_WARNING (tem
) = 1;
7733 TREE_USED (tem
) = 1;
7737 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7738 constants (if x has signed type, the sign bit cannot be set
7739 in c). This folds extension into the BIT_AND_EXPR.
7740 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7741 very likely don't have maximal range for their precision and this
7742 transformation effectively doesn't preserve non-maximal ranges. */
7743 if (TREE_CODE (type
) == INTEGER_TYPE
7744 && TREE_CODE (op0
) == BIT_AND_EXPR
7745 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7747 tree and_expr
= op0
;
7748 tree and0
= TREE_OPERAND (and_expr
, 0);
7749 tree and1
= TREE_OPERAND (and_expr
, 1);
7752 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7753 || (TYPE_PRECISION (type
)
7754 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7756 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7757 <= HOST_BITS_PER_WIDE_INT
7758 && tree_fits_uhwi_p (and1
))
7760 unsigned HOST_WIDE_INT cst
;
7762 cst
= tree_to_uhwi (and1
);
7763 cst
&= HOST_WIDE_INT_M1U
7764 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7765 change
= (cst
== 0);
7766 #ifdef LOAD_EXTEND_OP
7768 && !flag_syntax_only
7769 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7772 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7773 and0
= fold_convert_loc (loc
, uns
, and0
);
7774 and1
= fold_convert_loc (loc
, uns
, and1
);
7780 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
7781 TREE_OVERFLOW (and1
));
7782 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7783 fold_convert_loc (loc
, type
, and0
), tem
);
7787 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7788 when one of the new casts will fold away. Conservatively we assume
7789 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7790 if (POINTER_TYPE_P (type
)
7791 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7792 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7793 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7794 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7795 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7797 tree arg00
= TREE_OPERAND (arg0
, 0);
7798 tree arg01
= TREE_OPERAND (arg0
, 1);
7800 return fold_build_pointer_plus_loc
7801 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7804 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7805 of the same precision, and X is an integer type not narrower than
7806 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7807 if (INTEGRAL_TYPE_P (type
)
7808 && TREE_CODE (op0
) == BIT_NOT_EXPR
7809 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7810 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7811 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7813 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7814 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7815 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7816 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7817 fold_convert_loc (loc
, type
, tem
));
7820 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7821 type of X and Y (integer types only). */
7822 if (INTEGRAL_TYPE_P (type
)
7823 && TREE_CODE (op0
) == MULT_EXPR
7824 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7825 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7827 /* Be careful not to introduce new overflows. */
7829 if (TYPE_OVERFLOW_WRAPS (type
))
7832 mult_type
= unsigned_type_for (type
);
7834 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7836 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7837 fold_convert_loc (loc
, mult_type
,
7838 TREE_OPERAND (op0
, 0)),
7839 fold_convert_loc (loc
, mult_type
,
7840 TREE_OPERAND (op0
, 1)));
7841 return fold_convert_loc (loc
, type
, tem
);
7845 tem
= fold_convert_const (code
, type
, arg0
);
7846 return tem
? tem
: NULL_TREE
;
7848 case ADDR_SPACE_CONVERT_EXPR
:
7849 if (integer_zerop (arg0
))
7850 return fold_convert_const (code
, type
, arg0
);
7853 case FIXED_CONVERT_EXPR
:
7854 tem
= fold_convert_const (code
, type
, arg0
);
7855 return tem
? tem
: NULL_TREE
;
7857 case VIEW_CONVERT_EXPR
:
7858 if (TREE_CODE (op0
) == MEM_REF
)
7859 return fold_build2_loc (loc
, MEM_REF
, type
,
7860 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
7862 return fold_view_convert_expr (type
, op0
);
7865 tem
= fold_negate_expr (loc
, arg0
);
7867 return fold_convert_loc (loc
, type
, tem
);
7871 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
7872 return fold_abs_const (arg0
, type
);
7873 /* Convert fabs((double)float) into (double)fabsf(float). */
7874 else if (TREE_CODE (arg0
) == NOP_EXPR
7875 && TREE_CODE (type
) == REAL_TYPE
)
7877 tree targ0
= strip_float_extensions (arg0
);
7879 return fold_convert_loc (loc
, type
,
7880 fold_build1_loc (loc
, ABS_EXPR
,
7884 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7885 else if (TREE_CODE (arg0
) == ABS_EXPR
)
7888 /* Strip sign ops from argument. */
7889 if (TREE_CODE (type
) == REAL_TYPE
)
7891 tem
= fold_strip_sign_ops (arg0
);
7893 return fold_build1_loc (loc
, ABS_EXPR
, type
,
7894 fold_convert_loc (loc
, type
, tem
));
7899 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7900 return fold_convert_loc (loc
, type
, arg0
);
7901 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
7903 tree itype
= TREE_TYPE (type
);
7904 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
7905 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
7906 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
7907 negate_expr (ipart
));
7909 if (TREE_CODE (arg0
) == COMPLEX_CST
)
7911 tree itype
= TREE_TYPE (type
);
7912 tree rpart
= fold_convert_loc (loc
, itype
, TREE_REALPART (arg0
));
7913 tree ipart
= fold_convert_loc (loc
, itype
, TREE_IMAGPART (arg0
));
7914 return build_complex (type
, rpart
, negate_expr (ipart
));
7916 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7917 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
7921 if (TREE_CODE (arg0
) == INTEGER_CST
)
7922 return fold_not_const (arg0
, type
);
7923 /* Convert ~ (-A) to A - 1. */
7924 else if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
7925 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
7926 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0)),
7927 build_int_cst (type
, 1));
7928 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7929 else if (INTEGRAL_TYPE_P (type
)
7930 && ((TREE_CODE (arg0
) == MINUS_EXPR
7931 && integer_onep (TREE_OPERAND (arg0
, 1)))
7932 || (TREE_CODE (arg0
) == PLUS_EXPR
7933 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
7934 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
7935 fold_convert_loc (loc
, type
,
7936 TREE_OPERAND (arg0
, 0)));
7937 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7938 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7939 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7940 fold_convert_loc (loc
, type
,
7941 TREE_OPERAND (arg0
, 0)))))
7942 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
7943 fold_convert_loc (loc
, type
,
7944 TREE_OPERAND (arg0
, 1)));
7945 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7946 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
7947 fold_convert_loc (loc
, type
,
7948 TREE_OPERAND (arg0
, 1)))))
7949 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
7950 fold_convert_loc (loc
, type
,
7951 TREE_OPERAND (arg0
, 0)), tem
);
7952 /* Perform BIT_NOT_EXPR on each element individually. */
7953 else if (TREE_CODE (arg0
) == VECTOR_CST
)
7957 unsigned count
= VECTOR_CST_NELTS (arg0
), i
;
7959 elements
= XALLOCAVEC (tree
, count
);
7960 for (i
= 0; i
< count
; i
++)
7962 elem
= VECTOR_CST_ELT (arg0
, i
);
7963 elem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
7964 if (elem
== NULL_TREE
)
7969 return build_vector (type
, elements
);
7971 else if (COMPARISON_CLASS_P (arg0
)
7972 && (VECTOR_TYPE_P (type
)
7973 || (INTEGRAL_TYPE_P (type
) && TYPE_PRECISION (type
) == 1)))
7975 tree op_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
7976 enum tree_code subcode
= invert_tree_comparison (TREE_CODE (arg0
),
7977 HONOR_NANS (TYPE_MODE (op_type
)));
7978 if (subcode
!= ERROR_MARK
)
7979 return build2_loc (loc
, subcode
, type
, TREE_OPERAND (arg0
, 0),
7980 TREE_OPERAND (arg0
, 1));
7986 case TRUTH_NOT_EXPR
:
7987 /* Note that the operand of this must be an int
7988 and its values must be 0 or 1.
7989 ("true" is a fixed value perhaps depending on the language,
7990 but we don't handle values other than 1 correctly yet.) */
7991 tem
= fold_truth_not_expr (loc
, arg0
);
7994 return fold_convert_loc (loc
, type
, tem
);
7997 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7998 return fold_convert_loc (loc
, type
, arg0
);
7999 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8000 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 0),
8001 TREE_OPERAND (arg0
, 1));
8002 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8003 return fold_convert_loc (loc
, type
, TREE_REALPART (arg0
));
8004 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8006 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8007 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8008 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8009 TREE_OPERAND (arg0
, 0)),
8010 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8011 TREE_OPERAND (arg0
, 1)));
8012 return fold_convert_loc (loc
, type
, tem
);
8014 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8016 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8017 tem
= fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8018 TREE_OPERAND (arg0
, 0));
8019 return fold_convert_loc (loc
, type
, tem
);
8021 if (TREE_CODE (arg0
) == CALL_EXPR
)
8023 tree fn
= get_callee_fndecl (arg0
);
8024 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8025 switch (DECL_FUNCTION_CODE (fn
))
8027 CASE_FLT_FN (BUILT_IN_CEXPI
):
8028 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
8030 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8040 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8041 return build_zero_cst (type
);
8042 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8043 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 1),
8044 TREE_OPERAND (arg0
, 0));
8045 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8046 return fold_convert_loc (loc
, type
, TREE_IMAGPART (arg0
));
8047 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8049 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8050 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8051 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8052 TREE_OPERAND (arg0
, 0)),
8053 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8054 TREE_OPERAND (arg0
, 1)));
8055 return fold_convert_loc (loc
, type
, tem
);
8057 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8059 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8060 tem
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
8061 return fold_convert_loc (loc
, type
, negate_expr (tem
));
8063 if (TREE_CODE (arg0
) == CALL_EXPR
)
8065 tree fn
= get_callee_fndecl (arg0
);
8066 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8067 switch (DECL_FUNCTION_CODE (fn
))
8069 CASE_FLT_FN (BUILT_IN_CEXPI
):
8070 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
8072 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8082 /* Fold *&X to X if X is an lvalue. */
8083 if (TREE_CODE (op0
) == ADDR_EXPR
)
8085 tree op00
= TREE_OPERAND (op0
, 0);
8086 if ((TREE_CODE (op00
) == VAR_DECL
8087 || TREE_CODE (op00
) == PARM_DECL
8088 || TREE_CODE (op00
) == RESULT_DECL
)
8089 && !TREE_READONLY (op00
))
8094 case VEC_UNPACK_LO_EXPR
:
8095 case VEC_UNPACK_HI_EXPR
:
8096 case VEC_UNPACK_FLOAT_LO_EXPR
:
8097 case VEC_UNPACK_FLOAT_HI_EXPR
:
8099 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
8101 enum tree_code subcode
;
8103 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2);
8104 if (TREE_CODE (arg0
) != VECTOR_CST
)
8107 elts
= XALLOCAVEC (tree
, nelts
* 2);
8108 if (!vec_cst_ctor_to_array (arg0
, elts
))
8111 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
8112 || code
== VEC_UNPACK_FLOAT_LO_EXPR
))
8115 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
8118 subcode
= FLOAT_EXPR
;
8120 for (i
= 0; i
< nelts
; i
++)
8122 elts
[i
] = fold_convert_const (subcode
, TREE_TYPE (type
), elts
[i
]);
8123 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
8127 return build_vector (type
, elts
);
8130 case REDUC_MIN_EXPR
:
8131 case REDUC_MAX_EXPR
:
8132 case REDUC_PLUS_EXPR
:
8134 unsigned int nelts
, i
;
8136 enum tree_code subcode
;
8138 if (TREE_CODE (op0
) != VECTOR_CST
)
8140 nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0
));
8142 elts
= XALLOCAVEC (tree
, nelts
);
8143 if (!vec_cst_ctor_to_array (op0
, elts
))
8148 case REDUC_MIN_EXPR
: subcode
= MIN_EXPR
; break;
8149 case REDUC_MAX_EXPR
: subcode
= MAX_EXPR
; break;
8150 case REDUC_PLUS_EXPR
: subcode
= PLUS_EXPR
; break;
8151 default: gcc_unreachable ();
8154 for (i
= 1; i
< nelts
; i
++)
8156 elts
[0] = const_binop (subcode
, elts
[0], elts
[i
]);
8157 if (elts
[0] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[0]))
8166 } /* switch (code) */
8170 /* If the operation was a conversion do _not_ mark a resulting constant
8171 with TREE_OVERFLOW if the original constant was not. These conversions
8172 have implementation defined behavior and retaining the TREE_OVERFLOW
8173 flag here would confuse later passes such as VRP. */
8175 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
8176 tree type
, tree op0
)
8178 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
8180 && TREE_CODE (res
) == INTEGER_CST
8181 && TREE_CODE (op0
) == INTEGER_CST
8182 && CONVERT_EXPR_CODE_P (code
))
8183 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
8188 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8189 operands OP0 and OP1. LOC is the location of the resulting expression.
8190 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8191 Return the folded expression if folding is successful. Otherwise,
8192 return NULL_TREE. */
8194 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
8195 tree arg0
, tree arg1
, tree op0
, tree op1
)
8199 /* We only do these simplifications if we are optimizing. */
8203 /* Check for things like (A || B) && (A || C). We can convert this
8204 to A || (B && C). Note that either operator can be any of the four
8205 truth and/or operations and the transformation will still be
8206 valid. Also note that we only care about order for the
8207 ANDIF and ORIF operators. If B contains side effects, this
8208 might change the truth-value of A. */
8209 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8210 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8211 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8212 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8213 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8214 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8216 tree a00
= TREE_OPERAND (arg0
, 0);
8217 tree a01
= TREE_OPERAND (arg0
, 1);
8218 tree a10
= TREE_OPERAND (arg1
, 0);
8219 tree a11
= TREE_OPERAND (arg1
, 1);
8220 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8221 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8222 && (code
== TRUTH_AND_EXPR
8223 || code
== TRUTH_OR_EXPR
));
8225 if (operand_equal_p (a00
, a10
, 0))
8226 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8227 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8228 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8229 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8230 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8231 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8232 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8233 fold_build2_loc (loc
, code
, type
, a00
, a11
));
8235 /* This case if tricky because we must either have commutative
8236 operators or else A10 must not have side-effects. */
8238 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8239 && operand_equal_p (a01
, a11
, 0))
8240 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8241 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8245 /* See if we can build a range comparison. */
8246 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
8249 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8250 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8252 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8254 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8257 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8258 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8260 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8262 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
8265 /* Check for the possibility of merging component references. If our
8266 lhs is another similar operation, try to merge its rhs with our
8267 rhs. Then try to merge our lhs and rhs. */
8268 if (TREE_CODE (arg0
) == code
8269 && 0 != (tem
= fold_truth_andor_1 (loc
, code
, type
,
8270 TREE_OPERAND (arg0
, 1), arg1
)))
8271 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8273 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
8276 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8277 && (code
== TRUTH_AND_EXPR
8278 || code
== TRUTH_ANDIF_EXPR
8279 || code
== TRUTH_OR_EXPR
8280 || code
== TRUTH_ORIF_EXPR
))
8282 enum tree_code ncode
, icode
;
8284 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
8285 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
8286 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
8288 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8289 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8290 We don't want to pack more than two leafs to a non-IF AND/OR
8292 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8293 equal to IF-CODE, then we don't want to add right-hand operand.
8294 If the inner right-hand side of left-hand operand has
8295 side-effects, or isn't simple, then we can't add to it,
8296 as otherwise we might destroy if-sequence. */
8297 if (TREE_CODE (arg0
) == icode
8298 && simple_operand_p_2 (arg1
)
8299 /* Needed for sequence points to handle trappings, and
8301 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
8303 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
8305 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
8308 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8309 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8310 else if (TREE_CODE (arg1
) == icode
8311 && simple_operand_p_2 (arg0
)
8312 /* Needed for sequence points to handle trappings, and
8314 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
8316 tem
= fold_build2_loc (loc
, ncode
, type
,
8317 arg0
, TREE_OPERAND (arg1
, 0));
8318 return fold_build2_loc (loc
, icode
, type
, tem
,
8319 TREE_OPERAND (arg1
, 1));
8321 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8323 For sequence point consistancy, we need to check for trapping,
8324 and side-effects. */
8325 else if (code
== icode
&& simple_operand_p_2 (arg0
)
8326 && simple_operand_p_2 (arg1
))
8327 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
8333 /* Fold a binary expression of code CODE and type TYPE with operands
8334 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8335 Return the folded expression if folding is successful. Otherwise,
8336 return NULL_TREE. */
8339 fold_minmax (location_t loc
, enum tree_code code
, tree type
, tree op0
, tree op1
)
8341 enum tree_code compl_code
;
8343 if (code
== MIN_EXPR
)
8344 compl_code
= MAX_EXPR
;
8345 else if (code
== MAX_EXPR
)
8346 compl_code
= MIN_EXPR
;
8350 /* MIN (MAX (a, b), b) == b. */
8351 if (TREE_CODE (op0
) == compl_code
8352 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
8353 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 0));
8355 /* MIN (MAX (b, a), b) == b. */
8356 if (TREE_CODE (op0
) == compl_code
8357 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
8358 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
8359 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 1));
8361 /* MIN (a, MAX (a, b)) == a. */
8362 if (TREE_CODE (op1
) == compl_code
8363 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
8364 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
8365 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 1));
8367 /* MIN (a, MAX (b, a)) == a. */
8368 if (TREE_CODE (op1
) == compl_code
8369 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
8370 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
8371 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 0));
8376 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8377 by changing CODE to reduce the magnitude of constants involved in
8378 ARG0 of the comparison.
8379 Returns a canonicalized comparison tree if a simplification was
8380 possible, otherwise returns NULL_TREE.
8381 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8382 valid if signed overflow is undefined. */
8385 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8386 tree arg0
, tree arg1
,
8387 bool *strict_overflow_p
)
8389 enum tree_code code0
= TREE_CODE (arg0
);
8390 tree t
, cst0
= NULL_TREE
;
8394 /* Match A +- CST code arg1 and CST code arg1. We can change the
8395 first form only if overflow is undefined. */
8396 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8397 /* In principle pointers also have undefined overflow behavior,
8398 but that causes problems elsewhere. */
8399 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8400 && (code0
== MINUS_EXPR
8401 || code0
== PLUS_EXPR
)
8402 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8403 || code0
== INTEGER_CST
))
8406 /* Identify the constant in arg0 and its sign. */
8407 if (code0
== INTEGER_CST
)
8410 cst0
= TREE_OPERAND (arg0
, 1);
8411 sgn0
= tree_int_cst_sgn (cst0
);
8413 /* Overflowed constants and zero will cause problems. */
8414 if (integer_zerop (cst0
)
8415 || TREE_OVERFLOW (cst0
))
8418 /* See if we can reduce the magnitude of the constant in
8419 arg0 by changing the comparison code. */
8420 if (code0
== INTEGER_CST
)
8422 /* CST <= arg1 -> CST-1 < arg1. */
8423 if (code
== LE_EXPR
&& sgn0
== 1)
8425 /* -CST < arg1 -> -CST-1 <= arg1. */
8426 else if (code
== LT_EXPR
&& sgn0
== -1)
8428 /* CST > arg1 -> CST-1 >= arg1. */
8429 else if (code
== GT_EXPR
&& sgn0
== 1)
8431 /* -CST >= arg1 -> -CST-1 > arg1. */
8432 else if (code
== GE_EXPR
&& sgn0
== -1)
8436 /* arg1 code' CST' might be more canonical. */
8441 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8443 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8445 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8446 else if (code
== GT_EXPR
8447 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8449 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8450 else if (code
== LE_EXPR
8451 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8453 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8454 else if (code
== GE_EXPR
8455 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8459 *strict_overflow_p
= true;
8462 /* Now build the constant reduced in magnitude. But not if that
8463 would produce one outside of its types range. */
8464 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8466 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8467 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8469 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8470 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8471 /* We cannot swap the comparison here as that would cause us to
8472 endlessly recurse. */
8475 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8476 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8477 if (code0
!= INTEGER_CST
)
8478 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8479 t
= fold_convert (TREE_TYPE (arg1
), t
);
8481 /* If swapping might yield to a more canonical form, do so. */
8483 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, arg1
, t
);
8485 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8488 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8489 overflow further. Try to decrease the magnitude of constants involved
8490 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8491 and put sole constants at the second argument position.
8492 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8495 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8496 tree arg0
, tree arg1
)
8499 bool strict_overflow_p
;
8500 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8501 "when reducing constant in comparison");
8503 /* Try canonicalization by simplifying arg0. */
8504 strict_overflow_p
= false;
8505 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8506 &strict_overflow_p
);
8509 if (strict_overflow_p
)
8510 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8514 /* Try canonicalization by simplifying arg1 using the swapped
8516 code
= swap_tree_comparison (code
);
8517 strict_overflow_p
= false;
8518 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8519 &strict_overflow_p
);
8520 if (t
&& strict_overflow_p
)
8521 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8525 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8526 space. This is used to avoid issuing overflow warnings for
8527 expressions like &p->x which can not wrap. */
8530 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8532 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8539 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
8540 if (offset
== NULL_TREE
)
8541 wi_offset
= wi::zero (precision
);
8542 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8548 wide_int units
= wi::shwi (bitpos
/ BITS_PER_UNIT
, precision
);
8549 wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
8553 if (!wi::fits_uhwi_p (total
))
8556 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8560 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8562 if (TREE_CODE (base
) == ADDR_EXPR
)
8564 HOST_WIDE_INT base_size
;
8566 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8567 if (base_size
> 0 && size
< base_size
)
8571 return total
.to_uhwi () > (unsigned HOST_WIDE_INT
) size
;
8574 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8575 kind INTEGER_CST. This makes sure to properly sign-extend the
8578 static HOST_WIDE_INT
8579 size_low_cst (const_tree t
)
8581 HOST_WIDE_INT w
= TREE_INT_CST_ELT (t
, 0);
8582 int prec
= TYPE_PRECISION (TREE_TYPE (t
));
8583 if (prec
< HOST_BITS_PER_WIDE_INT
)
8584 return sext_hwi (w
, prec
);
8588 /* Subroutine of fold_binary. This routine performs all of the
8589 transformations that are common to the equality/inequality
8590 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8591 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8592 fold_binary should call fold_binary. Fold a comparison with
8593 tree code CODE and type TYPE with operands OP0 and OP1. Return
8594 the folded comparison or NULL_TREE. */
8597 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8600 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
8601 tree arg0
, arg1
, tem
;
8606 STRIP_SIGN_NOPS (arg0
);
8607 STRIP_SIGN_NOPS (arg1
);
8609 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8610 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8611 && (equality_code
|| TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
8612 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8613 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8614 && TREE_CODE (arg1
) == INTEGER_CST
8615 && !TREE_OVERFLOW (arg1
))
8617 const enum tree_code
8618 reverse_op
= TREE_CODE (arg0
) == PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
;
8619 tree const1
= TREE_OPERAND (arg0
, 1);
8620 tree const2
= fold_convert_loc (loc
, TREE_TYPE (const1
), arg1
);
8621 tree variable
= TREE_OPERAND (arg0
, 0);
8622 tree new_const
= int_const_binop (reverse_op
, const2
, const1
);
8624 /* If the constant operation overflowed this can be
8625 simplified as a comparison against INT_MAX/INT_MIN. */
8626 if (TREE_OVERFLOW (new_const
)
8627 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
8629 int const1_sgn
= tree_int_cst_sgn (const1
);
8630 enum tree_code code2
= code
;
8632 /* Get the sign of the constant on the lhs if the
8633 operation were VARIABLE + CONST1. */
8634 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8635 const1_sgn
= -const1_sgn
;
8637 /* The sign of the constant determines if we overflowed
8638 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8639 Canonicalize to the INT_MIN overflow by swapping the comparison
8641 if (const1_sgn
== -1)
8642 code2
= swap_tree_comparison (code
);
8644 /* We now can look at the canonicalized case
8645 VARIABLE + 1 CODE2 INT_MIN
8646 and decide on the result. */
8653 omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8659 omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8668 fold_overflow_warning ("assuming signed overflow does not occur "
8669 "when changing X +- C1 cmp C2 to "
8671 WARN_STRICT_OVERFLOW_COMPARISON
);
8672 return fold_build2_loc (loc
, code
, type
, variable
, new_const
);
8676 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8677 if (TREE_CODE (arg0
) == MINUS_EXPR
8679 && integer_zerop (arg1
))
8681 /* ??? The transformation is valid for the other operators if overflow
8682 is undefined for the type, but performing it here badly interacts
8683 with the transformation in fold_cond_expr_with_comparison which
8684 attempts to synthetize ABS_EXPR. */
8686 fold_overflow_warning ("assuming signed overflow does not occur "
8687 "when changing X - Y cmp 0 to X cmp Y",
8688 WARN_STRICT_OVERFLOW_COMPARISON
);
8689 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
8690 TREE_OPERAND (arg0
, 1));
8693 /* For comparisons of pointers we can decompose it to a compile time
8694 comparison of the base objects and the offsets into the object.
8695 This requires at least one operand being an ADDR_EXPR or a
8696 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8697 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8698 && (TREE_CODE (arg0
) == ADDR_EXPR
8699 || TREE_CODE (arg1
) == ADDR_EXPR
8700 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8701 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8703 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8704 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8706 int volatilep
, unsignedp
;
8707 bool indirect_base0
= false, indirect_base1
= false;
8709 /* Get base and offset for the access. Strip ADDR_EXPR for
8710 get_inner_reference, but put it back by stripping INDIRECT_REF
8711 off the base object if possible. indirect_baseN will be true
8712 if baseN is not an address but refers to the object itself. */
8714 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8716 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8717 &bitsize
, &bitpos0
, &offset0
, &mode
,
8718 &unsignedp
, &volatilep
, false);
8719 if (TREE_CODE (base0
) == INDIRECT_REF
)
8720 base0
= TREE_OPERAND (base0
, 0);
8722 indirect_base0
= true;
8724 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8726 base0
= TREE_OPERAND (arg0
, 0);
8727 STRIP_SIGN_NOPS (base0
);
8728 if (TREE_CODE (base0
) == ADDR_EXPR
)
8730 base0
= TREE_OPERAND (base0
, 0);
8731 indirect_base0
= true;
8733 offset0
= TREE_OPERAND (arg0
, 1);
8734 if (tree_fits_shwi_p (offset0
))
8736 HOST_WIDE_INT off
= size_low_cst (offset0
);
8737 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8739 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8741 bitpos0
= off
* BITS_PER_UNIT
;
8742 offset0
= NULL_TREE
;
8748 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8750 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8751 &bitsize
, &bitpos1
, &offset1
, &mode
,
8752 &unsignedp
, &volatilep
, false);
8753 if (TREE_CODE (base1
) == INDIRECT_REF
)
8754 base1
= TREE_OPERAND (base1
, 0);
8756 indirect_base1
= true;
8758 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8760 base1
= TREE_OPERAND (arg1
, 0);
8761 STRIP_SIGN_NOPS (base1
);
8762 if (TREE_CODE (base1
) == ADDR_EXPR
)
8764 base1
= TREE_OPERAND (base1
, 0);
8765 indirect_base1
= true;
8767 offset1
= TREE_OPERAND (arg1
, 1);
8768 if (tree_fits_shwi_p (offset1
))
8770 HOST_WIDE_INT off
= size_low_cst (offset1
);
8771 if ((HOST_WIDE_INT
) (((unsigned HOST_WIDE_INT
) off
)
8773 / BITS_PER_UNIT
== (HOST_WIDE_INT
) off
)
8775 bitpos1
= off
* BITS_PER_UNIT
;
8776 offset1
= NULL_TREE
;
8781 /* A local variable can never be pointed to by
8782 the default SSA name of an incoming parameter. */
8783 if ((TREE_CODE (arg0
) == ADDR_EXPR
8785 && TREE_CODE (base0
) == VAR_DECL
8786 && auto_var_in_fn_p (base0
, current_function_decl
)
8788 && TREE_CODE (base1
) == SSA_NAME
8789 && SSA_NAME_IS_DEFAULT_DEF (base1
)
8790 && TREE_CODE (SSA_NAME_VAR (base1
)) == PARM_DECL
)
8791 || (TREE_CODE (arg1
) == ADDR_EXPR
8793 && TREE_CODE (base1
) == VAR_DECL
8794 && auto_var_in_fn_p (base1
, current_function_decl
)
8796 && TREE_CODE (base0
) == SSA_NAME
8797 && SSA_NAME_IS_DEFAULT_DEF (base0
)
8798 && TREE_CODE (SSA_NAME_VAR (base0
)) == PARM_DECL
))
8800 if (code
== NE_EXPR
)
8801 return constant_boolean_node (1, type
);
8802 else if (code
== EQ_EXPR
)
8803 return constant_boolean_node (0, type
);
8805 /* If we have equivalent bases we might be able to simplify. */
8806 else if (indirect_base0
== indirect_base1
8807 && operand_equal_p (base0
, base1
, 0))
8809 /* We can fold this expression to a constant if the non-constant
8810 offset parts are equal. */
8811 if ((offset0
== offset1
8812 || (offset0
&& offset1
8813 && operand_equal_p (offset0
, offset1
, 0)))
8816 || (indirect_base0
&& DECL_P (base0
))
8817 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8821 && bitpos0
!= bitpos1
8822 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8823 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8824 fold_overflow_warning (("assuming pointer wraparound does not "
8825 "occur when comparing P +- C1 with "
8827 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8832 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8834 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8836 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8838 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8840 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8842 return constant_boolean_node (bitpos0
> bitpos1
, type
);
8846 /* We can simplify the comparison to a comparison of the variable
8847 offset parts if the constant offset parts are equal.
8848 Be careful to use signed sizetype here because otherwise we
8849 mess with array offsets in the wrong way. This is possible
8850 because pointer arithmetic is restricted to retain within an
8851 object and overflow on pointer differences is undefined as of
8852 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8853 else if (bitpos0
== bitpos1
8855 || (indirect_base0
&& DECL_P (base0
))
8856 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8858 /* By converting to signed sizetype we cover middle-end pointer
8859 arithmetic which operates on unsigned pointer types of size
8860 type size and ARRAY_REF offsets which are properly sign or
8861 zero extended from their type in case it is narrower than
8863 if (offset0
== NULL_TREE
)
8864 offset0
= build_int_cst (ssizetype
, 0);
8866 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
8867 if (offset1
== NULL_TREE
)
8868 offset1
= build_int_cst (ssizetype
, 0);
8870 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
8873 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8874 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8875 fold_overflow_warning (("assuming pointer wraparound does not "
8876 "occur when comparing P +- C1 with "
8878 WARN_STRICT_OVERFLOW_COMPARISON
);
8880 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
8883 /* For non-equal bases we can simplify if they are addresses
8884 of local binding decls or constants. */
8885 else if (indirect_base0
&& indirect_base1
8886 /* We know that !operand_equal_p (base0, base1, 0)
8887 because the if condition was false. But make
8888 sure two decls are not the same. */
8890 && TREE_CODE (arg0
) == ADDR_EXPR
8891 && TREE_CODE (arg1
) == ADDR_EXPR
8892 && (((TREE_CODE (base0
) == VAR_DECL
8893 || TREE_CODE (base0
) == PARM_DECL
)
8894 && (targetm
.binds_local_p (base0
)
8895 || CONSTANT_CLASS_P (base1
)))
8896 || CONSTANT_CLASS_P (base0
))
8897 && (((TREE_CODE (base1
) == VAR_DECL
8898 || TREE_CODE (base1
) == PARM_DECL
)
8899 && (targetm
.binds_local_p (base1
)
8900 || CONSTANT_CLASS_P (base0
)))
8901 || CONSTANT_CLASS_P (base1
)))
8903 if (code
== EQ_EXPR
)
8904 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
8906 else if (code
== NE_EXPR
)
8907 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
8910 /* For equal offsets we can simplify to a comparison of the
8912 else if (bitpos0
== bitpos1
8914 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
8916 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
8917 && ((offset0
== offset1
)
8918 || (offset0
&& offset1
8919 && operand_equal_p (offset0
, offset1
, 0))))
8922 base0
= build_fold_addr_expr_loc (loc
, base0
);
8924 base1
= build_fold_addr_expr_loc (loc
, base1
);
8925 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
8929 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8930 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8931 the resulting offset is smaller in absolute value than the
8932 original one and has the same sign. */
8933 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8934 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8935 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8936 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8937 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
8938 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8939 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
8941 tree const1
= TREE_OPERAND (arg0
, 1);
8942 tree const2
= TREE_OPERAND (arg1
, 1);
8943 tree variable1
= TREE_OPERAND (arg0
, 0);
8944 tree variable2
= TREE_OPERAND (arg1
, 0);
8946 const char * const warnmsg
= G_("assuming signed overflow does not "
8947 "occur when combining constants around "
8950 /* Put the constant on the side where it doesn't overflow and is
8951 of lower absolute value and of same sign than before. */
8952 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8953 ? MINUS_EXPR
: PLUS_EXPR
,
8955 if (!TREE_OVERFLOW (cst
)
8956 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
8957 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
8959 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8960 return fold_build2_loc (loc
, code
, type
,
8962 fold_build2_loc (loc
, TREE_CODE (arg1
),
8967 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8968 ? MINUS_EXPR
: PLUS_EXPR
,
8970 if (!TREE_OVERFLOW (cst
)
8971 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
8972 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
8974 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8975 return fold_build2_loc (loc
, code
, type
,
8976 fold_build2_loc (loc
, TREE_CODE (arg0
),
8983 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8984 signed arithmetic case. That form is created by the compiler
8985 often enough for folding it to be of value. One example is in
8986 computing loop trip counts after Operator Strength Reduction. */
8987 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8988 && TREE_CODE (arg0
) == MULT_EXPR
8989 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8990 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8991 && integer_zerop (arg1
))
8993 tree const1
= TREE_OPERAND (arg0
, 1);
8994 tree const2
= arg1
; /* zero */
8995 tree variable1
= TREE_OPERAND (arg0
, 0);
8996 enum tree_code cmp_code
= code
;
8998 /* Handle unfolded multiplication by zero. */
8999 if (integer_zerop (const1
))
9000 return fold_build2_loc (loc
, cmp_code
, type
, const1
, const2
);
9002 fold_overflow_warning (("assuming signed overflow does not occur when "
9003 "eliminating multiplication in comparison "
9005 WARN_STRICT_OVERFLOW_COMPARISON
);
9007 /* If const1 is negative we swap the sense of the comparison. */
9008 if (tree_int_cst_sgn (const1
) < 0)
9009 cmp_code
= swap_tree_comparison (cmp_code
);
9011 return fold_build2_loc (loc
, cmp_code
, type
, variable1
, const2
);
9014 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
9018 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9020 tree targ0
= strip_float_extensions (arg0
);
9021 tree targ1
= strip_float_extensions (arg1
);
9022 tree newtype
= TREE_TYPE (targ0
);
9024 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9025 newtype
= TREE_TYPE (targ1
);
9027 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9028 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9029 return fold_build2_loc (loc
, code
, type
,
9030 fold_convert_loc (loc
, newtype
, targ0
),
9031 fold_convert_loc (loc
, newtype
, targ1
));
9033 /* (-a) CMP (-b) -> b CMP a */
9034 if (TREE_CODE (arg0
) == NEGATE_EXPR
9035 && TREE_CODE (arg1
) == NEGATE_EXPR
)
9036 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg1
, 0),
9037 TREE_OPERAND (arg0
, 0));
9039 if (TREE_CODE (arg1
) == REAL_CST
)
9041 REAL_VALUE_TYPE cst
;
9042 cst
= TREE_REAL_CST (arg1
);
9044 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9045 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
9046 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9047 TREE_OPERAND (arg0
, 0),
9048 build_real (TREE_TYPE (arg1
),
9049 real_value_negate (&cst
)));
9051 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9052 /* a CMP (-0) -> a CMP 0 */
9053 if (REAL_VALUE_MINUS_ZERO (cst
))
9054 return fold_build2_loc (loc
, code
, type
, arg0
,
9055 build_real (TREE_TYPE (arg1
), dconst0
));
9057 /* x != NaN is always true, other ops are always false. */
9058 if (REAL_VALUE_ISNAN (cst
)
9059 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
9061 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
9062 return omit_one_operand_loc (loc
, type
, tem
, arg0
);
9065 /* Fold comparisons against infinity. */
9066 if (REAL_VALUE_ISINF (cst
)
9067 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
))))
9069 tem
= fold_inf_compare (loc
, code
, type
, arg0
, arg1
);
9070 if (tem
!= NULL_TREE
)
9075 /* If this is a comparison of a real constant with a PLUS_EXPR
9076 or a MINUS_EXPR of a real constant, we can convert it into a
9077 comparison with a revised real constant as long as no overflow
9078 occurs when unsafe_math_optimizations are enabled. */
9079 if (flag_unsafe_math_optimizations
9080 && TREE_CODE (arg1
) == REAL_CST
9081 && (TREE_CODE (arg0
) == PLUS_EXPR
9082 || TREE_CODE (arg0
) == MINUS_EXPR
)
9083 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
9084 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9085 ? MINUS_EXPR
: PLUS_EXPR
,
9086 arg1
, TREE_OPERAND (arg0
, 1)))
9087 && !TREE_OVERFLOW (tem
))
9088 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9090 /* Likewise, we can simplify a comparison of a real constant with
9091 a MINUS_EXPR whose first operand is also a real constant, i.e.
9092 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9093 floating-point types only if -fassociative-math is set. */
9094 if (flag_associative_math
9095 && TREE_CODE (arg1
) == REAL_CST
9096 && TREE_CODE (arg0
) == MINUS_EXPR
9097 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
9098 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
9100 && !TREE_OVERFLOW (tem
))
9101 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9102 TREE_OPERAND (arg0
, 1), tem
);
9104 /* Fold comparisons against built-in math functions. */
9105 if (TREE_CODE (arg1
) == REAL_CST
9106 && flag_unsafe_math_optimizations
9107 && ! flag_errno_math
)
9109 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
9111 if (fcode
!= END_BUILTINS
)
9113 tem
= fold_mathfn_compare (loc
, fcode
, code
, type
, arg0
, arg1
);
9114 if (tem
!= NULL_TREE
)
9120 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
9121 && CONVERT_EXPR_P (arg0
))
9123 /* If we are widening one operand of an integer comparison,
9124 see if the other operand is similarly being widened. Perhaps we
9125 can do the comparison in the narrower type. */
9126 tem
= fold_widened_comparison (loc
, code
, type
, arg0
, arg1
);
9130 /* Or if we are changing signedness. */
9131 tem
= fold_sign_changed_comparison (loc
, code
, type
, arg0
, arg1
);
9136 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9137 constant, we can simplify it. */
9138 if (TREE_CODE (arg1
) == INTEGER_CST
9139 && (TREE_CODE (arg0
) == MIN_EXPR
9140 || TREE_CODE (arg0
) == MAX_EXPR
)
9141 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9143 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
9148 /* Simplify comparison of something with itself. (For IEEE
9149 floating-point, we can only do some of these simplifications.) */
9150 if (operand_equal_p (arg0
, arg1
, 0))
9155 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9156 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9157 return constant_boolean_node (1, type
);
9162 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9163 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9164 return constant_boolean_node (1, type
);
9165 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg1
);
9168 /* For NE, we can only do this simplification if integer
9169 or we don't honor IEEE floating point NaNs. */
9170 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
9171 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9173 /* ... fall through ... */
9176 return constant_boolean_node (0, type
);
9182 /* If we are comparing an expression that just has comparisons
9183 of two integer values, arithmetic expressions of those comparisons,
9184 and constants, we can simplify it. There are only three cases
9185 to check: the two values can either be equal, the first can be
9186 greater, or the second can be greater. Fold the expression for
9187 those three values. Since each value must be 0 or 1, we have
9188 eight possibilities, each of which corresponds to the constant 0
9189 or 1 or one of the six possible comparisons.
9191 This handles common cases like (a > b) == 0 but also handles
9192 expressions like ((x > y) - (y > x)) > 0, which supposedly
9193 occur in macroized code. */
9195 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
9197 tree cval1
= 0, cval2
= 0;
9200 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
9201 /* Don't handle degenerate cases here; they should already
9202 have been handled anyway. */
9203 && cval1
!= 0 && cval2
!= 0
9204 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
9205 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
9206 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
9207 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
9208 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
9209 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
9210 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
9212 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
9213 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
9215 /* We can't just pass T to eval_subst in case cval1 or cval2
9216 was the same as ARG1. */
9219 = fold_build2_loc (loc
, code
, type
,
9220 eval_subst (loc
, arg0
, cval1
, maxval
,
9224 = fold_build2_loc (loc
, code
, type
,
9225 eval_subst (loc
, arg0
, cval1
, maxval
,
9229 = fold_build2_loc (loc
, code
, type
,
9230 eval_subst (loc
, arg0
, cval1
, minval
,
9234 /* All three of these results should be 0 or 1. Confirm they are.
9235 Then use those values to select the proper code to use. */
9237 if (TREE_CODE (high_result
) == INTEGER_CST
9238 && TREE_CODE (equal_result
) == INTEGER_CST
9239 && TREE_CODE (low_result
) == INTEGER_CST
)
9241 /* Make a 3-bit mask with the high-order bit being the
9242 value for `>', the next for '=', and the low for '<'. */
9243 switch ((integer_onep (high_result
) * 4)
9244 + (integer_onep (equal_result
) * 2)
9245 + integer_onep (low_result
))
9249 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
9270 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
9275 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
9276 SET_EXPR_LOCATION (tem
, loc
);
9279 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
9284 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9285 into a single range test. */
9286 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
9287 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
9288 && TREE_CODE (arg1
) == INTEGER_CST
9289 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9290 && !integer_zerop (TREE_OPERAND (arg0
, 1))
9291 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
9292 && !TREE_OVERFLOW (arg1
))
9294 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
9295 if (tem
!= NULL_TREE
)
9299 /* Fold ~X op ~Y as Y op X. */
9300 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9301 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9303 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9304 return fold_build2_loc (loc
, code
, type
,
9305 fold_convert_loc (loc
, cmp_type
,
9306 TREE_OPERAND (arg1
, 0)),
9307 TREE_OPERAND (arg0
, 0));
9310 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9311 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9312 && (TREE_CODE (arg1
) == INTEGER_CST
|| TREE_CODE (arg1
) == VECTOR_CST
))
9314 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9315 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9316 TREE_OPERAND (arg0
, 0),
9317 fold_build1_loc (loc
, BIT_NOT_EXPR
, cmp_type
,
9318 fold_convert_loc (loc
, cmp_type
, arg1
)));
9325 /* Subroutine of fold_binary. Optimize complex multiplications of the
9326 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9327 argument EXPR represents the expression "z" of type TYPE. */
9330 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
9332 tree itype
= TREE_TYPE (type
);
9333 tree rpart
, ipart
, tem
;
9335 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
9337 rpart
= TREE_OPERAND (expr
, 0);
9338 ipart
= TREE_OPERAND (expr
, 1);
9340 else if (TREE_CODE (expr
) == COMPLEX_CST
)
9342 rpart
= TREE_REALPART (expr
);
9343 ipart
= TREE_IMAGPART (expr
);
9347 expr
= save_expr (expr
);
9348 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
9349 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
9352 rpart
= save_expr (rpart
);
9353 ipart
= save_expr (ipart
);
9354 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
9355 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
9356 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
9357 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
9358 build_zero_cst (itype
));
9362 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9363 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9364 guarantees that P and N have the same least significant log2(M) bits.
9365 N is not otherwise constrained. In particular, N is not normalized to
9366 0 <= N < M as is common. In general, the precise value of P is unknown.
9367 M is chosen as large as possible such that constant N can be determined.
9369 Returns M and sets *RESIDUE to N.
9371 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9372 account. This is not always possible due to PR 35705.
9375 static unsigned HOST_WIDE_INT
9376 get_pointer_modulus_and_residue (tree expr
, unsigned HOST_WIDE_INT
*residue
,
9377 bool allow_func_align
)
9379 enum tree_code code
;
9383 code
= TREE_CODE (expr
);
9384 if (code
== ADDR_EXPR
)
9386 unsigned int bitalign
;
9387 get_object_alignment_1 (TREE_OPERAND (expr
, 0), &bitalign
, residue
);
9388 *residue
/= BITS_PER_UNIT
;
9389 return bitalign
/ BITS_PER_UNIT
;
9391 else if (code
== POINTER_PLUS_EXPR
)
9394 unsigned HOST_WIDE_INT modulus
;
9395 enum tree_code inner_code
;
9397 op0
= TREE_OPERAND (expr
, 0);
9399 modulus
= get_pointer_modulus_and_residue (op0
, residue
,
9402 op1
= TREE_OPERAND (expr
, 1);
9404 inner_code
= TREE_CODE (op1
);
9405 if (inner_code
== INTEGER_CST
)
9407 *residue
+= TREE_INT_CST_LOW (op1
);
9410 else if (inner_code
== MULT_EXPR
)
9412 op1
= TREE_OPERAND (op1
, 1);
9413 if (TREE_CODE (op1
) == INTEGER_CST
)
9415 unsigned HOST_WIDE_INT align
;
9417 /* Compute the greatest power-of-2 divisor of op1. */
9418 align
= TREE_INT_CST_LOW (op1
);
9421 /* If align is non-zero and less than *modulus, replace
9422 *modulus with align., If align is 0, then either op1 is 0
9423 or the greatest power-of-2 divisor of op1 doesn't fit in an
9424 unsigned HOST_WIDE_INT. In either case, no additional
9425 constraint is imposed. */
9427 modulus
= MIN (modulus
, align
);
9434 /* If we get here, we were unable to determine anything useful about the
9439 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9440 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9443 vec_cst_ctor_to_array (tree arg
, tree
*elts
)
9445 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)), i
;
9447 if (TREE_CODE (arg
) == VECTOR_CST
)
9449 for (i
= 0; i
< VECTOR_CST_NELTS (arg
); ++i
)
9450 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
9452 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
9454 constructor_elt
*elt
;
9456 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg
), i
, elt
)
9457 if (i
>= nelts
|| TREE_CODE (TREE_TYPE (elt
->value
)) == VECTOR_TYPE
)
9460 elts
[i
] = elt
->value
;
9464 for (; i
< nelts
; i
++)
9466 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
9470 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9471 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9472 NULL_TREE otherwise. */
9475 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const unsigned char *sel
)
9477 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
9479 bool need_ctor
= false;
9481 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
9482 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
);
9483 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
9484 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
9487 elts
= XALLOCAVEC (tree
, nelts
* 3);
9488 if (!vec_cst_ctor_to_array (arg0
, elts
)
9489 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
))
9492 for (i
= 0; i
< nelts
; i
++)
9494 if (!CONSTANT_CLASS_P (elts
[sel
[i
]]))
9496 elts
[i
+ 2 * nelts
] = unshare_expr (elts
[sel
[i
]]);
9501 vec
<constructor_elt
, va_gc
> *v
;
9502 vec_alloc (v
, nelts
);
9503 for (i
= 0; i
< nelts
; i
++)
9504 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, elts
[2 * nelts
+ i
]);
9505 return build_constructor (type
, v
);
9508 return build_vector (type
, &elts
[2 * nelts
]);
9511 /* Try to fold a pointer difference of type TYPE two address expressions of
9512 array references AREF0 and AREF1 using location LOC. Return a
9513 simplified expression for the difference or NULL_TREE. */
9516 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
9517 tree aref0
, tree aref1
)
9519 tree base0
= TREE_OPERAND (aref0
, 0);
9520 tree base1
= TREE_OPERAND (aref1
, 0);
9521 tree base_offset
= build_int_cst (type
, 0);
9523 /* If the bases are array references as well, recurse. If the bases
9524 are pointer indirections compute the difference of the pointers.
9525 If the bases are equal, we are set. */
9526 if ((TREE_CODE (base0
) == ARRAY_REF
9527 && TREE_CODE (base1
) == ARRAY_REF
9529 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
)))
9530 || (INDIRECT_REF_P (base0
)
9531 && INDIRECT_REF_P (base1
)
9532 && (base_offset
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
9533 TREE_OPERAND (base0
, 0),
9534 TREE_OPERAND (base1
, 0))))
9535 || operand_equal_p (base0
, base1
, 0))
9537 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
9538 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
9539 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
9540 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
9541 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9543 fold_build2_loc (loc
, MULT_EXPR
, type
,
9549 /* If the real or vector real constant CST of type TYPE has an exact
9550 inverse, return it, else return NULL. */
9553 exact_inverse (tree type
, tree cst
)
9556 tree unit_type
, *elts
;
9558 unsigned vec_nelts
, i
;
9560 switch (TREE_CODE (cst
))
9563 r
= TREE_REAL_CST (cst
);
9565 if (exact_real_inverse (TYPE_MODE (type
), &r
))
9566 return build_real (type
, r
);
9571 vec_nelts
= VECTOR_CST_NELTS (cst
);
9572 elts
= XALLOCAVEC (tree
, vec_nelts
);
9573 unit_type
= TREE_TYPE (type
);
9574 mode
= TYPE_MODE (unit_type
);
9576 for (i
= 0; i
< vec_nelts
; i
++)
9578 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
9579 if (!exact_real_inverse (mode
, &r
))
9581 elts
[i
] = build_real (unit_type
, r
);
9584 return build_vector (type
, elts
);
9591 /* Mask out the tz least significant bits of X of type TYPE where
9592 tz is the number of trailing zeroes in Y. */
9594 mask_with_tz (tree type
, const wide_int
&x
, const wide_int
&y
)
9596 int tz
= wi::ctz (y
);
9598 return wi::mask (tz
, true, TYPE_PRECISION (type
)) & x
;
9602 /* Return true when T is an address and is known to be nonzero.
9603 For floating point we further ensure that T is not denormal.
9604 Similar logic is present in nonzero_address in rtlanal.h.
9606 If the return value is based on the assumption that signed overflow
9607 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9608 change *STRICT_OVERFLOW_P. */
9611 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
9613 tree type
= TREE_TYPE (t
);
9614 enum tree_code code
;
9616 /* Doing something useful for floating point would need more work. */
9617 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
9620 code
= TREE_CODE (t
);
9621 switch (TREE_CODE_CLASS (code
))
9624 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9627 case tcc_comparison
:
9628 return tree_binary_nonzero_warnv_p (code
, type
,
9629 TREE_OPERAND (t
, 0),
9630 TREE_OPERAND (t
, 1),
9633 case tcc_declaration
:
9635 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
9643 case TRUTH_NOT_EXPR
:
9644 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
9647 case TRUTH_AND_EXPR
:
9649 case TRUTH_XOR_EXPR
:
9650 return tree_binary_nonzero_warnv_p (code
, type
,
9651 TREE_OPERAND (t
, 0),
9652 TREE_OPERAND (t
, 1),
9660 case WITH_SIZE_EXPR
:
9662 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
9667 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
9671 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
9676 tree fndecl
= get_callee_fndecl (t
);
9677 if (!fndecl
) return false;
9678 if (flag_delete_null_pointer_checks
&& !flag_check_new
9679 && DECL_IS_OPERATOR_NEW (fndecl
)
9680 && !TREE_NOTHROW (fndecl
))
9682 if (flag_delete_null_pointer_checks
9683 && lookup_attribute ("returns_nonnull",
9684 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
9686 return alloca_call_p (t
);
9695 /* Return true when T is an address and is known to be nonzero.
9696 Handle warnings about undefined signed overflow. */
9699 tree_expr_nonzero_p (tree t
)
9701 bool ret
, strict_overflow_p
;
9703 strict_overflow_p
= false;
9704 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
9705 if (strict_overflow_p
)
9706 fold_overflow_warning (("assuming signed overflow does not occur when "
9707 "determining that expression is always "
9709 WARN_STRICT_OVERFLOW_MISC
);
9713 /* Fold a binary expression of code CODE and type TYPE with operands
9714 OP0 and OP1. LOC is the location of the resulting expression.
9715 Return the folded expression if folding is successful. Otherwise,
9716 return NULL_TREE. */
9719 fold_binary_loc (location_t loc
,
9720 enum tree_code code
, tree type
, tree op0
, tree op1
)
9722 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9723 tree arg0
, arg1
, tem
;
9724 tree t1
= NULL_TREE
;
9725 bool strict_overflow_p
;
9728 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9729 && TREE_CODE_LENGTH (code
) == 2
9731 && op1
!= NULL_TREE
);
9736 /* Strip any conversions that don't change the mode. This is
9737 safe for every expression, except for a comparison expression
9738 because its signedness is derived from its operands. So, in
9739 the latter case, only strip conversions that don't change the
9740 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9743 Note that this is done as an internal manipulation within the
9744 constant folder, in order to find the simplest representation
9745 of the arguments so that their form can be studied. In any
9746 cases, the appropriate type conversions should be put back in
9747 the tree that will get out of the constant folder. */
9749 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9751 STRIP_SIGN_NOPS (arg0
);
9752 STRIP_SIGN_NOPS (arg1
);
9760 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9761 constant but we can't do arithmetic on them. */
9762 if ((TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9763 || (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
9764 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == FIXED_CST
)
9765 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9766 || (TREE_CODE (arg0
) == COMPLEX_CST
&& TREE_CODE (arg1
) == COMPLEX_CST
)
9767 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == VECTOR_CST
)
9768 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == INTEGER_CST
))
9770 if (kind
== tcc_binary
)
9772 /* Make sure type and arg0 have the same saturating flag. */
9773 gcc_assert (TYPE_SATURATING (type
)
9774 == TYPE_SATURATING (TREE_TYPE (arg0
)));
9775 tem
= const_binop (code
, arg0
, arg1
);
9777 else if (kind
== tcc_comparison
)
9778 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
9782 if (tem
!= NULL_TREE
)
9784 if (TREE_TYPE (tem
) != type
)
9785 tem
= fold_convert_loc (loc
, type
, tem
);
9790 /* If this is a commutative operation, and ARG0 is a constant, move it
9791 to ARG1 to reduce the number of tests below. */
9792 if (commutative_tree_code (code
)
9793 && tree_swap_operands_p (arg0
, arg1
, true))
9794 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9796 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9797 to ARG1 to reduce the number of tests below. */
9798 if (kind
== tcc_comparison
9799 && tree_swap_operands_p (arg0
, arg1
, true))
9800 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9802 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9806 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9808 First check for cases where an arithmetic operation is applied to a
9809 compound, conditional, or comparison operation. Push the arithmetic
9810 operation inside the compound or conditional to see if any folding
9811 can then be done. Convert comparison to conditional for this purpose.
9812 The also optimizes non-constant cases that used to be done in
9815 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9816 one of the operands is a comparison and the other is a comparison, a
9817 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9818 code below would make the expression more complex. Change it to a
9819 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9820 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9822 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9823 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9824 && TREE_CODE (type
) != VECTOR_TYPE
9825 && ((truth_value_p (TREE_CODE (arg0
))
9826 && (truth_value_p (TREE_CODE (arg1
))
9827 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9828 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9829 || (truth_value_p (TREE_CODE (arg1
))
9830 && (truth_value_p (TREE_CODE (arg0
))
9831 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9832 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9834 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9835 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9838 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9839 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9841 if (code
== EQ_EXPR
)
9842 tem
= invert_truthvalue_loc (loc
, tem
);
9844 return fold_convert_loc (loc
, type
, tem
);
9847 if (TREE_CODE_CLASS (code
) == tcc_binary
9848 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9850 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9852 tem
= fold_build2_loc (loc
, code
, type
,
9853 fold_convert_loc (loc
, TREE_TYPE (op0
),
9854 TREE_OPERAND (arg0
, 1)), op1
);
9855 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9858 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9859 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9861 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9862 fold_convert_loc (loc
, TREE_TYPE (op1
),
9863 TREE_OPERAND (arg1
, 1)));
9864 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9868 if (TREE_CODE (arg0
) == COND_EXPR
9869 || TREE_CODE (arg0
) == VEC_COND_EXPR
9870 || COMPARISON_CLASS_P (arg0
))
9872 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9874 /*cond_first_p=*/1);
9875 if (tem
!= NULL_TREE
)
9879 if (TREE_CODE (arg1
) == COND_EXPR
9880 || TREE_CODE (arg1
) == VEC_COND_EXPR
9881 || COMPARISON_CLASS_P (arg1
))
9883 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9885 /*cond_first_p=*/0);
9886 if (tem
!= NULL_TREE
)
9894 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9895 if (TREE_CODE (arg0
) == ADDR_EXPR
9896 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9898 tree iref
= TREE_OPERAND (arg0
, 0);
9899 return fold_build2 (MEM_REF
, type
,
9900 TREE_OPERAND (iref
, 0),
9901 int_const_binop (PLUS_EXPR
, arg1
,
9902 TREE_OPERAND (iref
, 1)));
9905 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9906 if (TREE_CODE (arg0
) == ADDR_EXPR
9907 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9910 HOST_WIDE_INT coffset
;
9911 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9915 return fold_build2 (MEM_REF
, type
,
9916 build_fold_addr_expr (base
),
9917 int_const_binop (PLUS_EXPR
, arg1
,
9918 size_int (coffset
)));
9923 case POINTER_PLUS_EXPR
:
9924 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9925 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9926 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9927 return fold_convert_loc (loc
, type
,
9928 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9929 fold_convert_loc (loc
, sizetype
,
9931 fold_convert_loc (loc
, sizetype
,
9934 /* PTR_CST +p CST -> CST1 */
9935 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9936 return fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
,
9937 fold_convert_loc (loc
, type
, arg1
));
9942 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
9944 /* X + (X / CST) * -CST is X % CST. */
9945 if (TREE_CODE (arg1
) == MULT_EXPR
9946 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
9947 && operand_equal_p (arg0
,
9948 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
9950 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
9951 tree cst1
= TREE_OPERAND (arg1
, 1);
9952 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
9954 if (sum
&& integer_zerop (sum
))
9955 return fold_convert_loc (loc
, type
,
9956 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
9957 TREE_TYPE (arg0
), arg0
,
9962 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9963 one. Make sure the type is not saturating and has the signedness of
9964 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9965 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9966 if ((TREE_CODE (arg0
) == MULT_EXPR
9967 || TREE_CODE (arg1
) == MULT_EXPR
)
9968 && !TYPE_SATURATING (type
)
9969 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9970 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9971 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9973 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9978 if (! FLOAT_TYPE_P (type
))
9980 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9981 with a constant, and the two constants have no bits in common,
9982 we should treat this as a BIT_IOR_EXPR since this may produce more
9984 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9985 && TREE_CODE (arg1
) == BIT_AND_EXPR
9986 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9987 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9988 && wi::bit_and (TREE_OPERAND (arg0
, 1),
9989 TREE_OPERAND (arg1
, 1)) == 0)
9991 code
= BIT_IOR_EXPR
;
9995 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9996 (plus (plus (mult) (mult)) (foo)) so that we can
9997 take advantage of the factoring cases below. */
9998 if (TYPE_OVERFLOW_WRAPS (type
)
9999 && (((TREE_CODE (arg0
) == PLUS_EXPR
10000 || TREE_CODE (arg0
) == MINUS_EXPR
)
10001 && TREE_CODE (arg1
) == MULT_EXPR
)
10002 || ((TREE_CODE (arg1
) == PLUS_EXPR
10003 || TREE_CODE (arg1
) == MINUS_EXPR
)
10004 && TREE_CODE (arg0
) == MULT_EXPR
)))
10006 tree parg0
, parg1
, parg
, marg
;
10007 enum tree_code pcode
;
10009 if (TREE_CODE (arg1
) == MULT_EXPR
)
10010 parg
= arg0
, marg
= arg1
;
10012 parg
= arg1
, marg
= arg0
;
10013 pcode
= TREE_CODE (parg
);
10014 parg0
= TREE_OPERAND (parg
, 0);
10015 parg1
= TREE_OPERAND (parg
, 1);
10016 STRIP_NOPS (parg0
);
10017 STRIP_NOPS (parg1
);
10019 if (TREE_CODE (parg0
) == MULT_EXPR
10020 && TREE_CODE (parg1
) != MULT_EXPR
)
10021 return fold_build2_loc (loc
, pcode
, type
,
10022 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10023 fold_convert_loc (loc
, type
,
10025 fold_convert_loc (loc
, type
,
10027 fold_convert_loc (loc
, type
, parg1
));
10028 if (TREE_CODE (parg0
) != MULT_EXPR
10029 && TREE_CODE (parg1
) == MULT_EXPR
)
10031 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10032 fold_convert_loc (loc
, type
, parg0
),
10033 fold_build2_loc (loc
, pcode
, type
,
10034 fold_convert_loc (loc
, type
, marg
),
10035 fold_convert_loc (loc
, type
,
10041 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10042 if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 0))
10043 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10045 /* Likewise if the operands are reversed. */
10046 if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
10047 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
10049 /* Convert X + -C into X - C. */
10050 if (TREE_CODE (arg1
) == REAL_CST
10051 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
)))
10053 tem
= fold_negate_const (arg1
, type
);
10054 if (!TREE_OVERFLOW (arg1
) || !flag_trapping_math
)
10055 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10056 fold_convert_loc (loc
, type
, arg0
),
10057 fold_convert_loc (loc
, type
, tem
));
10060 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10061 to __complex__ ( x, y ). This is not the same for SNaNs or
10062 if signed zeros are involved. */
10063 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10064 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10065 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10067 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10068 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10069 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10070 bool arg0rz
= false, arg0iz
= false;
10071 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10072 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10074 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10075 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10076 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10078 tree rp
= arg1r
? arg1r
10079 : build1 (REALPART_EXPR
, rtype
, arg1
);
10080 tree ip
= arg0i
? arg0i
10081 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10082 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10084 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10086 tree rp
= arg0r
? arg0r
10087 : build1 (REALPART_EXPR
, rtype
, arg0
);
10088 tree ip
= arg1i
? arg1i
10089 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
10090 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10095 if (flag_unsafe_math_optimizations
10096 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10097 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10098 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10101 /* Convert x+x into x*2.0. */
10102 if (operand_equal_p (arg0
, arg1
, 0)
10103 && SCALAR_FLOAT_TYPE_P (type
))
10104 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
,
10105 build_real (type
, dconst2
));
10107 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10108 We associate floats only if the user has specified
10109 -fassociative-math. */
10110 if (flag_associative_math
10111 && TREE_CODE (arg1
) == PLUS_EXPR
10112 && TREE_CODE (arg0
) != MULT_EXPR
)
10114 tree tree10
= TREE_OPERAND (arg1
, 0);
10115 tree tree11
= TREE_OPERAND (arg1
, 1);
10116 if (TREE_CODE (tree11
) == MULT_EXPR
10117 && TREE_CODE (tree10
) == MULT_EXPR
)
10120 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
10121 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
10124 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10125 We associate floats only if the user has specified
10126 -fassociative-math. */
10127 if (flag_associative_math
10128 && TREE_CODE (arg0
) == PLUS_EXPR
10129 && TREE_CODE (arg1
) != MULT_EXPR
)
10131 tree tree00
= TREE_OPERAND (arg0
, 0);
10132 tree tree01
= TREE_OPERAND (arg0
, 1);
10133 if (TREE_CODE (tree01
) == MULT_EXPR
10134 && TREE_CODE (tree00
) == MULT_EXPR
)
10137 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
10138 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
10144 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10145 is a rotate of A by C1 bits. */
10146 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10147 is a rotate of A by B bits. */
10149 enum tree_code code0
, code1
;
10151 code0
= TREE_CODE (arg0
);
10152 code1
= TREE_CODE (arg1
);
10153 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
10154 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
10155 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10156 TREE_OPERAND (arg1
, 0), 0)
10157 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10158 TYPE_UNSIGNED (rtype
))
10159 /* Only create rotates in complete modes. Other cases are not
10160 expanded properly. */
10161 && (element_precision (rtype
)
10162 == element_precision (TYPE_MODE (rtype
))))
10164 tree tree01
, tree11
;
10165 enum tree_code code01
, code11
;
10167 tree01
= TREE_OPERAND (arg0
, 1);
10168 tree11
= TREE_OPERAND (arg1
, 1);
10169 STRIP_NOPS (tree01
);
10170 STRIP_NOPS (tree11
);
10171 code01
= TREE_CODE (tree01
);
10172 code11
= TREE_CODE (tree11
);
10173 if (code01
== INTEGER_CST
10174 && code11
== INTEGER_CST
10175 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
10176 == element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
10178 tem
= build2_loc (loc
, LROTATE_EXPR
,
10179 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10180 TREE_OPERAND (arg0
, 0),
10181 code0
== LSHIFT_EXPR
? tree01
: tree11
);
10182 return fold_convert_loc (loc
, type
, tem
);
10184 else if (code11
== MINUS_EXPR
)
10186 tree tree110
, tree111
;
10187 tree110
= TREE_OPERAND (tree11
, 0);
10188 tree111
= TREE_OPERAND (tree11
, 1);
10189 STRIP_NOPS (tree110
);
10190 STRIP_NOPS (tree111
);
10191 if (TREE_CODE (tree110
) == INTEGER_CST
10192 && 0 == compare_tree_int (tree110
,
10194 (TREE_TYPE (TREE_OPERAND
10196 && operand_equal_p (tree01
, tree111
, 0))
10198 fold_convert_loc (loc
, type
,
10199 build2 ((code0
== LSHIFT_EXPR
10202 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10203 TREE_OPERAND (arg0
, 0), tree01
));
10205 else if (code01
== MINUS_EXPR
)
10207 tree tree010
, tree011
;
10208 tree010
= TREE_OPERAND (tree01
, 0);
10209 tree011
= TREE_OPERAND (tree01
, 1);
10210 STRIP_NOPS (tree010
);
10211 STRIP_NOPS (tree011
);
10212 if (TREE_CODE (tree010
) == INTEGER_CST
10213 && 0 == compare_tree_int (tree010
,
10215 (TREE_TYPE (TREE_OPERAND
10217 && operand_equal_p (tree11
, tree011
, 0))
10218 return fold_convert_loc
10220 build2 ((code0
!= LSHIFT_EXPR
10223 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10224 TREE_OPERAND (arg0
, 0), tree11
));
10230 /* In most languages, can't associate operations on floats through
10231 parentheses. Rather than remember where the parentheses were, we
10232 don't associate floats at all, unless the user has specified
10233 -fassociative-math.
10234 And, we need to make sure type is not saturating. */
10236 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
10237 && !TYPE_SATURATING (type
))
10239 tree var0
, con0
, lit0
, minus_lit0
;
10240 tree var1
, con1
, lit1
, minus_lit1
;
10244 /* Split both trees into variables, constants, and literals. Then
10245 associate each group together, the constants with literals,
10246 then the result with variables. This increases the chances of
10247 literals being recombined later and of generating relocatable
10248 expressions for the sum of a constant and literal. */
10249 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
10250 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
10251 code
== MINUS_EXPR
);
10253 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10254 if (code
== MINUS_EXPR
)
10257 /* With undefined overflow prefer doing association in a type
10258 which wraps on overflow, if that is one of the operand types. */
10259 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10260 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
10262 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10263 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10264 atype
= TREE_TYPE (arg0
);
10265 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10266 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
10267 atype
= TREE_TYPE (arg1
);
10268 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
10271 /* With undefined overflow we can only associate constants with one
10272 variable, and constants whose association doesn't overflow. */
10273 if ((POINTER_TYPE_P (atype
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10274 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
10281 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
10282 tmp0
= TREE_OPERAND (tmp0
, 0);
10283 if (CONVERT_EXPR_P (tmp0
)
10284 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10285 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10286 <= TYPE_PRECISION (atype
)))
10287 tmp0
= TREE_OPERAND (tmp0
, 0);
10288 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
10289 tmp1
= TREE_OPERAND (tmp1
, 0);
10290 if (CONVERT_EXPR_P (tmp1
)
10291 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10292 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10293 <= TYPE_PRECISION (atype
)))
10294 tmp1
= TREE_OPERAND (tmp1
, 0);
10295 /* The only case we can still associate with two variables
10296 is if they are the same, modulo negation and bit-pattern
10297 preserving conversions. */
10298 if (!operand_equal_p (tmp0
, tmp1
, 0))
10303 /* Only do something if we found more than two objects. Otherwise,
10304 nothing has changed and we risk infinite recursion. */
10306 && (2 < ((var0
!= 0) + (var1
!= 0)
10307 + (con0
!= 0) + (con1
!= 0)
10308 + (lit0
!= 0) + (lit1
!= 0)
10309 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
10311 bool any_overflows
= false;
10312 if (lit0
) any_overflows
|= TREE_OVERFLOW (lit0
);
10313 if (lit1
) any_overflows
|= TREE_OVERFLOW (lit1
);
10314 if (minus_lit0
) any_overflows
|= TREE_OVERFLOW (minus_lit0
);
10315 if (minus_lit1
) any_overflows
|= TREE_OVERFLOW (minus_lit1
);
10316 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
10317 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
10318 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
10319 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
10322 /* Preserve the MINUS_EXPR if the negative part of the literal is
10323 greater than the positive part. Otherwise, the multiplicative
10324 folding code (i.e extract_muldiv) may be fooled in case
10325 unsigned constants are subtracted, like in the following
10326 example: ((X*2 + 4) - 8U)/2. */
10327 if (minus_lit0
&& lit0
)
10329 if (TREE_CODE (lit0
) == INTEGER_CST
10330 && TREE_CODE (minus_lit0
) == INTEGER_CST
10331 && tree_int_cst_lt (lit0
, minus_lit0
))
10333 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
10334 MINUS_EXPR
, atype
);
10339 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
10340 MINUS_EXPR
, atype
);
10345 /* Don't introduce overflows through reassociation. */
10347 && ((lit0
&& TREE_OVERFLOW (lit0
))
10348 || (minus_lit0
&& TREE_OVERFLOW (minus_lit0
))))
10355 fold_convert_loc (loc
, type
,
10356 associate_trees (loc
, var0
, minus_lit0
,
10357 MINUS_EXPR
, atype
));
10360 con0
= associate_trees (loc
, con0
, minus_lit0
,
10361 MINUS_EXPR
, atype
);
10363 fold_convert_loc (loc
, type
,
10364 associate_trees (loc
, var0
, con0
,
10365 PLUS_EXPR
, atype
));
10369 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
10371 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
10379 /* Pointer simplifications for subtraction, simple reassociations. */
10380 if (POINTER_TYPE_P (TREE_TYPE (arg1
)) && POINTER_TYPE_P (TREE_TYPE (arg0
)))
10382 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10383 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10384 && TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10386 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10387 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10388 tree arg10
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10389 tree arg11
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10390 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10391 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10393 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10396 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10397 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
10399 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10400 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10401 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
, arg00
,
10402 fold_convert_loc (loc
, type
, arg1
));
10404 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tmp
, arg01
);
10406 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10408 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10410 tree arg10
= fold_convert_loc (loc
, type
,
10411 TREE_OPERAND (arg1
, 0));
10412 tree arg11
= fold_convert_loc (loc
, type
,
10413 TREE_OPERAND (arg1
, 1));
10414 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
,
10415 fold_convert_loc (loc
, type
, arg0
),
10418 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tmp
, arg11
);
10421 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10422 if (TREE_CODE (arg0
) == NEGATE_EXPR
10423 && negate_expr_p (arg1
)
10424 && reorder_operands_p (arg0
, arg1
))
10425 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10426 fold_convert_loc (loc
, type
,
10427 negate_expr (arg1
)),
10428 fold_convert_loc (loc
, type
,
10429 TREE_OPERAND (arg0
, 0)));
10430 /* Convert -A - 1 to ~A. */
10431 if (TREE_CODE (arg0
) == NEGATE_EXPR
10432 && integer_each_onep (arg1
)
10433 && !TYPE_OVERFLOW_TRAPS (type
))
10434 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
10435 fold_convert_loc (loc
, type
,
10436 TREE_OPERAND (arg0
, 0)));
10438 /* Convert -1 - A to ~A. */
10439 if (TREE_CODE (type
) != COMPLEX_TYPE
10440 && integer_all_onesp (arg0
))
10441 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, op1
);
10444 /* X - (X / Y) * Y is X % Y. */
10445 if ((INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
10446 && TREE_CODE (arg1
) == MULT_EXPR
10447 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10448 && operand_equal_p (arg0
,
10449 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0)
10450 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1),
10451 TREE_OPERAND (arg1
, 1), 0))
10453 fold_convert_loc (loc
, type
,
10454 fold_build2_loc (loc
, TRUNC_MOD_EXPR
, TREE_TYPE (arg0
),
10455 arg0
, TREE_OPERAND (arg1
, 1)));
10457 if (! FLOAT_TYPE_P (type
))
10459 if (integer_zerop (arg0
))
10460 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10462 /* Fold A - (A & B) into ~B & A. */
10463 if (!TREE_SIDE_EFFECTS (arg0
)
10464 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10466 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
10468 tree arg10
= fold_convert_loc (loc
, type
,
10469 TREE_OPERAND (arg1
, 0));
10470 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10471 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10473 fold_convert_loc (loc
, type
, arg0
));
10475 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10477 tree arg11
= fold_convert_loc (loc
,
10478 type
, TREE_OPERAND (arg1
, 1));
10479 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10480 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10482 fold_convert_loc (loc
, type
, arg0
));
10486 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10487 any power of 2 minus 1. */
10488 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10489 && TREE_CODE (arg1
) == BIT_AND_EXPR
10490 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10491 TREE_OPERAND (arg1
, 0), 0))
10493 tree mask0
= TREE_OPERAND (arg0
, 1);
10494 tree mask1
= TREE_OPERAND (arg1
, 1);
10495 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
10497 if (operand_equal_p (tem
, mask1
, 0))
10499 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
10500 TREE_OPERAND (arg0
, 0), mask1
);
10501 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
10506 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10507 else if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 1))
10508 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10510 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10511 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10512 (-ARG1 + ARG0) reduces to -ARG1. */
10513 else if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
10514 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10516 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10517 __complex__ ( x, -y ). This is not the same for SNaNs or if
10518 signed zeros are involved. */
10519 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10520 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10521 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10523 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10524 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10525 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10526 bool arg0rz
= false, arg0iz
= false;
10527 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10528 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10530 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10531 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10532 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10534 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10536 : build1 (REALPART_EXPR
, rtype
, arg1
));
10537 tree ip
= arg0i
? arg0i
10538 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10539 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10541 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10543 tree rp
= arg0r
? arg0r
10544 : build1 (REALPART_EXPR
, rtype
, arg0
);
10545 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10547 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10548 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10553 /* A - B -> A + (-B) if B is easily negatable. */
10554 if (negate_expr_p (arg1
)
10555 && ((FLOAT_TYPE_P (type
)
10556 /* Avoid this transformation if B is a positive REAL_CST. */
10557 && (TREE_CODE (arg1
) != REAL_CST
10558 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
10559 || INTEGRAL_TYPE_P (type
)))
10560 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10561 fold_convert_loc (loc
, type
, arg0
),
10562 fold_convert_loc (loc
, type
,
10563 negate_expr (arg1
)));
10565 /* Try folding difference of addresses. */
10567 HOST_WIDE_INT diff
;
10569 if ((TREE_CODE (arg0
) == ADDR_EXPR
10570 || TREE_CODE (arg1
) == ADDR_EXPR
)
10571 && ptr_difference_const (arg0
, arg1
, &diff
))
10572 return build_int_cst_type (type
, diff
);
10575 /* Fold &a[i] - &a[j] to i-j. */
10576 if (TREE_CODE (arg0
) == ADDR_EXPR
10577 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10578 && TREE_CODE (arg1
) == ADDR_EXPR
10579 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10581 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
10582 TREE_OPERAND (arg0
, 0),
10583 TREE_OPERAND (arg1
, 0));
10588 if (FLOAT_TYPE_P (type
)
10589 && flag_unsafe_math_optimizations
10590 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10591 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10592 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10595 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10596 one. Make sure the type is not saturating and has the signedness of
10597 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10598 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10599 if ((TREE_CODE (arg0
) == MULT_EXPR
10600 || TREE_CODE (arg1
) == MULT_EXPR
)
10601 && !TYPE_SATURATING (type
)
10602 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10603 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10604 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10606 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10614 /* (-A) * (-B) -> A * B */
10615 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10616 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10617 fold_convert_loc (loc
, type
,
10618 TREE_OPERAND (arg0
, 0)),
10619 fold_convert_loc (loc
, type
,
10620 negate_expr (arg1
)));
10621 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10622 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10623 fold_convert_loc (loc
, type
,
10624 negate_expr (arg0
)),
10625 fold_convert_loc (loc
, type
,
10626 TREE_OPERAND (arg1
, 0)));
10628 if (! FLOAT_TYPE_P (type
))
10630 /* Transform x * -1 into -x. Make sure to do the negation
10631 on the original operand with conversions not stripped
10632 because we can only strip non-sign-changing conversions. */
10633 if (integer_minus_onep (arg1
))
10634 return fold_convert_loc (loc
, type
, negate_expr (op0
));
10635 /* Transform x * -C into -x * C if x is easily negatable. */
10636 if (TREE_CODE (arg1
) == INTEGER_CST
10637 && tree_int_cst_sgn (arg1
) == -1
10638 && negate_expr_p (arg0
)
10639 && (tem
= negate_expr (arg1
)) != arg1
10640 && !TREE_OVERFLOW (tem
))
10641 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10642 fold_convert_loc (loc
, type
,
10643 negate_expr (arg0
)),
10646 /* (a * (1 << b)) is (a << b) */
10647 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10648 && integer_onep (TREE_OPERAND (arg1
, 0)))
10649 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op0
,
10650 TREE_OPERAND (arg1
, 1));
10651 if (TREE_CODE (arg0
) == LSHIFT_EXPR
10652 && integer_onep (TREE_OPERAND (arg0
, 0)))
10653 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op1
,
10654 TREE_OPERAND (arg0
, 1));
10656 /* (A + A) * C -> A * 2 * C */
10657 if (TREE_CODE (arg0
) == PLUS_EXPR
10658 && TREE_CODE (arg1
) == INTEGER_CST
10659 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10660 TREE_OPERAND (arg0
, 1), 0))
10661 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10662 omit_one_operand_loc (loc
, type
,
10663 TREE_OPERAND (arg0
, 0),
10664 TREE_OPERAND (arg0
, 1)),
10665 fold_build2_loc (loc
, MULT_EXPR
, type
,
10666 build_int_cst (type
, 2) , arg1
));
10668 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10669 sign-changing only. */
10670 if (TREE_CODE (arg1
) == INTEGER_CST
10671 && TREE_CODE (arg0
) == EXACT_DIV_EXPR
10672 && operand_equal_p (arg1
, TREE_OPERAND (arg0
, 1), 0))
10673 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10675 strict_overflow_p
= false;
10676 if (TREE_CODE (arg1
) == INTEGER_CST
10677 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10678 &strict_overflow_p
)))
10680 if (strict_overflow_p
)
10681 fold_overflow_warning (("assuming signed overflow does not "
10682 "occur when simplifying "
10684 WARN_STRICT_OVERFLOW_MISC
);
10685 return fold_convert_loc (loc
, type
, tem
);
10688 /* Optimize z * conj(z) for integer complex numbers. */
10689 if (TREE_CODE (arg0
) == CONJ_EXPR
10690 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10691 return fold_mult_zconjz (loc
, type
, arg1
);
10692 if (TREE_CODE (arg1
) == CONJ_EXPR
10693 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10694 return fold_mult_zconjz (loc
, type
, arg0
);
10698 /* Maybe fold x * 0 to 0. The expressions aren't the same
10699 when x is NaN, since x * 0 is also NaN. Nor are they the
10700 same in modes with signed zeros, since multiplying a
10701 negative value by 0 gives -0, not +0. */
10702 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10703 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10704 && real_zerop (arg1
))
10705 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10706 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10707 Likewise for complex arithmetic with signed zeros. */
10708 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10709 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10710 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10711 && real_onep (arg1
))
10712 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10714 /* Transform x * -1.0 into -x. */
10715 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10716 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10717 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10718 && real_minus_onep (arg1
))
10719 return fold_convert_loc (loc
, type
, negate_expr (arg0
));
10721 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10722 the result for floating point types due to rounding so it is applied
10723 only if -fassociative-math was specify. */
10724 if (flag_associative_math
10725 && TREE_CODE (arg0
) == RDIV_EXPR
10726 && TREE_CODE (arg1
) == REAL_CST
10727 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
10729 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
10732 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10733 TREE_OPERAND (arg0
, 1));
10736 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10737 if (operand_equal_p (arg0
, arg1
, 0))
10739 tree tem
= fold_strip_sign_ops (arg0
);
10740 if (tem
!= NULL_TREE
)
10742 tem
= fold_convert_loc (loc
, type
, tem
);
10743 return fold_build2_loc (loc
, MULT_EXPR
, type
, tem
, tem
);
10747 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10748 This is not the same for NaNs or if signed zeros are
10750 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10751 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10752 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10753 && TREE_CODE (arg1
) == COMPLEX_CST
10754 && real_zerop (TREE_REALPART (arg1
)))
10756 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10757 if (real_onep (TREE_IMAGPART (arg1
)))
10759 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10760 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
10762 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
10763 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
10765 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10766 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
10767 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
10771 /* Optimize z * conj(z) for floating point complex numbers.
10772 Guarded by flag_unsafe_math_optimizations as non-finite
10773 imaginary components don't produce scalar results. */
10774 if (flag_unsafe_math_optimizations
10775 && TREE_CODE (arg0
) == CONJ_EXPR
10776 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10777 return fold_mult_zconjz (loc
, type
, arg1
);
10778 if (flag_unsafe_math_optimizations
10779 && TREE_CODE (arg1
) == CONJ_EXPR
10780 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10781 return fold_mult_zconjz (loc
, type
, arg0
);
10783 if (flag_unsafe_math_optimizations
)
10785 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10786 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10788 /* Optimizations of root(...)*root(...). */
10789 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
10792 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10793 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10795 /* Optimize sqrt(x)*sqrt(x) as x. */
10796 if (BUILTIN_SQRT_P (fcode0
)
10797 && operand_equal_p (arg00
, arg10
, 0)
10798 && ! HONOR_SNANS (TYPE_MODE (type
)))
10801 /* Optimize root(x)*root(y) as root(x*y). */
10802 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10803 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg00
, arg10
);
10804 return build_call_expr_loc (loc
, rootfn
, 1, arg
);
10807 /* Optimize expN(x)*expN(y) as expN(x+y). */
10808 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
10810 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10811 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10812 CALL_EXPR_ARG (arg0
, 0),
10813 CALL_EXPR_ARG (arg1
, 0));
10814 return build_call_expr_loc (loc
, expfn
, 1, arg
);
10817 /* Optimizations of pow(...)*pow(...). */
10818 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
10819 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
10820 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
10822 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10823 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10824 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10825 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10827 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10828 if (operand_equal_p (arg01
, arg11
, 0))
10830 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10831 tree arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
10833 return build_call_expr_loc (loc
, powfn
, 2, arg
, arg01
);
10836 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10837 if (operand_equal_p (arg00
, arg10
, 0))
10839 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10840 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10842 return build_call_expr_loc (loc
, powfn
, 2, arg00
, arg
);
10846 /* Optimize tan(x)*cos(x) as sin(x). */
10847 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
10848 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
10849 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
10850 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
10851 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
10852 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
10853 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10854 CALL_EXPR_ARG (arg1
, 0), 0))
10856 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
10858 if (sinfn
!= NULL_TREE
)
10859 return build_call_expr_loc (loc
, sinfn
, 1,
10860 CALL_EXPR_ARG (arg0
, 0));
10863 /* Optimize x*pow(x,c) as pow(x,c+1). */
10864 if (fcode1
== BUILT_IN_POW
10865 || fcode1
== BUILT_IN_POWF
10866 || fcode1
== BUILT_IN_POWL
)
10868 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10869 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10870 if (TREE_CODE (arg11
) == REAL_CST
10871 && !TREE_OVERFLOW (arg11
)
10872 && operand_equal_p (arg0
, arg10
, 0))
10874 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10878 c
= TREE_REAL_CST (arg11
);
10879 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10880 arg
= build_real (type
, c
);
10881 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10885 /* Optimize pow(x,c)*x as pow(x,c+1). */
10886 if (fcode0
== BUILT_IN_POW
10887 || fcode0
== BUILT_IN_POWF
10888 || fcode0
== BUILT_IN_POWL
)
10890 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10891 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10892 if (TREE_CODE (arg01
) == REAL_CST
10893 && !TREE_OVERFLOW (arg01
)
10894 && operand_equal_p (arg1
, arg00
, 0))
10896 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10900 c
= TREE_REAL_CST (arg01
);
10901 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10902 arg
= build_real (type
, c
);
10903 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
10907 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10908 if (!in_gimple_form
10910 && operand_equal_p (arg0
, arg1
, 0))
10912 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
10916 tree arg
= build_real (type
, dconst2
);
10917 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10926 /* ~X | X is -1. */
10927 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10928 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10930 t1
= build_zero_cst (type
);
10931 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10932 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
10935 /* X | ~X is -1. */
10936 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
10937 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10939 t1
= build_zero_cst (type
);
10940 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10941 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
10944 /* Canonicalize (X & C1) | C2. */
10945 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10946 && TREE_CODE (arg1
) == INTEGER_CST
10947 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10949 int width
= TYPE_PRECISION (type
), w
;
10950 wide_int c1
= TREE_OPERAND (arg0
, 1);
10951 wide_int c2
= arg1
;
10953 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10954 if ((c1
& c2
) == c1
)
10955 return omit_one_operand_loc (loc
, type
, arg1
,
10956 TREE_OPERAND (arg0
, 0));
10958 wide_int msk
= wi::mask (width
, false,
10959 TYPE_PRECISION (TREE_TYPE (arg1
)));
10961 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10962 if (msk
.and_not (c1
| c2
) == 0)
10963 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
10964 TREE_OPERAND (arg0
, 0), arg1
);
10966 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10967 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10968 mode which allows further optimizations. */
10971 wide_int c3
= c1
.and_not (c2
);
10972 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
10974 wide_int mask
= wi::mask (w
, false,
10975 TYPE_PRECISION (type
));
10976 if (((c1
| c2
) & mask
) == mask
&& c1
.and_not (mask
) == 0)
10984 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
10985 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10986 TREE_OPERAND (arg0
, 0),
10987 wide_int_to_tree (type
,
10992 /* (X & ~Y) | (~X & Y) is X ^ Y */
10993 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10994 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10996 tree a0
, a1
, l0
, l1
, n0
, n1
;
10998 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10999 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11001 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11002 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11004 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
11005 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
11007 if ((operand_equal_p (n0
, a0
, 0)
11008 && operand_equal_p (n1
, a1
, 0))
11009 || (operand_equal_p (n0
, a1
, 0)
11010 && operand_equal_p (n1
, a0
, 0)))
11011 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
11014 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11015 if (t1
!= NULL_TREE
)
11018 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11020 This results in more efficient code for machines without a NAND
11021 instruction. Combine will canonicalize to the first form
11022 which will allow use of NAND instructions provided by the
11023 backend if they exist. */
11024 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11025 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11028 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11029 build2 (BIT_AND_EXPR
, type
,
11030 fold_convert_loc (loc
, type
,
11031 TREE_OPERAND (arg0
, 0)),
11032 fold_convert_loc (loc
, type
,
11033 TREE_OPERAND (arg1
, 0))));
11036 /* See if this can be simplified into a rotate first. If that
11037 is unsuccessful continue in the association code. */
11041 /* ~X ^ X is -1. */
11042 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11043 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11045 t1
= build_zero_cst (type
);
11046 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11047 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
11050 /* X ^ ~X is -1. */
11051 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
11052 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11054 t1
= build_zero_cst (type
);
11055 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
11056 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
11059 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11060 with a constant, and the two constants have no bits in common,
11061 we should treat this as a BIT_IOR_EXPR since this may produce more
11062 simplifications. */
11063 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11064 && TREE_CODE (arg1
) == BIT_AND_EXPR
11065 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11066 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
11067 && wi::bit_and (TREE_OPERAND (arg0
, 1),
11068 TREE_OPERAND (arg1
, 1)) == 0)
11070 code
= BIT_IOR_EXPR
;
11074 /* (X | Y) ^ X -> Y & ~ X*/
11075 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11076 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11078 tree t2
= TREE_OPERAND (arg0
, 1);
11079 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11081 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11082 fold_convert_loc (loc
, type
, t2
),
11083 fold_convert_loc (loc
, type
, t1
));
11087 /* (Y | X) ^ X -> Y & ~ X*/
11088 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11089 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11091 tree t2
= TREE_OPERAND (arg0
, 0);
11092 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
11094 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11095 fold_convert_loc (loc
, type
, t2
),
11096 fold_convert_loc (loc
, type
, t1
));
11100 /* X ^ (X | Y) -> Y & ~ X*/
11101 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11102 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
11104 tree t2
= TREE_OPERAND (arg1
, 1);
11105 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11107 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11108 fold_convert_loc (loc
, type
, t2
),
11109 fold_convert_loc (loc
, type
, t1
));
11113 /* X ^ (Y | X) -> Y & ~ X*/
11114 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11115 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
11117 tree t2
= TREE_OPERAND (arg1
, 0);
11118 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
11120 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11121 fold_convert_loc (loc
, type
, t2
),
11122 fold_convert_loc (loc
, type
, t1
));
11126 /* Convert ~X ^ ~Y to X ^ Y. */
11127 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11128 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11129 return fold_build2_loc (loc
, code
, type
,
11130 fold_convert_loc (loc
, type
,
11131 TREE_OPERAND (arg0
, 0)),
11132 fold_convert_loc (loc
, type
,
11133 TREE_OPERAND (arg1
, 0)));
11135 /* Convert ~X ^ C to X ^ ~C. */
11136 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11137 && TREE_CODE (arg1
) == INTEGER_CST
)
11138 return fold_build2_loc (loc
, code
, type
,
11139 fold_convert_loc (loc
, type
,
11140 TREE_OPERAND (arg0
, 0)),
11141 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, arg1
));
11143 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11144 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11145 && INTEGRAL_TYPE_P (type
)
11146 && integer_onep (TREE_OPERAND (arg0
, 1))
11147 && integer_onep (arg1
))
11148 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11149 build_zero_cst (TREE_TYPE (arg0
)));
11151 /* Fold (X & Y) ^ Y as ~X & Y. */
11152 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11153 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11155 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11156 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11157 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11158 fold_convert_loc (loc
, type
, arg1
));
11160 /* Fold (X & Y) ^ X as ~Y & X. */
11161 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11162 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11163 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11165 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11166 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11167 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11168 fold_convert_loc (loc
, type
, arg1
));
11170 /* Fold X ^ (X & Y) as X & ~Y. */
11171 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11172 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11174 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11175 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11176 fold_convert_loc (loc
, type
, arg0
),
11177 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11179 /* Fold X ^ (Y & X) as ~Y & X. */
11180 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11181 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11182 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11184 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11185 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11186 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11187 fold_convert_loc (loc
, type
, arg0
));
11190 /* See if this can be simplified into a rotate first. If that
11191 is unsuccessful continue in the association code. */
11195 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11196 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11197 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11198 || (TREE_CODE (arg0
) == EQ_EXPR
11199 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11200 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11201 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11203 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11204 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11205 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11206 || (TREE_CODE (arg1
) == EQ_EXPR
11207 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11208 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11209 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11211 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11212 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11213 && INTEGRAL_TYPE_P (type
)
11214 && integer_onep (TREE_OPERAND (arg0
, 1))
11215 && integer_onep (arg1
))
11218 tem
= TREE_OPERAND (arg0
, 0);
11219 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11220 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11222 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11223 build_zero_cst (TREE_TYPE (tem
)));
11225 /* Fold ~X & 1 as (X & 1) == 0. */
11226 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11227 && INTEGRAL_TYPE_P (type
)
11228 && integer_onep (arg1
))
11231 tem
= TREE_OPERAND (arg0
, 0);
11232 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11233 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11235 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11236 build_zero_cst (TREE_TYPE (tem
)));
11238 /* Fold !X & 1 as X == 0. */
11239 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11240 && integer_onep (arg1
))
11242 tem
= TREE_OPERAND (arg0
, 0);
11243 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11244 build_zero_cst (TREE_TYPE (tem
)));
11247 /* Fold (X ^ Y) & Y as ~X & Y. */
11248 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11249 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11251 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11252 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11253 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11254 fold_convert_loc (loc
, type
, arg1
));
11256 /* Fold (X ^ Y) & X as ~Y & X. */
11257 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11258 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11259 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11261 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11262 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11263 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11264 fold_convert_loc (loc
, type
, arg1
));
11266 /* Fold X & (X ^ Y) as X & ~Y. */
11267 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11268 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11270 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11271 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11272 fold_convert_loc (loc
, type
, arg0
),
11273 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11275 /* Fold X & (Y ^ X) as ~Y & X. */
11276 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11277 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11278 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11280 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11281 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11282 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11283 fold_convert_loc (loc
, type
, arg0
));
11286 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11287 multiple of 1 << CST. */
11288 if (TREE_CODE (arg1
) == INTEGER_CST
)
11290 wide_int cst1
= arg1
;
11291 wide_int ncst1
= -cst1
;
11292 if ((cst1
& ncst1
) == ncst1
11293 && multiple_of_p (type
, arg0
,
11294 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11295 return fold_convert_loc (loc
, type
, arg0
);
11298 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11300 if (TREE_CODE (arg1
) == INTEGER_CST
11301 && TREE_CODE (arg0
) == MULT_EXPR
11302 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11304 wide_int warg1
= arg1
;
11305 wide_int masked
= mask_with_tz (type
, warg1
, TREE_OPERAND (arg0
, 1));
11308 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
11310 else if (masked
!= warg1
)
11312 /* Avoid the transform if arg1 is a mask of some
11313 mode which allows further optimizations. */
11314 int pop
= wi::popcount (warg1
);
11315 if (!(pop
>= BITS_PER_UNIT
11316 && exact_log2 (pop
) != -1
11317 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
11318 return fold_build2_loc (loc
, code
, type
, op0
,
11319 wide_int_to_tree (type
, masked
));
11323 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11324 ((A & N) + B) & M -> (A + B) & M
11325 Similarly if (N & M) == 0,
11326 ((A | N) + B) & M -> (A + B) & M
11327 and for - instead of + (or unary - instead of +)
11328 and/or ^ instead of |.
11329 If B is constant and (B & M) == 0, fold into A & M. */
11330 if (TREE_CODE (arg1
) == INTEGER_CST
)
11332 wide_int cst1
= arg1
;
11333 if ((~cst1
!= 0) && (cst1
& (cst1
+ 1)) == 0
11334 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11335 && (TREE_CODE (arg0
) == PLUS_EXPR
11336 || TREE_CODE (arg0
) == MINUS_EXPR
11337 || TREE_CODE (arg0
) == NEGATE_EXPR
)
11338 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
11339 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
11345 /* Now we know that arg0 is (C + D) or (C - D) or
11346 -C and arg1 (M) is == (1LL << cst) - 1.
11347 Store C into PMOP[0] and D into PMOP[1]. */
11348 pmop
[0] = TREE_OPERAND (arg0
, 0);
11350 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
11352 pmop
[1] = TREE_OPERAND (arg0
, 1);
11356 if ((wi::max_value (TREE_TYPE (arg0
)) & cst1
) != cst1
)
11359 for (; which
>= 0; which
--)
11360 switch (TREE_CODE (pmop
[which
]))
11365 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
11368 cst0
= TREE_OPERAND (pmop
[which
], 1);
11370 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
11375 else if (cst0
!= 0)
11377 /* If C or D is of the form (A & N) where
11378 (N & M) == M, or of the form (A | N) or
11379 (A ^ N) where (N & M) == 0, replace it with A. */
11380 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
11383 /* If C or D is a N where (N & M) == 0, it can be
11384 omitted (assumed 0). */
11385 if ((TREE_CODE (arg0
) == PLUS_EXPR
11386 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
11387 && (cst1
& pmop
[which
]) == 0)
11388 pmop
[which
] = NULL
;
11394 /* Only build anything new if we optimized one or both arguments
11396 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
11397 || (TREE_CODE (arg0
) != NEGATE_EXPR
11398 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
11400 tree utype
= TREE_TYPE (arg0
);
11401 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
11403 /* Perform the operations in a type that has defined
11404 overflow behavior. */
11405 utype
= unsigned_type_for (TREE_TYPE (arg0
));
11406 if (pmop
[0] != NULL
)
11407 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
11408 if (pmop
[1] != NULL
)
11409 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
11412 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
11413 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
11414 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
11416 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
11417 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
11419 else if (pmop
[0] != NULL
)
11421 else if (pmop
[1] != NULL
)
11424 return build_int_cst (type
, 0);
11426 else if (pmop
[0] == NULL
)
11427 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
11429 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
11431 /* TEM is now the new binary +, - or unary - replacement. */
11432 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
11433 fold_convert_loc (loc
, utype
, arg1
));
11434 return fold_convert_loc (loc
, type
, tem
);
11439 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11440 if (t1
!= NULL_TREE
)
11442 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11443 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11444 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11446 prec
= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11448 wide_int mask
= wide_int::from (arg1
, prec
, UNSIGNED
);
11451 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11454 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11456 This results in more efficient code for machines without a NOR
11457 instruction. Combine will canonicalize to the first form
11458 which will allow use of NOR instructions provided by the
11459 backend if they exist. */
11460 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11461 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11463 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11464 build2 (BIT_IOR_EXPR
, type
,
11465 fold_convert_loc (loc
, type
,
11466 TREE_OPERAND (arg0
, 0)),
11467 fold_convert_loc (loc
, type
,
11468 TREE_OPERAND (arg1
, 0))));
11471 /* If arg0 is derived from the address of an object or function, we may
11472 be able to fold this expression using the object or function's
11474 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && tree_fits_uhwi_p (arg1
))
11476 unsigned HOST_WIDE_INT modulus
, residue
;
11477 unsigned HOST_WIDE_INT low
= tree_to_uhwi (arg1
);
11479 modulus
= get_pointer_modulus_and_residue (arg0
, &residue
,
11480 integer_onep (arg1
));
11482 /* This works because modulus is a power of 2. If this weren't the
11483 case, we'd have to replace it by its greatest power-of-2
11484 divisor: modulus & -modulus. */
11486 return build_int_cst (type
, residue
& low
);
11489 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11490 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11491 if the new mask might be further optimized. */
11492 if ((TREE_CODE (arg0
) == LSHIFT_EXPR
11493 || TREE_CODE (arg0
) == RSHIFT_EXPR
)
11494 && TYPE_PRECISION (TREE_TYPE (arg0
)) <= HOST_BITS_PER_WIDE_INT
11495 && TREE_CODE (arg1
) == INTEGER_CST
11496 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
11497 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) > 0
11498 && (tree_to_uhwi (TREE_OPERAND (arg0
, 1))
11499 < TYPE_PRECISION (TREE_TYPE (arg0
))))
11501 unsigned int shiftc
= tree_to_uhwi (TREE_OPERAND (arg0
, 1));
11502 unsigned HOST_WIDE_INT mask
= TREE_INT_CST_LOW (arg1
);
11503 unsigned HOST_WIDE_INT newmask
, zerobits
= 0;
11504 tree shift_type
= TREE_TYPE (arg0
);
11506 if (TREE_CODE (arg0
) == LSHIFT_EXPR
)
11507 zerobits
= ((((unsigned HOST_WIDE_INT
) 1) << shiftc
) - 1);
11508 else if (TREE_CODE (arg0
) == RSHIFT_EXPR
11509 && TYPE_PRECISION (TREE_TYPE (arg0
))
11510 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0
))))
11512 prec
= TYPE_PRECISION (TREE_TYPE (arg0
));
11513 tree arg00
= TREE_OPERAND (arg0
, 0);
11514 /* See if more bits can be proven as zero because of
11516 if (TREE_CODE (arg00
) == NOP_EXPR
11517 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00
, 0))))
11519 tree inner_type
= TREE_TYPE (TREE_OPERAND (arg00
, 0));
11520 if (TYPE_PRECISION (inner_type
)
11521 == GET_MODE_PRECISION (TYPE_MODE (inner_type
))
11522 && TYPE_PRECISION (inner_type
) < prec
)
11524 prec
= TYPE_PRECISION (inner_type
);
11525 /* See if we can shorten the right shift. */
11527 shift_type
= inner_type
;
11528 /* Otherwise X >> C1 is all zeros, so we'll optimize
11529 it into (X, 0) later on by making sure zerobits
11533 zerobits
= ~(unsigned HOST_WIDE_INT
) 0;
11536 zerobits
>>= HOST_BITS_PER_WIDE_INT
- shiftc
;
11537 zerobits
<<= prec
- shiftc
;
11539 /* For arithmetic shift if sign bit could be set, zerobits
11540 can contain actually sign bits, so no transformation is
11541 possible, unless MASK masks them all away. In that
11542 case the shift needs to be converted into logical shift. */
11543 if (!TYPE_UNSIGNED (TREE_TYPE (arg0
))
11544 && prec
== TYPE_PRECISION (TREE_TYPE (arg0
)))
11546 if ((mask
& zerobits
) == 0)
11547 shift_type
= unsigned_type_for (TREE_TYPE (arg0
));
11553 /* ((X << 16) & 0xff00) is (X, 0). */
11554 if ((mask
& zerobits
) == mask
)
11555 return omit_one_operand_loc (loc
, type
,
11556 build_int_cst (type
, 0), arg0
);
11558 newmask
= mask
| zerobits
;
11559 if (newmask
!= mask
&& (newmask
& (newmask
+ 1)) == 0)
11561 /* Only do the transformation if NEWMASK is some integer
11563 for (prec
= BITS_PER_UNIT
;
11564 prec
< HOST_BITS_PER_WIDE_INT
; prec
<<= 1)
11565 if (newmask
== (((unsigned HOST_WIDE_INT
) 1) << prec
) - 1)
11567 if (prec
< HOST_BITS_PER_WIDE_INT
11568 || newmask
== ~(unsigned HOST_WIDE_INT
) 0)
11572 if (shift_type
!= TREE_TYPE (arg0
))
11574 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), shift_type
,
11575 fold_convert_loc (loc
, shift_type
,
11576 TREE_OPERAND (arg0
, 0)),
11577 TREE_OPERAND (arg0
, 1));
11578 tem
= fold_convert_loc (loc
, type
, tem
);
11582 newmaskt
= build_int_cst_type (TREE_TYPE (op1
), newmask
);
11583 if (!tree_int_cst_equal (newmaskt
, arg1
))
11584 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
, newmaskt
);
11592 /* Don't touch a floating-point divide by zero unless the mode
11593 of the constant can represent infinity. */
11594 if (TREE_CODE (arg1
) == REAL_CST
11595 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11596 && real_zerop (arg1
))
11599 /* Optimize A / A to 1.0 if we don't care about
11600 NaNs or Infinities. Skip the transformation
11601 for non-real operands. */
11602 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11603 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
11604 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0
)))
11605 && operand_equal_p (arg0
, arg1
, 0))
11607 tree r
= build_real (TREE_TYPE (arg0
), dconst1
);
11609 return omit_two_operands_loc (loc
, type
, r
, arg0
, arg1
);
11612 /* The complex version of the above A / A optimization. */
11613 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11614 && operand_equal_p (arg0
, arg1
, 0))
11616 tree elem_type
= TREE_TYPE (TREE_TYPE (arg0
));
11617 if (! HONOR_NANS (TYPE_MODE (elem_type
))
11618 && ! HONOR_INFINITIES (TYPE_MODE (elem_type
)))
11620 tree r
= build_real (elem_type
, dconst1
);
11621 /* omit_two_operands will call fold_convert for us. */
11622 return omit_two_operands_loc (loc
, type
, r
, arg0
, arg1
);
11626 /* (-A) / (-B) -> A / B */
11627 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11628 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11629 TREE_OPERAND (arg0
, 0),
11630 negate_expr (arg1
));
11631 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11632 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11633 negate_expr (arg0
),
11634 TREE_OPERAND (arg1
, 0));
11636 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11637 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
11638 && real_onep (arg1
))
11639 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11641 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11642 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
11643 && real_minus_onep (arg1
))
11644 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
,
11645 negate_expr (arg0
)));
11647 /* If ARG1 is a constant, we can convert this to a multiply by the
11648 reciprocal. This does not have the same rounding properties,
11649 so only do this if -freciprocal-math. We can actually
11650 always safely do it if ARG1 is a power of two, but it's hard to
11651 tell if it is or not in a portable manner. */
11653 && (TREE_CODE (arg1
) == REAL_CST
11654 || (TREE_CODE (arg1
) == COMPLEX_CST
11655 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1
)))
11656 || (TREE_CODE (arg1
) == VECTOR_CST
11657 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1
)))))
11659 if (flag_reciprocal_math
11660 && 0 != (tem
= const_binop (code
, build_one_cst (type
), arg1
)))
11661 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tem
);
11662 /* Find the reciprocal if optimizing and the result is exact.
11663 TODO: Complex reciprocal not implemented. */
11664 if (TREE_CODE (arg1
) != COMPLEX_CST
)
11666 tree inverse
= exact_inverse (TREE_TYPE (arg0
), arg1
);
11669 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, inverse
);
11672 /* Convert A/B/C to A/(B*C). */
11673 if (flag_reciprocal_math
11674 && TREE_CODE (arg0
) == RDIV_EXPR
)
11675 return fold_build2_loc (loc
, RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
11676 fold_build2_loc (loc
, MULT_EXPR
, type
,
11677 TREE_OPERAND (arg0
, 1), arg1
));
11679 /* Convert A/(B/C) to (A/B)*C. */
11680 if (flag_reciprocal_math
11681 && TREE_CODE (arg1
) == RDIV_EXPR
)
11682 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11683 fold_build2_loc (loc
, RDIV_EXPR
, type
, arg0
,
11684 TREE_OPERAND (arg1
, 0)),
11685 TREE_OPERAND (arg1
, 1));
11687 /* Convert C1/(X*C2) into (C1/C2)/X. */
11688 if (flag_reciprocal_math
11689 && TREE_CODE (arg1
) == MULT_EXPR
11690 && TREE_CODE (arg0
) == REAL_CST
11691 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
11693 tree tem
= const_binop (RDIV_EXPR
, arg0
,
11694 TREE_OPERAND (arg1
, 1));
11696 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
11697 TREE_OPERAND (arg1
, 0));
11700 if (flag_unsafe_math_optimizations
)
11702 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
11703 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
11705 /* Optimize sin(x)/cos(x) as tan(x). */
11706 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
11707 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
11708 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
11709 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11710 CALL_EXPR_ARG (arg1
, 0), 0))
11712 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11714 if (tanfn
!= NULL_TREE
)
11715 return build_call_expr_loc (loc
, tanfn
, 1, CALL_EXPR_ARG (arg0
, 0));
11718 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11719 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
11720 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
11721 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
11722 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11723 CALL_EXPR_ARG (arg1
, 0), 0))
11725 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11727 if (tanfn
!= NULL_TREE
)
11729 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
11730 CALL_EXPR_ARG (arg0
, 0));
11731 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11732 build_real (type
, dconst1
), tmp
);
11736 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11737 NaNs or Infinities. */
11738 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
11739 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
11740 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
11742 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11743 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11745 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
11746 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
11747 && operand_equal_p (arg00
, arg01
, 0))
11749 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11751 if (cosfn
!= NULL_TREE
)
11752 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11756 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11757 NaNs or Infinities. */
11758 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
11759 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
11760 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
11762 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11763 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11765 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
11766 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
11767 && operand_equal_p (arg00
, arg01
, 0))
11769 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11771 if (cosfn
!= NULL_TREE
)
11773 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11774 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11775 build_real (type
, dconst1
),
11781 /* Optimize pow(x,c)/x as pow(x,c-1). */
11782 if (fcode0
== BUILT_IN_POW
11783 || fcode0
== BUILT_IN_POWF
11784 || fcode0
== BUILT_IN_POWL
)
11786 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11787 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11788 if (TREE_CODE (arg01
) == REAL_CST
11789 && !TREE_OVERFLOW (arg01
)
11790 && operand_equal_p (arg1
, arg00
, 0))
11792 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11796 c
= TREE_REAL_CST (arg01
);
11797 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
11798 arg
= build_real (type
, c
);
11799 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
11803 /* Optimize a/root(b/c) into a*root(c/b). */
11804 if (BUILTIN_ROOT_P (fcode1
))
11806 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
11808 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
11810 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11811 tree b
= TREE_OPERAND (rootarg
, 0);
11812 tree c
= TREE_OPERAND (rootarg
, 1);
11814 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
11816 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
11817 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
11821 /* Optimize x/expN(y) into x*expN(-y). */
11822 if (BUILTIN_EXPONENT_P (fcode1
))
11824 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11825 tree arg
= negate_expr (CALL_EXPR_ARG (arg1
, 0));
11826 arg1
= build_call_expr_loc (loc
,
11828 fold_convert_loc (loc
, type
, arg
));
11829 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11832 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11833 if (fcode1
== BUILT_IN_POW
11834 || fcode1
== BUILT_IN_POWF
11835 || fcode1
== BUILT_IN_POWL
)
11837 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11838 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
11839 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
11840 tree neg11
= fold_convert_loc (loc
, type
,
11841 negate_expr (arg11
));
11842 arg1
= build_call_expr_loc (loc
, powfn
, 2, arg10
, neg11
);
11843 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11848 case TRUNC_DIV_EXPR
:
11849 /* Optimize (X & (-A)) / A where A is a power of 2,
11851 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11852 && !TYPE_UNSIGNED (type
) && TREE_CODE (arg1
) == INTEGER_CST
11853 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) > 0)
11855 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (arg1
),
11856 arg1
, TREE_OPERAND (arg0
, 1));
11857 if (sum
&& integer_zerop (sum
)) {
11858 tree pow2
= build_int_cst (integer_type_node
,
11859 wi::exact_log2 (arg1
));
11860 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11861 TREE_OPERAND (arg0
, 0), pow2
);
11867 case FLOOR_DIV_EXPR
:
11868 /* Simplify A / (B << N) where A and B are positive and B is
11869 a power of 2, to A >> (N + log2(B)). */
11870 strict_overflow_p
= false;
11871 if (TREE_CODE (arg1
) == LSHIFT_EXPR
11872 && (TYPE_UNSIGNED (type
)
11873 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11875 tree sval
= TREE_OPERAND (arg1
, 0);
11876 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
11878 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
11879 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
11880 wi::exact_log2 (sval
));
11882 if (strict_overflow_p
)
11883 fold_overflow_warning (("assuming signed overflow does not "
11884 "occur when simplifying A / (B << N)"),
11885 WARN_STRICT_OVERFLOW_MISC
);
11887 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
11889 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11890 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
11894 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11895 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11896 if (INTEGRAL_TYPE_P (type
)
11897 && TYPE_UNSIGNED (type
)
11898 && code
== FLOOR_DIV_EXPR
)
11899 return fold_build2_loc (loc
, TRUNC_DIV_EXPR
, type
, op0
, op1
);
11903 case ROUND_DIV_EXPR
:
11904 case CEIL_DIV_EXPR
:
11905 case EXACT_DIV_EXPR
:
11906 if (integer_zerop (arg1
))
11908 /* X / -1 is -X. */
11909 if (!TYPE_UNSIGNED (type
)
11910 && TREE_CODE (arg1
) == INTEGER_CST
11911 && wi::eq_p (arg1
, -1))
11912 return fold_convert_loc (loc
, type
, negate_expr (arg0
));
11914 /* Convert -A / -B to A / B when the type is signed and overflow is
11916 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11917 && TREE_CODE (arg0
) == NEGATE_EXPR
11918 && negate_expr_p (arg1
))
11920 if (INTEGRAL_TYPE_P (type
))
11921 fold_overflow_warning (("assuming signed overflow does not occur "
11922 "when distributing negation across "
11924 WARN_STRICT_OVERFLOW_MISC
);
11925 return fold_build2_loc (loc
, code
, type
,
11926 fold_convert_loc (loc
, type
,
11927 TREE_OPERAND (arg0
, 0)),
11928 fold_convert_loc (loc
, type
,
11929 negate_expr (arg1
)));
11931 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11932 && TREE_CODE (arg1
) == NEGATE_EXPR
11933 && negate_expr_p (arg0
))
11935 if (INTEGRAL_TYPE_P (type
))
11936 fold_overflow_warning (("assuming signed overflow does not occur "
11937 "when distributing negation across "
11939 WARN_STRICT_OVERFLOW_MISC
);
11940 return fold_build2_loc (loc
, code
, type
,
11941 fold_convert_loc (loc
, type
,
11942 negate_expr (arg0
)),
11943 fold_convert_loc (loc
, type
,
11944 TREE_OPERAND (arg1
, 0)));
11947 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11948 operation, EXACT_DIV_EXPR.
11950 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11951 At one time others generated faster code, it's not clear if they do
11952 after the last round to changes to the DIV code in expmed.c. */
11953 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
11954 && multiple_of_p (type
, arg0
, arg1
))
11955 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
, arg0
, arg1
);
11957 strict_overflow_p
= false;
11958 if (TREE_CODE (arg1
) == INTEGER_CST
11959 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11960 &strict_overflow_p
)))
11962 if (strict_overflow_p
)
11963 fold_overflow_warning (("assuming signed overflow does not occur "
11964 "when simplifying division"),
11965 WARN_STRICT_OVERFLOW_MISC
);
11966 return fold_convert_loc (loc
, type
, tem
);
11971 case CEIL_MOD_EXPR
:
11972 case FLOOR_MOD_EXPR
:
11973 case ROUND_MOD_EXPR
:
11974 case TRUNC_MOD_EXPR
:
11975 /* X % -1 is zero. */
11976 if (!TYPE_UNSIGNED (type
)
11977 && TREE_CODE (arg1
) == INTEGER_CST
11978 && wi::eq_p (arg1
, -1))
11979 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11981 /* X % -C is the same as X % C. */
11982 if (code
== TRUNC_MOD_EXPR
11983 && TYPE_SIGN (type
) == SIGNED
11984 && TREE_CODE (arg1
) == INTEGER_CST
11985 && !TREE_OVERFLOW (arg1
)
11986 && wi::neg_p (arg1
)
11987 && !TYPE_OVERFLOW_TRAPS (type
)
11988 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11989 && !sign_bit_p (arg1
, arg1
))
11990 return fold_build2_loc (loc
, code
, type
,
11991 fold_convert_loc (loc
, type
, arg0
),
11992 fold_convert_loc (loc
, type
,
11993 negate_expr (arg1
)));
11995 /* X % -Y is the same as X % Y. */
11996 if (code
== TRUNC_MOD_EXPR
11997 && !TYPE_UNSIGNED (type
)
11998 && TREE_CODE (arg1
) == NEGATE_EXPR
11999 && !TYPE_OVERFLOW_TRAPS (type
))
12000 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, arg0
),
12001 fold_convert_loc (loc
, type
,
12002 TREE_OPERAND (arg1
, 0)));
12004 strict_overflow_p
= false;
12005 if (TREE_CODE (arg1
) == INTEGER_CST
12006 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
12007 &strict_overflow_p
)))
12009 if (strict_overflow_p
)
12010 fold_overflow_warning (("assuming signed overflow does not occur "
12011 "when simplifying modulus"),
12012 WARN_STRICT_OVERFLOW_MISC
);
12013 return fold_convert_loc (loc
, type
, tem
);
12016 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12017 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12018 if ((code
== TRUNC_MOD_EXPR
|| code
== FLOOR_MOD_EXPR
)
12019 && (TYPE_UNSIGNED (type
)
12020 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
12023 /* Also optimize A % (C << N) where C is a power of 2,
12024 to A & ((C << N) - 1). */
12025 if (TREE_CODE (arg1
) == LSHIFT_EXPR
)
12026 c
= TREE_OPERAND (arg1
, 0);
12028 if (integer_pow2p (c
) && tree_int_cst_sgn (c
) > 0)
12031 = fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (arg1
), arg1
,
12032 build_int_cst (TREE_TYPE (arg1
), 1));
12033 if (strict_overflow_p
)
12034 fold_overflow_warning (("assuming signed overflow does not "
12035 "occur when simplifying "
12036 "X % (power of two)"),
12037 WARN_STRICT_OVERFLOW_MISC
);
12038 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
12039 fold_convert_loc (loc
, type
, arg0
),
12040 fold_convert_loc (loc
, type
, mask
));
12048 if (integer_all_onesp (arg0
))
12049 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12053 /* Optimize -1 >> x for arithmetic right shifts. */
12054 if (integer_all_onesp (arg0
) && !TYPE_UNSIGNED (type
)
12055 && tree_expr_nonnegative_p (arg1
))
12056 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12057 /* ... fall through ... */
12061 if (integer_zerop (arg1
))
12062 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12063 if (integer_zerop (arg0
))
12064 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12066 /* Prefer vector1 << scalar to vector1 << vector2
12067 if vector2 is uniform. */
12068 if (VECTOR_TYPE_P (TREE_TYPE (arg1
))
12069 && (tem
= uniform_vector_p (arg1
)) != NULL_TREE
)
12070 return fold_build2_loc (loc
, code
, type
, op0
, tem
);
12072 /* Since negative shift count is not well-defined,
12073 don't try to compute it in the compiler. */
12074 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
12077 prec
= element_precision (type
);
12079 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12080 if (TREE_CODE (op0
) == code
&& tree_fits_uhwi_p (arg1
)
12081 && tree_to_uhwi (arg1
) < prec
12082 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
12083 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
12085 unsigned int low
= (tree_to_uhwi (TREE_OPERAND (arg0
, 1))
12086 + tree_to_uhwi (arg1
));
12088 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12089 being well defined. */
12092 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
12094 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
12095 return omit_one_operand_loc (loc
, type
, build_zero_cst (type
),
12096 TREE_OPERAND (arg0
, 0));
12101 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12102 build_int_cst (TREE_TYPE (arg1
), low
));
12105 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12106 into x & ((unsigned)-1 >> c) for unsigned types. */
12107 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
12108 || (TYPE_UNSIGNED (type
)
12109 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
12110 && tree_fits_uhwi_p (arg1
)
12111 && tree_to_uhwi (arg1
) < prec
12112 && tree_fits_uhwi_p (TREE_OPERAND (arg0
, 1))
12113 && tree_to_uhwi (TREE_OPERAND (arg0
, 1)) < prec
)
12115 HOST_WIDE_INT low0
= tree_to_uhwi (TREE_OPERAND (arg0
, 1));
12116 HOST_WIDE_INT low1
= tree_to_uhwi (arg1
);
12122 arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12124 lshift
= build_minus_one_cst (type
);
12125 lshift
= const_binop (code
, lshift
, arg1
);
12127 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, arg00
, lshift
);
12131 /* Rewrite an LROTATE_EXPR by a constant into an
12132 RROTATE_EXPR by a new constant. */
12133 if (code
== LROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
)
12135 tree tem
= build_int_cst (TREE_TYPE (arg1
), prec
);
12136 tem
= const_binop (MINUS_EXPR
, tem
, arg1
);
12137 return fold_build2_loc (loc
, RROTATE_EXPR
, type
, op0
, tem
);
12140 /* If we have a rotate of a bit operation with the rotate count and
12141 the second operand of the bit operation both constant,
12142 permute the two operations. */
12143 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12144 && (TREE_CODE (arg0
) == BIT_AND_EXPR
12145 || TREE_CODE (arg0
) == BIT_IOR_EXPR
12146 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12147 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12148 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
12149 fold_build2_loc (loc
, code
, type
,
12150 TREE_OPERAND (arg0
, 0), arg1
),
12151 fold_build2_loc (loc
, code
, type
,
12152 TREE_OPERAND (arg0
, 1), arg1
));
12154 /* Two consecutive rotates adding up to the some integer
12155 multiple of the precision of the type can be ignored. */
12156 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12157 && TREE_CODE (arg0
) == RROTATE_EXPR
12158 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12159 && wi::umod_trunc (wi::add (arg1
, TREE_OPERAND (arg0
, 1)),
12161 return TREE_OPERAND (arg0
, 0);
12163 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12164 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12165 if the latter can be further optimized. */
12166 if ((code
== LSHIFT_EXPR
|| code
== RSHIFT_EXPR
)
12167 && TREE_CODE (arg0
) == BIT_AND_EXPR
12168 && TREE_CODE (arg1
) == INTEGER_CST
12169 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12171 tree mask
= fold_build2_loc (loc
, code
, type
,
12172 fold_convert_loc (loc
, type
,
12173 TREE_OPERAND (arg0
, 1)),
12175 tree shift
= fold_build2_loc (loc
, code
, type
,
12176 fold_convert_loc (loc
, type
,
12177 TREE_OPERAND (arg0
, 0)),
12179 tem
= fold_binary_loc (loc
, BIT_AND_EXPR
, type
, shift
, mask
);
12187 if (operand_equal_p (arg0
, arg1
, 0))
12188 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12189 if (INTEGRAL_TYPE_P (type
)
12190 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), OEP_ONLY_CONST
))
12191 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12192 tem
= fold_minmax (loc
, MIN_EXPR
, type
, arg0
, arg1
);
12198 if (operand_equal_p (arg0
, arg1
, 0))
12199 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12200 if (INTEGRAL_TYPE_P (type
)
12201 && TYPE_MAX_VALUE (type
)
12202 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), OEP_ONLY_CONST
))
12203 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12204 tem
= fold_minmax (loc
, MAX_EXPR
, type
, arg0
, arg1
);
12209 case TRUTH_ANDIF_EXPR
:
12210 /* Note that the operands of this must be ints
12211 and their values must be 0 or 1.
12212 ("true" is a fixed value perhaps depending on the language.) */
12213 /* If first arg is constant zero, return it. */
12214 if (integer_zerop (arg0
))
12215 return fold_convert_loc (loc
, type
, arg0
);
12216 case TRUTH_AND_EXPR
:
12217 /* If either arg is constant true, drop it. */
12218 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12219 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12220 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
12221 /* Preserve sequence points. */
12222 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12223 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12224 /* If second arg is constant zero, result is zero, but first arg
12225 must be evaluated. */
12226 if (integer_zerop (arg1
))
12227 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12228 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12229 case will be handled here. */
12230 if (integer_zerop (arg0
))
12231 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12233 /* !X && X is always false. */
12234 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12235 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12236 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
12237 /* X && !X is always false. */
12238 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12239 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12240 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12242 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12243 means A >= Y && A != MAX, but in this case we know that
12246 if (!TREE_SIDE_EFFECTS (arg0
)
12247 && !TREE_SIDE_EFFECTS (arg1
))
12249 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
12250 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
12251 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
12253 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
12254 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
12255 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
12258 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12264 case TRUTH_ORIF_EXPR
:
12265 /* Note that the operands of this must be ints
12266 and their values must be 0 or true.
12267 ("true" is a fixed value perhaps depending on the language.) */
12268 /* If first arg is constant true, return it. */
12269 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12270 return fold_convert_loc (loc
, type
, arg0
);
12271 case TRUTH_OR_EXPR
:
12272 /* If either arg is constant zero, drop it. */
12273 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12274 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12275 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12276 /* Preserve sequence points. */
12277 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12278 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12279 /* If second arg is constant true, result is true, but we must
12280 evaluate first arg. */
12281 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12282 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12283 /* Likewise for first arg, but note this only occurs here for
12285 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12286 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12288 /* !X || X is always true. */
12289 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12290 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12291 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12292 /* X || !X is always true. */
12293 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12294 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12295 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12297 /* (X && !Y) || (!X && Y) is X ^ Y */
12298 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
12299 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
12301 tree a0
, a1
, l0
, l1
, n0
, n1
;
12303 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
12304 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
12306 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12307 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
12309 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
12310 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
12312 if ((operand_equal_p (n0
, a0
, 0)
12313 && operand_equal_p (n1
, a1
, 0))
12314 || (operand_equal_p (n0
, a1
, 0)
12315 && operand_equal_p (n1
, a0
, 0)))
12316 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
12319 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12325 case TRUTH_XOR_EXPR
:
12326 /* If the second arg is constant zero, drop it. */
12327 if (integer_zerop (arg1
))
12328 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12329 /* If the second arg is constant true, this is a logical inversion. */
12330 if (integer_onep (arg1
))
12332 tem
= invert_truthvalue_loc (loc
, arg0
);
12333 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
12335 /* Identical arguments cancel to zero. */
12336 if (operand_equal_p (arg0
, arg1
, 0))
12337 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12339 /* !X ^ X is always true. */
12340 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12341 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12342 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12344 /* X ^ !X is always true. */
12345 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12346 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12347 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12356 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12357 if (tem
!= NULL_TREE
)
12360 /* bool_var != 0 becomes bool_var. */
12361 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12362 && code
== NE_EXPR
)
12363 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12365 /* bool_var == 1 becomes bool_var. */
12366 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12367 && code
== EQ_EXPR
)
12368 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12370 /* bool_var != 1 becomes !bool_var. */
12371 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12372 && code
== NE_EXPR
)
12373 return fold_convert_loc (loc
, type
,
12374 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12375 TREE_TYPE (arg0
), arg0
));
12377 /* bool_var == 0 becomes !bool_var. */
12378 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12379 && code
== EQ_EXPR
)
12380 return fold_convert_loc (loc
, type
,
12381 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12382 TREE_TYPE (arg0
), arg0
));
12384 /* !exp != 0 becomes !exp */
12385 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
12386 && code
== NE_EXPR
)
12387 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12389 /* If this is an equality comparison of the address of two non-weak,
12390 unaliased symbols neither of which are extern (since we do not
12391 have access to attributes for externs), then we know the result. */
12392 if (TREE_CODE (arg0
) == ADDR_EXPR
12393 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
12394 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
12395 && ! lookup_attribute ("alias",
12396 DECL_ATTRIBUTES (TREE_OPERAND (arg0
, 0)))
12397 && ! DECL_EXTERNAL (TREE_OPERAND (arg0
, 0))
12398 && TREE_CODE (arg1
) == ADDR_EXPR
12399 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1
, 0))
12400 && ! DECL_WEAK (TREE_OPERAND (arg1
, 0))
12401 && ! lookup_attribute ("alias",
12402 DECL_ATTRIBUTES (TREE_OPERAND (arg1
, 0)))
12403 && ! DECL_EXTERNAL (TREE_OPERAND (arg1
, 0)))
12405 /* We know that we're looking at the address of two
12406 non-weak, unaliased, static _DECL nodes.
12408 It is both wasteful and incorrect to call operand_equal_p
12409 to compare the two ADDR_EXPR nodes. It is wasteful in that
12410 all we need to do is test pointer equality for the arguments
12411 to the two ADDR_EXPR nodes. It is incorrect to use
12412 operand_equal_p as that function is NOT equivalent to a
12413 C equality test. It can in fact return false for two
12414 objects which would test as equal using the C equality
12416 bool equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
12417 return constant_boolean_node (equal
12418 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
12422 /* Similarly for a NEGATE_EXPR. */
12423 if (TREE_CODE (arg0
) == NEGATE_EXPR
12424 && TREE_CODE (arg1
) == INTEGER_CST
12425 && 0 != (tem
= negate_expr (fold_convert_loc (loc
, TREE_TYPE (arg0
),
12427 && TREE_CODE (tem
) == INTEGER_CST
12428 && !TREE_OVERFLOW (tem
))
12429 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12431 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12432 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12433 && TREE_CODE (arg1
) == INTEGER_CST
12434 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12435 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12436 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
12437 fold_convert_loc (loc
,
12440 TREE_OPERAND (arg0
, 1)));
12442 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12443 if ((TREE_CODE (arg0
) == PLUS_EXPR
12444 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
12445 || TREE_CODE (arg0
) == MINUS_EXPR
)
12446 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12449 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
12450 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
12452 tree val
= TREE_OPERAND (arg0
, 1);
12453 return omit_two_operands_loc (loc
, type
,
12454 fold_build2_loc (loc
, code
, type
,
12456 build_int_cst (TREE_TYPE (val
),
12458 TREE_OPERAND (arg0
, 0), arg1
);
12461 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12462 if (TREE_CODE (arg0
) == MINUS_EXPR
12463 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
12464 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12467 && wi::extract_uhwi (TREE_OPERAND (arg0
, 0), 0, 1) == 1)
12469 return omit_two_operands_loc (loc
, type
,
12471 ? boolean_true_node
: boolean_false_node
,
12472 TREE_OPERAND (arg0
, 1), arg1
);
12475 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12476 if (TREE_CODE (arg0
) == ABS_EXPR
12477 && (integer_zerop (arg1
) || real_zerop (arg1
)))
12478 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
12480 /* If this is an EQ or NE comparison with zero and ARG0 is
12481 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12482 two operations, but the latter can be done in one less insn
12483 on machines that have only two-operand insns or on which a
12484 constant cannot be the first operand. */
12485 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12486 && integer_zerop (arg1
))
12488 tree arg00
= TREE_OPERAND (arg0
, 0);
12489 tree arg01
= TREE_OPERAND (arg0
, 1);
12490 if (TREE_CODE (arg00
) == LSHIFT_EXPR
12491 && integer_onep (TREE_OPERAND (arg00
, 0)))
12493 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
12494 arg01
, TREE_OPERAND (arg00
, 1));
12495 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12496 build_int_cst (TREE_TYPE (arg0
), 1));
12497 return fold_build2_loc (loc
, code
, type
,
12498 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12501 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
12502 && integer_onep (TREE_OPERAND (arg01
, 0)))
12504 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
12505 arg00
, TREE_OPERAND (arg01
, 1));
12506 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12507 build_int_cst (TREE_TYPE (arg0
), 1));
12508 return fold_build2_loc (loc
, code
, type
,
12509 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12514 /* If this is an NE or EQ comparison of zero against the result of a
12515 signed MOD operation whose second operand is a power of 2, make
12516 the MOD operation unsigned since it is simpler and equivalent. */
12517 if (integer_zerop (arg1
)
12518 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
12519 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
12520 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
12521 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
12522 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
12523 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12525 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
12526 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
12527 fold_convert_loc (loc
, newtype
,
12528 TREE_OPERAND (arg0
, 0)),
12529 fold_convert_loc (loc
, newtype
,
12530 TREE_OPERAND (arg0
, 1)));
12532 return fold_build2_loc (loc
, code
, type
, newmod
,
12533 fold_convert_loc (loc
, newtype
, arg1
));
12536 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12537 C1 is a valid shift constant, and C2 is a power of two, i.e.
12539 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12540 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
12541 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
12543 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12544 && integer_zerop (arg1
))
12546 tree itype
= TREE_TYPE (arg0
);
12547 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
12548 prec
= TYPE_PRECISION (itype
);
12550 /* Check for a valid shift count. */
12551 if (wi::ltu_p (arg001
, prec
))
12553 tree arg01
= TREE_OPERAND (arg0
, 1);
12554 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12555 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
12556 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12557 can be rewritten as (X & (C2 << C1)) != 0. */
12558 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
12560 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
12561 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
12562 return fold_build2_loc (loc
, code
, type
, tem
,
12563 fold_convert_loc (loc
, itype
, arg1
));
12565 /* Otherwise, for signed (arithmetic) shifts,
12566 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12567 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12568 else if (!TYPE_UNSIGNED (itype
))
12569 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
12570 arg000
, build_int_cst (itype
, 0));
12571 /* Otherwise, of unsigned (logical) shifts,
12572 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12573 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12575 return omit_one_operand_loc (loc
, type
,
12576 code
== EQ_EXPR
? integer_one_node
12577 : integer_zero_node
,
12582 /* If we have (A & C) == C where C is a power of 2, convert this into
12583 (A & C) != 0. Similarly for NE_EXPR. */
12584 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12585 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12586 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12587 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12588 arg0
, fold_convert_loc (loc
, TREE_TYPE (arg0
),
12589 integer_zero_node
));
12591 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12592 bit, then fold the expression into A < 0 or A >= 0. */
12593 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
, type
);
12597 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12598 Similarly for NE_EXPR. */
12599 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12600 && TREE_CODE (arg1
) == INTEGER_CST
12601 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12603 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
12604 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
12605 TREE_OPERAND (arg0
, 1));
12607 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12608 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
12610 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12611 if (integer_nonzerop (dandnotc
))
12612 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12615 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12616 Similarly for NE_EXPR. */
12617 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
12618 && TREE_CODE (arg1
) == INTEGER_CST
12619 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12621 tree notd
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
12623 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12624 TREE_OPERAND (arg0
, 1),
12625 fold_convert_loc (loc
, TREE_TYPE (arg0
), notd
));
12626 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12627 if (integer_nonzerop (candnotd
))
12628 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12631 /* If this is a comparison of a field, we may be able to simplify it. */
12632 if ((TREE_CODE (arg0
) == COMPONENT_REF
12633 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
12634 /* Handle the constant case even without -O
12635 to make sure the warnings are given. */
12636 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
12638 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
12643 /* Optimize comparisons of strlen vs zero to a compare of the
12644 first character of the string vs zero. To wit,
12645 strlen(ptr) == 0 => *ptr == 0
12646 strlen(ptr) != 0 => *ptr != 0
12647 Other cases should reduce to one of these two (or a constant)
12648 due to the return value of strlen being unsigned. */
12649 if (TREE_CODE (arg0
) == CALL_EXPR
12650 && integer_zerop (arg1
))
12652 tree fndecl
= get_callee_fndecl (arg0
);
12655 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
12656 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
12657 && call_expr_nargs (arg0
) == 1
12658 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
12660 tree iref
= build_fold_indirect_ref_loc (loc
,
12661 CALL_EXPR_ARG (arg0
, 0));
12662 return fold_build2_loc (loc
, code
, type
, iref
,
12663 build_int_cst (TREE_TYPE (iref
), 0));
12667 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12668 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12669 if (TREE_CODE (arg0
) == RSHIFT_EXPR
12670 && integer_zerop (arg1
)
12671 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12673 tree arg00
= TREE_OPERAND (arg0
, 0);
12674 tree arg01
= TREE_OPERAND (arg0
, 1);
12675 tree itype
= TREE_TYPE (arg00
);
12676 if (wi::eq_p (arg01
, element_precision (itype
) - 1))
12678 if (TYPE_UNSIGNED (itype
))
12680 itype
= signed_type_for (itype
);
12681 arg00
= fold_convert_loc (loc
, itype
, arg00
);
12683 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
12684 type
, arg00
, build_zero_cst (itype
));
12688 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12689 if (integer_zerop (arg1
)
12690 && TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12691 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12692 TREE_OPERAND (arg0
, 1));
12694 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12695 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12696 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12697 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12698 build_zero_cst (TREE_TYPE (arg0
)));
12699 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12700 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12701 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12702 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
12703 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 1),
12704 build_zero_cst (TREE_TYPE (arg0
)));
12706 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12707 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12708 && TREE_CODE (arg1
) == INTEGER_CST
12709 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12710 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12711 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg1
),
12712 TREE_OPERAND (arg0
, 1), arg1
));
12714 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12715 (X & C) == 0 when C is a single bit. */
12716 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12717 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
12718 && integer_zerop (arg1
)
12719 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12721 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12722 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
12723 TREE_OPERAND (arg0
, 1));
12724 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
12726 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12730 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12731 constant C is a power of two, i.e. a single bit. */
12732 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12733 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12734 && integer_zerop (arg1
)
12735 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12736 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12737 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12739 tree arg00
= TREE_OPERAND (arg0
, 0);
12740 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12741 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
12744 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12745 when is C is a power of two, i.e. a single bit. */
12746 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12747 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
12748 && integer_zerop (arg1
)
12749 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12750 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12751 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12753 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12754 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
12755 arg000
, TREE_OPERAND (arg0
, 1));
12756 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12757 tem
, build_int_cst (TREE_TYPE (tem
), 0));
12760 if (integer_zerop (arg1
)
12761 && tree_expr_nonzero_p (arg0
))
12763 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
12764 return omit_one_operand_loc (loc
, type
, res
, arg0
);
12767 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12768 if (TREE_CODE (arg0
) == NEGATE_EXPR
12769 && TREE_CODE (arg1
) == NEGATE_EXPR
)
12770 return fold_build2_loc (loc
, code
, type
,
12771 TREE_OPERAND (arg0
, 0),
12772 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12773 TREE_OPERAND (arg1
, 0)));
12775 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12776 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12777 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
12779 tree arg00
= TREE_OPERAND (arg0
, 0);
12780 tree arg01
= TREE_OPERAND (arg0
, 1);
12781 tree arg10
= TREE_OPERAND (arg1
, 0);
12782 tree arg11
= TREE_OPERAND (arg1
, 1);
12783 tree itype
= TREE_TYPE (arg0
);
12785 if (operand_equal_p (arg01
, arg11
, 0))
12786 return fold_build2_loc (loc
, code
, type
,
12787 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12788 fold_build2_loc (loc
,
12789 BIT_XOR_EXPR
, itype
,
12792 build_zero_cst (itype
));
12794 if (operand_equal_p (arg01
, arg10
, 0))
12795 return fold_build2_loc (loc
, code
, type
,
12796 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12797 fold_build2_loc (loc
,
12798 BIT_XOR_EXPR
, itype
,
12801 build_zero_cst (itype
));
12803 if (operand_equal_p (arg00
, arg11
, 0))
12804 return fold_build2_loc (loc
, code
, type
,
12805 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12806 fold_build2_loc (loc
,
12807 BIT_XOR_EXPR
, itype
,
12810 build_zero_cst (itype
));
12812 if (operand_equal_p (arg00
, arg10
, 0))
12813 return fold_build2_loc (loc
, code
, type
,
12814 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12815 fold_build2_loc (loc
,
12816 BIT_XOR_EXPR
, itype
,
12819 build_zero_cst (itype
));
12822 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12823 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
12825 tree arg00
= TREE_OPERAND (arg0
, 0);
12826 tree arg01
= TREE_OPERAND (arg0
, 1);
12827 tree arg10
= TREE_OPERAND (arg1
, 0);
12828 tree arg11
= TREE_OPERAND (arg1
, 1);
12829 tree itype
= TREE_TYPE (arg0
);
12831 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12832 operand_equal_p guarantees no side-effects so we don't need
12833 to use omit_one_operand on Z. */
12834 if (operand_equal_p (arg01
, arg11
, 0))
12835 return fold_build2_loc (loc
, code
, type
, arg00
,
12836 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12838 if (operand_equal_p (arg01
, arg10
, 0))
12839 return fold_build2_loc (loc
, code
, type
, arg00
,
12840 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12842 if (operand_equal_p (arg00
, arg11
, 0))
12843 return fold_build2_loc (loc
, code
, type
, arg01
,
12844 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12846 if (operand_equal_p (arg00
, arg10
, 0))
12847 return fold_build2_loc (loc
, code
, type
, arg01
,
12848 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12851 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12852 if (TREE_CODE (arg01
) == INTEGER_CST
12853 && TREE_CODE (arg11
) == INTEGER_CST
)
12855 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
12856 fold_convert_loc (loc
, itype
, arg11
));
12857 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
12858 return fold_build2_loc (loc
, code
, type
, tem
,
12859 fold_convert_loc (loc
, itype
, arg10
));
12863 /* Attempt to simplify equality/inequality comparisons of complex
12864 values. Only lower the comparison if the result is known or
12865 can be simplified to a single scalar comparison. */
12866 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
12867 || TREE_CODE (arg0
) == COMPLEX_CST
)
12868 && (TREE_CODE (arg1
) == COMPLEX_EXPR
12869 || TREE_CODE (arg1
) == COMPLEX_CST
))
12871 tree real0
, imag0
, real1
, imag1
;
12874 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
12876 real0
= TREE_OPERAND (arg0
, 0);
12877 imag0
= TREE_OPERAND (arg0
, 1);
12881 real0
= TREE_REALPART (arg0
);
12882 imag0
= TREE_IMAGPART (arg0
);
12885 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
12887 real1
= TREE_OPERAND (arg1
, 0);
12888 imag1
= TREE_OPERAND (arg1
, 1);
12892 real1
= TREE_REALPART (arg1
);
12893 imag1
= TREE_IMAGPART (arg1
);
12896 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
12897 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
12899 if (integer_zerop (rcond
))
12901 if (code
== EQ_EXPR
)
12902 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12904 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
12908 if (code
== NE_EXPR
)
12909 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12911 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
12915 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
12916 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
12918 if (integer_zerop (icond
))
12920 if (code
== EQ_EXPR
)
12921 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12923 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
12927 if (code
== NE_EXPR
)
12928 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12930 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
12941 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12942 if (tem
!= NULL_TREE
)
12945 /* Transform comparisons of the form X +- C CMP X. */
12946 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
12947 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12948 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
12949 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
))))
12950 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12951 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
12953 tree arg01
= TREE_OPERAND (arg0
, 1);
12954 enum tree_code code0
= TREE_CODE (arg0
);
12957 if (TREE_CODE (arg01
) == REAL_CST
)
12958 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
12960 is_positive
= tree_int_cst_sgn (arg01
);
12962 /* (X - c) > X becomes false. */
12963 if (code
== GT_EXPR
12964 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12965 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12967 if (TREE_CODE (arg01
) == INTEGER_CST
12968 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12969 fold_overflow_warning (("assuming signed overflow does not "
12970 "occur when assuming that (X - c) > X "
12971 "is always false"),
12972 WARN_STRICT_OVERFLOW_ALL
);
12973 return constant_boolean_node (0, type
);
12976 /* Likewise (X + c) < X becomes false. */
12977 if (code
== LT_EXPR
12978 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12979 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12981 if (TREE_CODE (arg01
) == INTEGER_CST
12982 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12983 fold_overflow_warning (("assuming signed overflow does not "
12984 "occur when assuming that "
12985 "(X + c) < X is always false"),
12986 WARN_STRICT_OVERFLOW_ALL
);
12987 return constant_boolean_node (0, type
);
12990 /* Convert (X - c) <= X to true. */
12991 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
12993 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12994 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12996 if (TREE_CODE (arg01
) == INTEGER_CST
12997 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12998 fold_overflow_warning (("assuming signed overflow does not "
12999 "occur when assuming that "
13000 "(X - c) <= X is always true"),
13001 WARN_STRICT_OVERFLOW_ALL
);
13002 return constant_boolean_node (1, type
);
13005 /* Convert (X + c) >= X to true. */
13006 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
13008 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
13009 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
13011 if (TREE_CODE (arg01
) == INTEGER_CST
13012 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13013 fold_overflow_warning (("assuming signed overflow does not "
13014 "occur when assuming that "
13015 "(X + c) >= X is always true"),
13016 WARN_STRICT_OVERFLOW_ALL
);
13017 return constant_boolean_node (1, type
);
13020 if (TREE_CODE (arg01
) == INTEGER_CST
)
13022 /* Convert X + c > X and X - c < X to true for integers. */
13023 if (code
== GT_EXPR
13024 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
13025 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
13027 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13028 fold_overflow_warning (("assuming signed overflow does "
13029 "not occur when assuming that "
13030 "(X + c) > X is always true"),
13031 WARN_STRICT_OVERFLOW_ALL
);
13032 return constant_boolean_node (1, type
);
13035 if (code
== LT_EXPR
13036 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
13037 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
13039 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13040 fold_overflow_warning (("assuming signed overflow does "
13041 "not occur when assuming that "
13042 "(X - c) < X is always true"),
13043 WARN_STRICT_OVERFLOW_ALL
);
13044 return constant_boolean_node (1, type
);
13047 /* Convert X + c <= X and X - c >= X to false for integers. */
13048 if (code
== LE_EXPR
13049 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
13050 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
13052 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13053 fold_overflow_warning (("assuming signed overflow does "
13054 "not occur when assuming that "
13055 "(X + c) <= X is always false"),
13056 WARN_STRICT_OVERFLOW_ALL
);
13057 return constant_boolean_node (0, type
);
13060 if (code
== GE_EXPR
13061 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
13062 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
13064 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
13065 fold_overflow_warning (("assuming signed overflow does "
13066 "not occur when assuming that "
13067 "(X - c) >= X is always false"),
13068 WARN_STRICT_OVERFLOW_ALL
);
13069 return constant_boolean_node (0, type
);
13074 /* Comparisons with the highest or lowest possible integer of
13075 the specified precision will have known values. */
13077 tree arg1_type
= TREE_TYPE (arg1
);
13078 unsigned int prec
= TYPE_PRECISION (arg1_type
);
13080 if (TREE_CODE (arg1
) == INTEGER_CST
13081 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
13083 wide_int max
= wi::max_value (arg1_type
);
13084 wide_int signed_max
= wi::max_value (prec
, SIGNED
);
13085 wide_int min
= wi::min_value (arg1_type
);
13087 if (wi::eq_p (arg1
, max
))
13091 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13094 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13097 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13100 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13102 /* The GE_EXPR and LT_EXPR cases above are not normally
13103 reached because of previous transformations. */
13108 else if (wi::eq_p (arg1
, max
- 1))
13112 arg1
= const_binop (PLUS_EXPR
, arg1
,
13113 build_int_cst (TREE_TYPE (arg1
), 1));
13114 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13115 fold_convert_loc (loc
,
13116 TREE_TYPE (arg1
), arg0
),
13119 arg1
= const_binop (PLUS_EXPR
, arg1
,
13120 build_int_cst (TREE_TYPE (arg1
), 1));
13121 return fold_build2_loc (loc
, NE_EXPR
, type
,
13122 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13128 else if (wi::eq_p (arg1
, min
))
13132 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13135 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13138 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13141 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13146 else if (wi::eq_p (arg1
, min
+ 1))
13150 arg1
= const_binop (MINUS_EXPR
, arg1
,
13151 build_int_cst (TREE_TYPE (arg1
), 1));
13152 return fold_build2_loc (loc
, NE_EXPR
, type
,
13153 fold_convert_loc (loc
,
13154 TREE_TYPE (arg1
), arg0
),
13157 arg1
= const_binop (MINUS_EXPR
, arg1
,
13158 build_int_cst (TREE_TYPE (arg1
), 1));
13159 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13160 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13167 else if (wi::eq_p (arg1
, signed_max
)
13168 && TYPE_UNSIGNED (arg1_type
)
13169 /* We will flip the signedness of the comparison operator
13170 associated with the mode of arg1, so the sign bit is
13171 specified by this mode. Check that arg1 is the signed
13172 max associated with this sign bit. */
13173 && prec
== GET_MODE_PRECISION (TYPE_MODE (arg1_type
))
13174 /* signed_type does not work on pointer types. */
13175 && INTEGRAL_TYPE_P (arg1_type
))
13177 /* The following case also applies to X < signed_max+1
13178 and X >= signed_max+1 because previous transformations. */
13179 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13181 tree st
= signed_type_for (arg1_type
);
13182 return fold_build2_loc (loc
,
13183 code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
13184 type
, fold_convert_loc (loc
, st
, arg0
),
13185 build_int_cst (st
, 0));
13191 /* If we are comparing an ABS_EXPR with a constant, we can
13192 convert all the cases into explicit comparisons, but they may
13193 well not be faster than doing the ABS and one comparison.
13194 But ABS (X) <= C is a range comparison, which becomes a subtraction
13195 and a comparison, and is probably faster. */
13196 if (code
== LE_EXPR
13197 && TREE_CODE (arg1
) == INTEGER_CST
13198 && TREE_CODE (arg0
) == ABS_EXPR
13199 && ! TREE_SIDE_EFFECTS (arg0
)
13200 && (0 != (tem
= negate_expr (arg1
)))
13201 && TREE_CODE (tem
) == INTEGER_CST
13202 && !TREE_OVERFLOW (tem
))
13203 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13204 build2 (GE_EXPR
, type
,
13205 TREE_OPERAND (arg0
, 0), tem
),
13206 build2 (LE_EXPR
, type
,
13207 TREE_OPERAND (arg0
, 0), arg1
));
13209 /* Convert ABS_EXPR<x> >= 0 to true. */
13210 strict_overflow_p
= false;
13211 if (code
== GE_EXPR
13212 && (integer_zerop (arg1
)
13213 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
13214 && real_zerop (arg1
)))
13215 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13217 if (strict_overflow_p
)
13218 fold_overflow_warning (("assuming signed overflow does not occur "
13219 "when simplifying comparison of "
13220 "absolute value and zero"),
13221 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13222 return omit_one_operand_loc (loc
, type
,
13223 constant_boolean_node (true, type
),
13227 /* Convert ABS_EXPR<x> < 0 to false. */
13228 strict_overflow_p
= false;
13229 if (code
== LT_EXPR
13230 && (integer_zerop (arg1
) || real_zerop (arg1
))
13231 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13233 if (strict_overflow_p
)
13234 fold_overflow_warning (("assuming signed overflow does not occur "
13235 "when simplifying comparison of "
13236 "absolute value and zero"),
13237 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13238 return omit_one_operand_loc (loc
, type
,
13239 constant_boolean_node (false, type
),
13243 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13244 and similarly for >= into !=. */
13245 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13246 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13247 && TREE_CODE (arg1
) == LSHIFT_EXPR
13248 && integer_onep (TREE_OPERAND (arg1
, 0)))
13249 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13250 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13251 TREE_OPERAND (arg1
, 1)),
13252 build_zero_cst (TREE_TYPE (arg0
)));
13254 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13255 otherwise Y might be >= # of bits in X's type and thus e.g.
13256 (unsigned char) (1 << Y) for Y 15 might be 0.
13257 If the cast is widening, then 1 << Y should have unsigned type,
13258 otherwise if Y is number of bits in the signed shift type minus 1,
13259 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13260 31 might be 0xffffffff80000000. */
13261 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13262 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13263 && CONVERT_EXPR_P (arg1
)
13264 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
13265 && (TYPE_PRECISION (TREE_TYPE (arg1
))
13266 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
13267 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
13268 || (TYPE_PRECISION (TREE_TYPE (arg1
))
13269 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
13270 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
13272 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13273 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
13274 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13275 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
13276 build_zero_cst (TREE_TYPE (arg0
)));
13281 case UNORDERED_EXPR
:
13289 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
13291 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
13292 if (t1
!= NULL_TREE
)
13296 /* If the first operand is NaN, the result is constant. */
13297 if (TREE_CODE (arg0
) == REAL_CST
13298 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
13299 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13301 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13302 ? integer_zero_node
13303 : integer_one_node
;
13304 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
13307 /* If the second operand is NaN, the result is constant. */
13308 if (TREE_CODE (arg1
) == REAL_CST
13309 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
13310 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13312 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13313 ? integer_zero_node
13314 : integer_one_node
;
13315 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
13318 /* Simplify unordered comparison of something with itself. */
13319 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
13320 && operand_equal_p (arg0
, arg1
, 0))
13321 return constant_boolean_node (1, type
);
13323 if (code
== LTGT_EXPR
13324 && !flag_trapping_math
13325 && operand_equal_p (arg0
, arg1
, 0))
13326 return constant_boolean_node (0, type
);
13328 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13330 tree targ0
= strip_float_extensions (arg0
);
13331 tree targ1
= strip_float_extensions (arg1
);
13332 tree newtype
= TREE_TYPE (targ0
);
13334 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
13335 newtype
= TREE_TYPE (targ1
);
13337 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
13338 return fold_build2_loc (loc
, code
, type
,
13339 fold_convert_loc (loc
, newtype
, targ0
),
13340 fold_convert_loc (loc
, newtype
, targ1
));
13345 case COMPOUND_EXPR
:
13346 /* When pedantic, a compound expression can be neither an lvalue
13347 nor an integer constant expression. */
13348 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
13350 /* Don't let (0, 0) be null pointer constant. */
13351 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
13352 : fold_convert_loc (loc
, type
, arg1
);
13353 return pedantic_non_lvalue_loc (loc
, tem
);
13356 if ((TREE_CODE (arg0
) == REAL_CST
13357 && TREE_CODE (arg1
) == REAL_CST
)
13358 || (TREE_CODE (arg0
) == INTEGER_CST
13359 && TREE_CODE (arg1
) == INTEGER_CST
))
13360 return build_complex (type
, arg0
, arg1
);
13361 if (TREE_CODE (arg0
) == REALPART_EXPR
13362 && TREE_CODE (arg1
) == IMAGPART_EXPR
13363 && TREE_TYPE (TREE_OPERAND (arg0
, 0)) == type
13364 && operand_equal_p (TREE_OPERAND (arg0
, 0),
13365 TREE_OPERAND (arg1
, 0), 0))
13366 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 0),
13367 TREE_OPERAND (arg1
, 0));
13371 /* An ASSERT_EXPR should never be passed to fold_binary. */
13372 gcc_unreachable ();
13374 case VEC_PACK_TRUNC_EXPR
:
13375 case VEC_PACK_FIX_TRUNC_EXPR
:
13377 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13380 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
/ 2
13381 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
/ 2);
13382 if (TREE_CODE (arg0
) != VECTOR_CST
|| TREE_CODE (arg1
) != VECTOR_CST
)
13385 elts
= XALLOCAVEC (tree
, nelts
);
13386 if (!vec_cst_ctor_to_array (arg0
, elts
)
13387 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
/ 2))
13390 for (i
= 0; i
< nelts
; i
++)
13392 elts
[i
] = fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
13393 ? NOP_EXPR
: FIX_TRUNC_EXPR
,
13394 TREE_TYPE (type
), elts
[i
]);
13395 if (elts
[i
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[i
]))
13399 return build_vector (type
, elts
);
13402 case VEC_WIDEN_MULT_LO_EXPR
:
13403 case VEC_WIDEN_MULT_HI_EXPR
:
13404 case VEC_WIDEN_MULT_EVEN_EXPR
:
13405 case VEC_WIDEN_MULT_ODD_EXPR
:
13407 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
);
13408 unsigned int out
, ofs
, scale
;
13411 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)) == nelts
* 2
13412 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)) == nelts
* 2);
13413 if (TREE_CODE (arg0
) != VECTOR_CST
|| TREE_CODE (arg1
) != VECTOR_CST
)
13416 elts
= XALLOCAVEC (tree
, nelts
* 4);
13417 if (!vec_cst_ctor_to_array (arg0
, elts
)
13418 || !vec_cst_ctor_to_array (arg1
, elts
+ nelts
* 2))
13421 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
13422 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? nelts
: 0;
13423 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
13424 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : nelts
;
13425 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
13426 scale
= 1, ofs
= 0;
13427 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13428 scale
= 1, ofs
= 1;
13430 for (out
= 0; out
< nelts
; out
++)
13432 unsigned int in1
= (out
<< scale
) + ofs
;
13433 unsigned int in2
= in1
+ nelts
* 2;
13436 t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in1
]);
13437 t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), elts
[in2
]);
13439 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
13441 elts
[out
] = const_binop (MULT_EXPR
, t1
, t2
);
13442 if (elts
[out
] == NULL_TREE
|| !CONSTANT_CLASS_P (elts
[out
]))
13446 return build_vector (type
, elts
);
13451 } /* switch (code) */
13454 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13455 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13459 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
13461 switch (TREE_CODE (*tp
))
13467 *walk_subtrees
= 0;
13469 /* ... fall through ... */
13476 /* Return whether the sub-tree ST contains a label which is accessible from
13477 outside the sub-tree. */
13480 contains_label_p (tree st
)
13483 (walk_tree_without_duplicates (&st
, contains_label_1
, NULL
) != NULL_TREE
);
13486 /* Fold a ternary expression of code CODE and type TYPE with operands
13487 OP0, OP1, and OP2. Return the folded expression if folding is
13488 successful. Otherwise, return NULL_TREE. */
13491 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
13492 tree op0
, tree op1
, tree op2
)
13495 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
13496 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13498 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
13499 && TREE_CODE_LENGTH (code
) == 3);
13501 /* If this is a commutative operation, and OP0 is a constant, move it
13502 to OP1 to reduce the number of tests below. */
13503 if (commutative_ternary_tree_code (code
)
13504 && tree_swap_operands_p (op0
, op1
, true))
13505 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
13507 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
13511 /* Strip any conversions that don't change the mode. This is safe
13512 for every expression, except for a comparison expression because
13513 its signedness is derived from its operands. So, in the latter
13514 case, only strip conversions that don't change the signedness.
13516 Note that this is done as an internal manipulation within the
13517 constant folder, in order to find the simplest representation of
13518 the arguments so that their form can be studied. In any cases,
13519 the appropriate type conversions should be put back in the tree
13520 that will get out of the constant folder. */
13541 case COMPONENT_REF
:
13542 if (TREE_CODE (arg0
) == CONSTRUCTOR
13543 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
13545 unsigned HOST_WIDE_INT idx
;
13547 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
13554 case VEC_COND_EXPR
:
13555 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13556 so all simple results must be passed through pedantic_non_lvalue. */
13557 if (TREE_CODE (arg0
) == INTEGER_CST
)
13559 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
13560 tem
= integer_zerop (arg0
) ? op2
: op1
;
13561 /* Only optimize constant conditions when the selected branch
13562 has the same type as the COND_EXPR. This avoids optimizing
13563 away "c ? x : throw", where the throw has a void type.
13564 Avoid throwing away that operand which contains label. */
13565 if ((!TREE_SIDE_EFFECTS (unused_op
)
13566 || !contains_label_p (unused_op
))
13567 && (! VOID_TYPE_P (TREE_TYPE (tem
))
13568 || VOID_TYPE_P (type
)))
13569 return pedantic_non_lvalue_loc (loc
, tem
);
13572 else if (TREE_CODE (arg0
) == VECTOR_CST
)
13574 if (integer_all_onesp (arg0
))
13575 return pedantic_omit_one_operand_loc (loc
, type
, arg1
, arg2
);
13576 if (integer_zerop (arg0
))
13577 return pedantic_omit_one_operand_loc (loc
, type
, arg2
, arg1
);
13579 if ((TREE_CODE (arg1
) == VECTOR_CST
13580 || TREE_CODE (arg1
) == CONSTRUCTOR
)
13581 && (TREE_CODE (arg2
) == VECTOR_CST
13582 || TREE_CODE (arg2
) == CONSTRUCTOR
))
13584 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
;
13585 unsigned char *sel
= XALLOCAVEC (unsigned char, nelts
);
13586 gcc_assert (nelts
== VECTOR_CST_NELTS (arg0
));
13587 for (i
= 0; i
< nelts
; i
++)
13589 tree val
= VECTOR_CST_ELT (arg0
, i
);
13590 if (integer_all_onesp (val
))
13592 else if (integer_zerop (val
))
13593 sel
[i
] = nelts
+ i
;
13594 else /* Currently unreachable. */
13597 tree t
= fold_vec_perm (type
, arg1
, arg2
, sel
);
13598 if (t
!= NULL_TREE
)
13603 if (operand_equal_p (arg1
, op2
, 0))
13604 return pedantic_omit_one_operand_loc (loc
, type
, arg1
, arg0
);
13606 /* If we have A op B ? A : C, we may be able to convert this to a
13607 simpler expression, depending on the operation and the values
13608 of B and C. Signed zeros prevent all of these transformations,
13609 for reasons given above each one.
13611 Also try swapping the arguments and inverting the conditional. */
13612 if (COMPARISON_CLASS_P (arg0
)
13613 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13614 arg1
, TREE_OPERAND (arg0
, 1))
13615 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1
))))
13617 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
13622 if (COMPARISON_CLASS_P (arg0
)
13623 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13625 TREE_OPERAND (arg0
, 1))
13626 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2
))))
13628 location_t loc0
= expr_location_or (arg0
, loc
);
13629 tem
= fold_invert_truthvalue (loc0
, arg0
);
13630 if (tem
&& COMPARISON_CLASS_P (tem
))
13632 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
13638 /* If the second operand is simpler than the third, swap them
13639 since that produces better jump optimization results. */
13640 if (truth_value_p (TREE_CODE (arg0
))
13641 && tree_swap_operands_p (op1
, op2
, false))
13643 location_t loc0
= expr_location_or (arg0
, loc
);
13644 /* See if this can be inverted. If it can't, possibly because
13645 it was a floating-point inequality comparison, don't do
13647 tem
= fold_invert_truthvalue (loc0
, arg0
);
13649 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
13652 /* Convert A ? 1 : 0 to simply A. */
13653 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
13654 : (integer_onep (op1
)
13655 && !VECTOR_TYPE_P (type
)))
13656 && integer_zerop (op2
)
13657 /* If we try to convert OP0 to our type, the
13658 call to fold will try to move the conversion inside
13659 a COND, which will recurse. In that case, the COND_EXPR
13660 is probably the best choice, so leave it alone. */
13661 && type
== TREE_TYPE (arg0
))
13662 return pedantic_non_lvalue_loc (loc
, arg0
);
13664 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13665 over COND_EXPR in cases such as floating point comparisons. */
13666 if (integer_zerop (op1
)
13667 && (code
== VEC_COND_EXPR
? integer_all_onesp (op2
)
13668 : (integer_onep (op2
)
13669 && !VECTOR_TYPE_P (type
)))
13670 && truth_value_p (TREE_CODE (arg0
)))
13671 return pedantic_non_lvalue_loc (loc
,
13672 fold_convert_loc (loc
, type
,
13673 invert_truthvalue_loc (loc
,
13676 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13677 if (TREE_CODE (arg0
) == LT_EXPR
13678 && integer_zerop (TREE_OPERAND (arg0
, 1))
13679 && integer_zerop (op2
)
13680 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
13682 /* sign_bit_p looks through both zero and sign extensions,
13683 but for this optimization only sign extensions are
13685 tree tem2
= TREE_OPERAND (arg0
, 0);
13686 while (tem
!= tem2
)
13688 if (TREE_CODE (tem2
) != NOP_EXPR
13689 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
13694 tem2
= TREE_OPERAND (tem2
, 0);
13696 /* sign_bit_p only checks ARG1 bits within A's precision.
13697 If <sign bit of A> has wider type than A, bits outside
13698 of A's precision in <sign bit of A> need to be checked.
13699 If they are all 0, this optimization needs to be done
13700 in unsigned A's type, if they are all 1 in signed A's type,
13701 otherwise this can't be done. */
13703 && TYPE_PRECISION (TREE_TYPE (tem
))
13704 < TYPE_PRECISION (TREE_TYPE (arg1
))
13705 && TYPE_PRECISION (TREE_TYPE (tem
))
13706 < TYPE_PRECISION (type
))
13708 int inner_width
, outer_width
;
13711 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
13712 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
13713 if (outer_width
> TYPE_PRECISION (type
))
13714 outer_width
= TYPE_PRECISION (type
);
13716 wide_int mask
= wi::shifted_mask
13717 (inner_width
, outer_width
- inner_width
, false,
13718 TYPE_PRECISION (TREE_TYPE (arg1
)));
13720 wide_int common
= mask
& arg1
;
13721 if (common
== mask
)
13723 tem_type
= signed_type_for (TREE_TYPE (tem
));
13724 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13726 else if (common
== 0)
13728 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
13729 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13737 fold_convert_loc (loc
, type
,
13738 fold_build2_loc (loc
, BIT_AND_EXPR
,
13739 TREE_TYPE (tem
), tem
,
13740 fold_convert_loc (loc
,
13745 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13746 already handled above. */
13747 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13748 && integer_onep (TREE_OPERAND (arg0
, 1))
13749 && integer_zerop (op2
)
13750 && integer_pow2p (arg1
))
13752 tree tem
= TREE_OPERAND (arg0
, 0);
13754 if (TREE_CODE (tem
) == RSHIFT_EXPR
13755 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
13756 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
13757 tree_to_uhwi (TREE_OPERAND (tem
, 1)))
13758 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
13759 TREE_OPERAND (tem
, 0), arg1
);
13762 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13763 is probably obsolete because the first operand should be a
13764 truth value (that's why we have the two cases above), but let's
13765 leave it in until we can confirm this for all front-ends. */
13766 if (integer_zerop (op2
)
13767 && TREE_CODE (arg0
) == NE_EXPR
13768 && integer_zerop (TREE_OPERAND (arg0
, 1))
13769 && integer_pow2p (arg1
)
13770 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
13771 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
13772 arg1
, OEP_ONLY_CONST
))
13773 return pedantic_non_lvalue_loc (loc
,
13774 fold_convert_loc (loc
, type
,
13775 TREE_OPERAND (arg0
, 0)));
13777 /* Disable the transformations below for vectors, since
13778 fold_binary_op_with_conditional_arg may undo them immediately,
13779 yielding an infinite loop. */
13780 if (code
== VEC_COND_EXPR
)
13783 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13784 if (integer_zerop (op2
)
13785 && truth_value_p (TREE_CODE (arg0
))
13786 && truth_value_p (TREE_CODE (arg1
))
13787 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13788 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
13789 : TRUTH_ANDIF_EXPR
,
13790 type
, fold_convert_loc (loc
, type
, arg0
), arg1
);
13792 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13793 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
13794 && truth_value_p (TREE_CODE (arg0
))
13795 && truth_value_p (TREE_CODE (arg1
))
13796 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13798 location_t loc0
= expr_location_or (arg0
, loc
);
13799 /* Only perform transformation if ARG0 is easily inverted. */
13800 tem
= fold_invert_truthvalue (loc0
, arg0
);
13802 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13805 type
, fold_convert_loc (loc
, type
, tem
),
13809 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13810 if (integer_zerop (arg1
)
13811 && truth_value_p (TREE_CODE (arg0
))
13812 && truth_value_p (TREE_CODE (op2
))
13813 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13815 location_t loc0
= expr_location_or (arg0
, loc
);
13816 /* Only perform transformation if ARG0 is easily inverted. */
13817 tem
= fold_invert_truthvalue (loc0
, arg0
);
13819 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13820 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
13821 type
, fold_convert_loc (loc
, type
, tem
),
13825 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13826 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
13827 && truth_value_p (TREE_CODE (arg0
))
13828 && truth_value_p (TREE_CODE (op2
))
13829 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
13830 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
13831 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
13832 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
13837 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13838 of fold_ternary on them. */
13839 gcc_unreachable ();
13841 case BIT_FIELD_REF
:
13842 if ((TREE_CODE (arg0
) == VECTOR_CST
13843 || (TREE_CODE (arg0
) == CONSTRUCTOR
13844 && TREE_CODE (TREE_TYPE (arg0
)) == VECTOR_TYPE
))
13845 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
13846 || (TREE_CODE (type
) == VECTOR_TYPE
13847 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
)))))
13849 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
13850 unsigned HOST_WIDE_INT width
= tree_to_uhwi (TYPE_SIZE (eltype
));
13851 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
13852 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
13855 && (idx
% width
) == 0
13856 && (n
% width
) == 0
13857 && ((idx
+ n
) / width
) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
13862 if (TREE_CODE (arg0
) == VECTOR_CST
)
13865 return VECTOR_CST_ELT (arg0
, idx
);
13867 tree
*vals
= XALLOCAVEC (tree
, n
);
13868 for (unsigned i
= 0; i
< n
; ++i
)
13869 vals
[i
] = VECTOR_CST_ELT (arg0
, idx
+ i
);
13870 return build_vector (type
, vals
);
13873 /* Constructor elements can be subvectors. */
13874 unsigned HOST_WIDE_INT k
= 1;
13875 if (CONSTRUCTOR_NELTS (arg0
) != 0)
13877 tree cons_elem
= TREE_TYPE (CONSTRUCTOR_ELT (arg0
, 0)->value
);
13878 if (TREE_CODE (cons_elem
) == VECTOR_TYPE
)
13879 k
= TYPE_VECTOR_SUBPARTS (cons_elem
);
13882 /* We keep an exact subset of the constructor elements. */
13883 if ((idx
% k
) == 0 && (n
% k
) == 0)
13885 if (CONSTRUCTOR_NELTS (arg0
) == 0)
13886 return build_constructor (type
, NULL
);
13891 if (idx
< CONSTRUCTOR_NELTS (arg0
))
13892 return CONSTRUCTOR_ELT (arg0
, idx
)->value
;
13893 return build_zero_cst (type
);
13896 vec
<constructor_elt
, va_gc
> *vals
;
13897 vec_alloc (vals
, n
);
13898 for (unsigned i
= 0;
13899 i
< n
&& idx
+ i
< CONSTRUCTOR_NELTS (arg0
);
13901 CONSTRUCTOR_APPEND_ELT (vals
, NULL_TREE
,
13903 (arg0
, idx
+ i
)->value
);
13904 return build_constructor (type
, vals
);
13906 /* The bitfield references a single constructor element. */
13907 else if (idx
+ n
<= (idx
/ k
+ 1) * k
)
13909 if (CONSTRUCTOR_NELTS (arg0
) <= idx
/ k
)
13910 return build_zero_cst (type
);
13912 return CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
;
13914 return fold_build3_loc (loc
, code
, type
,
13915 CONSTRUCTOR_ELT (arg0
, idx
/ k
)->value
, op1
,
13916 build_int_cst (TREE_TYPE (op2
), (idx
% k
) * width
));
13921 /* A bit-field-ref that referenced the full argument can be stripped. */
13922 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
13923 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_to_uhwi (arg1
)
13924 && integer_zerop (op2
))
13925 return fold_convert_loc (loc
, type
, arg0
);
13927 /* On constants we can use native encode/interpret to constant
13928 fold (nearly) all BIT_FIELD_REFs. */
13929 if (CONSTANT_CLASS_P (arg0
)
13930 && can_native_interpret_type_p (type
)
13931 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)))
13932 /* This limitation should not be necessary, we just need to
13933 round this up to mode size. */
13934 && tree_to_uhwi (op1
) % BITS_PER_UNIT
== 0
13935 /* Need bit-shifting of the buffer to relax the following. */
13936 && tree_to_uhwi (op2
) % BITS_PER_UNIT
== 0)
13938 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
13939 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
13940 unsigned HOST_WIDE_INT clen
;
13941 clen
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0
)));
13942 /* ??? We cannot tell native_encode_expr to start at
13943 some random byte only. So limit us to a reasonable amount
13947 unsigned char *b
= XALLOCAVEC (unsigned char, clen
);
13948 unsigned HOST_WIDE_INT len
= native_encode_expr (arg0
, b
, clen
);
13950 && len
* BITS_PER_UNIT
>= bitpos
+ bitsize
)
13952 tree v
= native_interpret_expr (type
,
13953 b
+ bitpos
/ BITS_PER_UNIT
,
13954 bitsize
/ BITS_PER_UNIT
);
13964 /* For integers we can decompose the FMA if possible. */
13965 if (TREE_CODE (arg0
) == INTEGER_CST
13966 && TREE_CODE (arg1
) == INTEGER_CST
)
13967 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
13968 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
13969 if (integer_zerop (arg2
))
13970 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
13972 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
13974 case VEC_PERM_EXPR
:
13975 if (TREE_CODE (arg2
) == VECTOR_CST
)
13977 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
, mask
, mask2
;
13978 unsigned char *sel
= XALLOCAVEC (unsigned char, 2 * nelts
);
13979 unsigned char *sel2
= sel
+ nelts
;
13980 bool need_mask_canon
= false;
13981 bool need_mask_canon2
= false;
13982 bool all_in_vec0
= true;
13983 bool all_in_vec1
= true;
13984 bool maybe_identity
= true;
13985 bool single_arg
= (op0
== op1
);
13986 bool changed
= false;
13988 mask2
= 2 * nelts
- 1;
13989 mask
= single_arg
? (nelts
- 1) : mask2
;
13990 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
13991 for (i
= 0; i
< nelts
; i
++)
13993 tree val
= VECTOR_CST_ELT (arg2
, i
);
13994 if (TREE_CODE (val
) != INTEGER_CST
)
13997 /* Make sure that the perm value is in an acceptable
14000 need_mask_canon
|= wi::gtu_p (t
, mask
);
14001 need_mask_canon2
|= wi::gtu_p (t
, mask2
);
14002 sel
[i
] = t
.to_uhwi () & mask
;
14003 sel2
[i
] = t
.to_uhwi () & mask2
;
14005 if (sel
[i
] < nelts
)
14006 all_in_vec1
= false;
14008 all_in_vec0
= false;
14010 if ((sel
[i
] & (nelts
-1)) != i
)
14011 maybe_identity
= false;
14014 if (maybe_identity
)
14024 else if (all_in_vec1
)
14027 for (i
= 0; i
< nelts
; i
++)
14029 need_mask_canon
= true;
14032 if ((TREE_CODE (op0
) == VECTOR_CST
14033 || TREE_CODE (op0
) == CONSTRUCTOR
)
14034 && (TREE_CODE (op1
) == VECTOR_CST
14035 || TREE_CODE (op1
) == CONSTRUCTOR
))
14037 tree t
= fold_vec_perm (type
, op0
, op1
, sel
);
14038 if (t
!= NULL_TREE
)
14042 if (op0
== op1
&& !single_arg
)
14045 /* Some targets are deficient and fail to expand a single
14046 argument permutation while still allowing an equivalent
14047 2-argument version. */
14048 if (need_mask_canon
&& arg2
== op2
14049 && !can_vec_perm_p (TYPE_MODE (type
), false, sel
)
14050 && can_vec_perm_p (TYPE_MODE (type
), false, sel2
))
14052 need_mask_canon
= need_mask_canon2
;
14056 if (need_mask_canon
&& arg2
== op2
)
14058 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
14059 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
14060 for (i
= 0; i
< nelts
; i
++)
14061 tsel
[i
] = build_int_cst (eltype
, sel
[i
]);
14062 op2
= build_vector (TREE_TYPE (arg2
), tsel
);
14067 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
14073 } /* switch (code) */
14076 /* Perform constant folding and related simplification of EXPR.
14077 The related simplifications include x*1 => x, x*0 => 0, etc.,
14078 and application of the associative law.
14079 NOP_EXPR conversions may be removed freely (as long as we
14080 are careful not to change the type of the overall expression).
14081 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14082 but we can constant-fold them if they have constant operands. */
14084 #ifdef ENABLE_FOLD_CHECKING
14085 # define fold(x) fold_1 (x)
14086 static tree
fold_1 (tree
);
14092 const tree t
= expr
;
14093 enum tree_code code
= TREE_CODE (t
);
14094 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
14096 location_t loc
= EXPR_LOCATION (expr
);
14098 /* Return right away if a constant. */
14099 if (kind
== tcc_constant
)
14102 /* CALL_EXPR-like objects with variable numbers of operands are
14103 treated specially. */
14104 if (kind
== tcc_vl_exp
)
14106 if (code
== CALL_EXPR
)
14108 tem
= fold_call_expr (loc
, expr
, false);
14109 return tem
? tem
: expr
;
14114 if (IS_EXPR_CODE_CLASS (kind
))
14116 tree type
= TREE_TYPE (t
);
14117 tree op0
, op1
, op2
;
14119 switch (TREE_CODE_LENGTH (code
))
14122 op0
= TREE_OPERAND (t
, 0);
14123 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14124 return tem
? tem
: expr
;
14126 op0
= TREE_OPERAND (t
, 0);
14127 op1
= TREE_OPERAND (t
, 1);
14128 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14129 return tem
? tem
: expr
;
14131 op0
= TREE_OPERAND (t
, 0);
14132 op1
= TREE_OPERAND (t
, 1);
14133 op2
= TREE_OPERAND (t
, 2);
14134 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14135 return tem
? tem
: expr
;
14145 tree op0
= TREE_OPERAND (t
, 0);
14146 tree op1
= TREE_OPERAND (t
, 1);
14148 if (TREE_CODE (op1
) == INTEGER_CST
14149 && TREE_CODE (op0
) == CONSTRUCTOR
14150 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
14152 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (op0
);
14153 unsigned HOST_WIDE_INT end
= vec_safe_length (elts
);
14154 unsigned HOST_WIDE_INT begin
= 0;
14156 /* Find a matching index by means of a binary search. */
14157 while (begin
!= end
)
14159 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
14160 tree index
= (*elts
)[middle
].index
;
14162 if (TREE_CODE (index
) == INTEGER_CST
14163 && tree_int_cst_lt (index
, op1
))
14164 begin
= middle
+ 1;
14165 else if (TREE_CODE (index
) == INTEGER_CST
14166 && tree_int_cst_lt (op1
, index
))
14168 else if (TREE_CODE (index
) == RANGE_EXPR
14169 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
14170 begin
= middle
+ 1;
14171 else if (TREE_CODE (index
) == RANGE_EXPR
14172 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
14175 return (*elts
)[middle
].value
;
14182 /* Return a VECTOR_CST if possible. */
14185 tree type
= TREE_TYPE (t
);
14186 if (TREE_CODE (type
) != VECTOR_TYPE
)
14189 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
14190 unsigned HOST_WIDE_INT idx
, pos
= 0;
14193 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), idx
, value
)
14195 if (!CONSTANT_CLASS_P (value
))
14197 if (TREE_CODE (value
) == VECTOR_CST
)
14199 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
14200 vec
[pos
++] = VECTOR_CST_ELT (value
, i
);
14203 vec
[pos
++] = value
;
14205 for (; pos
< TYPE_VECTOR_SUBPARTS (type
); ++pos
)
14206 vec
[pos
] = build_zero_cst (TREE_TYPE (type
));
14208 return build_vector (type
, vec
);
14212 return fold (DECL_INITIAL (t
));
14216 } /* switch (code) */
14219 #ifdef ENABLE_FOLD_CHECKING
14222 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
14223 hash_table
<pointer_hash
<const tree_node
> > *);
14224 static void fold_check_failed (const_tree
, const_tree
);
14225 void print_fold_checksum (const_tree
);
14227 /* When --enable-checking=fold, compute a digest of expr before
14228 and after actual fold call to see if fold did not accidentally
14229 change original expr. */
14235 struct md5_ctx ctx
;
14236 unsigned char checksum_before
[16], checksum_after
[16];
14237 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14239 md5_init_ctx (&ctx
);
14240 fold_checksum_tree (expr
, &ctx
, &ht
);
14241 md5_finish_ctx (&ctx
, checksum_before
);
14244 ret
= fold_1 (expr
);
14246 md5_init_ctx (&ctx
);
14247 fold_checksum_tree (expr
, &ctx
, &ht
);
14248 md5_finish_ctx (&ctx
, checksum_after
);
14250 if (memcmp (checksum_before
, checksum_after
, 16))
14251 fold_check_failed (expr
, ret
);
/* Compute the MD5 digest of tree EXPR (walked via fold_checksum_tree,
   using a fresh visited-node hash table) and print it to stderr as
   lowercase hex.  Debug aid used with ENABLE_FOLD_CHECKING.
   NOTE(review): the extracted text below is line-fragmented and missing
   structural lines (return type, braces); code kept byte-identical.  */
14257 print_fold_checksum (const_tree expr
)
14259 struct md5_ctx ctx
;
14260 unsigned char checksum
[16], cnt
;
14261 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14263 md5_init_ctx (&ctx
);
14264 fold_checksum_tree (expr
, &ctx
, &ht
);
14265 md5_finish_ctx (&ctx
, checksum
);
/* Emit the 16 digest bytes as two hex digits each, then a newline.  */
14266 for (cnt
= 0; cnt
< 16; ++cnt
)
14267 fprintf (stderr
, "%02x", checksum
[cnt
]);
14268 putc ('\n', stderr
);
/* Report that fold modified its input tree in place: under
   ENABLE_FOLD_CHECKING the before/after checksums of EXPR differed
   after producing RET.  Aborts via internal_error; both parameters are
   otherwise unused (ATTRIBUTE_UNUSED).
   NOTE(review): fragmented extraction; braces dropped.  Code kept
   byte-identical.  */
14272 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
14274 internal_error ("fold check: original tree changed by fold");
/* Accumulate into *CTX an MD5 digest of tree EXPR and everything
   reachable from it, using hash table *HT to avoid revisiting shared
   nodes.  Fields that fold is legitimately allowed to modify
   (DECL_ASSEMBLER_NAME, TYPE_POINTER_TO, TYPE_REFERENCE_TO, cached
   type values, TYPE_NEXT_VARIANT, placeholder flag) are masked out by
   checksumming a scrubbed stack copy in BUF instead of the original.
   NOTE(review): the extraction dropped many structural lines here
   (braces, several case labels, the recursive_label target, local
   declarations of i/len/tmp); the code below is kept byte-identical
   to the garbled original.  */
14278 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
14279 hash_table
<pointer_hash
<const tree_node
> > *ht
)
14281 const tree_node
**slot
;
14282 enum tree_code code
;
14283 union tree_node buf
;
/* Deduplicate: each node is entered into *HT once.  */
14289 slot
= ht
->find_slot (expr
, INSERT
);
14293 code
= TREE_CODE (expr
);
14294 if (TREE_CODE_CLASS (code
) == tcc_declaration
14295 && DECL_ASSEMBLER_NAME_SET_P (expr
))
14297 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14298 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14299 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
14300 expr
= (tree
) &buf
;
14302 else if (TREE_CODE_CLASS (code
) == tcc_type
14303 && (TYPE_POINTER_TO (expr
)
14304 || TYPE_REFERENCE_TO (expr
)
14305 || TYPE_CACHED_VALUES_P (expr
)
14306 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
14307 || TYPE_NEXT_VARIANT (expr
)))
14309 /* Allow these fields to be modified. */
14311 memcpy ((char *) &buf
, expr
, tree_size (expr
));
14312 expr
= tmp
= (tree
) &buf
;
14313 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
14314 TYPE_POINTER_TO (tmp
) = NULL
;
14315 TYPE_REFERENCE_TO (tmp
) = NULL
;
14316 TYPE_NEXT_VARIANT (tmp
) = NULL
;
14317 if (TYPE_CACHED_VALUES_P (tmp
))
14319 TYPE_CACHED_VALUES_P (tmp
) = 0;
14320 TYPE_CACHED_VALUES (tmp
) = NULL
;
/* Hash the node's raw bytes, then recurse into its type and chain.  */
14323 md5_process_bytes (expr
, tree_size (expr
), ctx
);
14324 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
14325 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
14326 if (TREE_CODE_CLASS (code
) != tcc_type
14327 && TREE_CODE_CLASS (code
) != tcc_declaration
14328 && code
!= TREE_LIST
14329 && code
!= SSA_NAME
14330 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
14331 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
/* Per-class recursion: constants, lists/vectors, expression operands,
   declarations, and types each hash their reachable sub-trees.  */
14332 switch (TREE_CODE_CLASS (code
))
14338 md5_process_bytes (TREE_STRING_POINTER (expr
),
14339 TREE_STRING_LENGTH (expr
), ctx
);
14342 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
14343 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
14346 for (i
= 0; i
< (int) VECTOR_CST_NELTS (expr
); ++i
)
14347 fold_checksum_tree (VECTOR_CST_ELT (expr
, i
), ctx
, ht
);
14353 case tcc_exceptional
:
14357 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
14358 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
14359 expr
= TREE_CHAIN (expr
);
14360 goto recursive_label
;
14363 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
14364 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
14370 case tcc_expression
:
14371 case tcc_reference
:
14372 case tcc_comparison
:
14375 case tcc_statement
:
14377 len
= TREE_OPERAND_LENGTH (expr
);
14378 for (i
= 0; i
< len
; ++i
)
14379 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
14381 case tcc_declaration
:
14382 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
14383 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
14384 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
14386 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
14387 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
14388 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
14389 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
14390 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
14393 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
14395 if (TREE_CODE (expr
) == FUNCTION_DECL
)
14397 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
14398 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
14400 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
14404 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
14405 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
14406 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
14407 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
14408 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
14409 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
14410 if (INTEGRAL_TYPE_P (expr
)
14411 || SCALAR_FLOAT_TYPE_P (expr
))
14413 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
14414 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
14416 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
14417 if (TREE_CODE (expr
) == RECORD_TYPE
14418 || TREE_CODE (expr
) == UNION_TYPE
14419 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
14420 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
14421 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
14428 /* Helper function for outputting the checksum of a tree T. When
14429 debugging with gdb, you can "define mynext" to be "next" followed
14430 by "call debug_fold_checksum (op0)", then just trace down till the
/* gdb-callable helper: MD5-checksum tree T via fold_checksum_tree and
   print the 16 digest bytes to stderr as space-separated decimal
   numbers.  NOTE(review): fragmented extraction; braces and the local
   declaration of `i' were dropped.  Code kept byte-identical.  */
14433 DEBUG_FUNCTION
void
14434 debug_fold_checksum (const_tree t
)
14437 unsigned char checksum
[16];
14438 struct md5_ctx ctx
;
14439 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14441 md5_init_ctx (&ctx
);
14442 fold_checksum_tree (t
, &ctx
, &ht
);
14443 md5_finish_ctx (&ctx
, checksum
);
14446 for (i
= 0; i
< 16; i
++)
14447 fprintf (stderr
, "%d ", checksum
[i
]);
14449 fprintf (stderr
, "\n");
14454 /* Fold a unary tree expression with code CODE of type TYPE with an
14455 operand OP0. LOC is the location of the resulting expression.
14456 Return a folded expression if successful. Otherwise, return a tree
14457 expression with code CODE of type TYPE with an operand OP0. */
/* Fold-then-build a unary expression: try fold_unary_loc (LOC, CODE,
   TYPE, OP0); on failure fall back to build1_stat_loc.  Under
   ENABLE_FOLD_CHECKING, MD5-checksum OP0 before and after so a fold
   routine that mutates its operand is caught by fold_check_failed.
   NOTE(review): fragmented extraction; the #endif/#else lines, braces
   and the local `tem' declaration were dropped.  Code kept
   byte-identical.  */
14460 fold_build1_stat_loc (location_t loc
,
14461 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
14464 #ifdef ENABLE_FOLD_CHECKING
14465 unsigned char checksum_before
[16], checksum_after
[16];
14466 struct md5_ctx ctx
;
14467 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14469 md5_init_ctx (&ctx
);
14470 fold_checksum_tree (op0
, &ctx
, &ht
);
14471 md5_finish_ctx (&ctx
, checksum_before
);
14475 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14477 tem
= build1_stat_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
14479 #ifdef ENABLE_FOLD_CHECKING
14480 md5_init_ctx (&ctx
);
14481 fold_checksum_tree (op0
, &ctx
, &ht
);
14482 md5_finish_ctx (&ctx
, checksum_after
);
14484 if (memcmp (checksum_before
, checksum_after
, 16))
14485 fold_check_failed (op0
, tem
);
14490 /* Fold a binary tree expression with code CODE of type TYPE with
14491 operands OP0 and OP1. LOC is the location of the resulting
14492 expression. Return a folded expression if successful. Otherwise,
14493 return a tree expression with code CODE of type TYPE with operands
/* Fold-then-build a binary expression: try fold_binary_loc (LOC, CODE,
   TYPE, OP0, OP1); on failure fall back to build2_stat_loc.  Under
   ENABLE_FOLD_CHECKING, each operand is MD5-checksummed before and
   after, and any mutation is reported via fold_check_failed.
   NOTE(review): fragmented extraction; #endif lines, braces and the
   local `tem' declaration were dropped.  Code kept byte-identical.  */
14497 fold_build2_stat_loc (location_t loc
,
14498 enum tree_code code
, tree type
, tree op0
, tree op1
14502 #ifdef ENABLE_FOLD_CHECKING
14503 unsigned char checksum_before_op0
[16],
14504 checksum_before_op1
[16],
14505 checksum_after_op0
[16],
14506 checksum_after_op1
[16];
14507 struct md5_ctx ctx
;
14508 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14510 md5_init_ctx (&ctx
);
14511 fold_checksum_tree (op0
, &ctx
, &ht
);
14512 md5_finish_ctx (&ctx
, checksum_before_op0
);
14515 md5_init_ctx (&ctx
);
14516 fold_checksum_tree (op1
, &ctx
, &ht
);
14517 md5_finish_ctx (&ctx
, checksum_before_op1
);
14521 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14523 tem
= build2_stat_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
14525 #ifdef ENABLE_FOLD_CHECKING
14526 md5_init_ctx (&ctx
);
14527 fold_checksum_tree (op0
, &ctx
, &ht
);
14528 md5_finish_ctx (&ctx
, checksum_after_op0
);
14531 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14532 fold_check_failed (op0
, tem
);
14534 md5_init_ctx (&ctx
);
14535 fold_checksum_tree (op1
, &ctx
, &ht
);
14536 md5_finish_ctx (&ctx
, checksum_after_op1
);
14538 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14539 fold_check_failed (op1
, tem
);
14544 /* Fold a ternary tree expression with code CODE of type TYPE with
14545 operands OP0, OP1, and OP2. Return a folded expression if
14546 successful. Otherwise, return a tree expression with code CODE of
14547 type TYPE with operands OP0, OP1, and OP2. */
/* Fold-then-build a ternary expression: assert CODE is not a
   variable-length expression class, try fold_ternary_loc, and fall
   back to build3_stat_loc on failure.  Under ENABLE_FOLD_CHECKING all
   three operands are MD5-checksummed before and after; mutation is
   reported via fold_check_failed.
   NOTE(review): fragmented extraction; #endif lines, braces and the
   local `tem' declaration were dropped.  Code kept byte-identical.  */
14550 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
14551 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
14554 #ifdef ENABLE_FOLD_CHECKING
14555 unsigned char checksum_before_op0
[16],
14556 checksum_before_op1
[16],
14557 checksum_before_op2
[16],
14558 checksum_after_op0
[16],
14559 checksum_after_op1
[16],
14560 checksum_after_op2
[16];
14561 struct md5_ctx ctx
;
14562 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14564 md5_init_ctx (&ctx
);
14565 fold_checksum_tree (op0
, &ctx
, &ht
);
14566 md5_finish_ctx (&ctx
, checksum_before_op0
);
14569 md5_init_ctx (&ctx
);
14570 fold_checksum_tree (op1
, &ctx
, &ht
);
14571 md5_finish_ctx (&ctx
, checksum_before_op1
);
14574 md5_init_ctx (&ctx
);
14575 fold_checksum_tree (op2
, &ctx
, &ht
);
14576 md5_finish_ctx (&ctx
, checksum_before_op2
);
/* CALL_EXPRs (tcc_vl_exp) must not reach the ternary folder.  */
14580 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
14581 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14583 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
14585 #ifdef ENABLE_FOLD_CHECKING
14586 md5_init_ctx (&ctx
);
14587 fold_checksum_tree (op0
, &ctx
, &ht
);
14588 md5_finish_ctx (&ctx
, checksum_after_op0
);
14591 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14592 fold_check_failed (op0
, tem
);
14594 md5_init_ctx (&ctx
);
14595 fold_checksum_tree (op1
, &ctx
, &ht
);
14596 md5_finish_ctx (&ctx
, checksum_after_op1
);
14599 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14600 fold_check_failed (op1
, tem
);
14602 md5_init_ctx (&ctx
);
14603 fold_checksum_tree (op2
, &ctx
, &ht
);
14604 md5_finish_ctx (&ctx
, checksum_after_op2
);
14606 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
14607 fold_check_failed (op2
, tem
);
14612 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14613 arguments in ARGARRAY, and a null static chain.
14614 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14615 of type TYPE from the given operands as constructed by build_call_array. */
/* Fold a CALL_EXPR of type TYPE calling FN with the NARGS arguments in
   ARGARRAY via fold_builtin_call_array.  Under ENABLE_FOLD_CHECKING,
   FN and each argument are MD5-checksummed before and after folding;
   mutation of FN is reported against FN, mutation of any argument
   against NULL_TREE.
   NOTE(review): fragmented extraction; #endif lines, braces and the
   local `tem'/`i' declarations were dropped.  Code kept
   byte-identical.  */
14618 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
14619 int nargs
, tree
*argarray
)
14622 #ifdef ENABLE_FOLD_CHECKING
14623 unsigned char checksum_before_fn
[16],
14624 checksum_before_arglist
[16],
14625 checksum_after_fn
[16],
14626 checksum_after_arglist
[16];
14627 struct md5_ctx ctx
;
14628 hash_table
<pointer_hash
<const tree_node
> > ht (32);
14631 md5_init_ctx (&ctx
);
14632 fold_checksum_tree (fn
, &ctx
, &ht
);
14633 md5_finish_ctx (&ctx
, checksum_before_fn
);
/* One combined digest over the whole argument list.  */
14636 md5_init_ctx (&ctx
);
14637 for (i
= 0; i
< nargs
; i
++)
14638 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
14639 md5_finish_ctx (&ctx
, checksum_before_arglist
);
14643 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
14645 #ifdef ENABLE_FOLD_CHECKING
14646 md5_init_ctx (&ctx
);
14647 fold_checksum_tree (fn
, &ctx
, &ht
);
14648 md5_finish_ctx (&ctx
, checksum_after_fn
);
14651 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
14652 fold_check_failed (fn
, tem
);
14654 md5_init_ctx (&ctx
);
14655 for (i
= 0; i
< nargs
; i
++)
14656 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
14657 md5_finish_ctx (&ctx
, checksum_after_arglist
);
14659 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
14660 fold_check_failed (NULL_TREE
, tem
);
14665 /* Perform constant folding and related simplification of initializer
14666 expression EXPR. These behave identically to "fold_buildN" but ignore
14667 potential run-time traps and exceptions that fold must preserve. */
/* Save the trap/rounding-related flags and enter "initializer folding"
   mode: signaling NaNs, trapping math and rounding math are turned off
   and folding_initializer is set while folding a static initializer.
   NOTE(review): line 14678 (presumably the flag_trapv = 0 reset) is
   missing from this extraction — confirm against upstream.  */
14669 #define START_FOLD_INIT \
14670 int saved_signaling_nans = flag_signaling_nans;\
14671 int saved_trapping_math = flag_trapping_math;\
14672 int saved_rounding_math = flag_rounding_math;\
14673 int saved_trapv = flag_trapv;\
14674 int saved_folding_initializer = folding_initializer;\
14675 flag_signaling_nans = 0;\
14676 flag_trapping_math = 0;\
14677 flag_rounding_math = 0;\
14679 folding_initializer = 1;
/* Restore every flag saved by START_FOLD_INIT.  */
14681 #define END_FOLD_INIT \
14682 flag_signaling_nans = saved_signaling_nans;\
14683 flag_trapping_math = saved_trapping_math;\
14684 flag_rounding_math = saved_rounding_math;\
14685 flag_trapv = saved_trapv;\
14686 folding_initializer = saved_folding_initializer;
/* fold_build1_loc variant for static initializers: same folding but
   with run-time trap/rounding concerns suppressed (per the comment
   above).  Presumably bracketed by START_FOLD_INIT/END_FOLD_INIT and
   returning `result' — those lines were dropped by the extraction;
   confirm against upstream.  Code kept byte-identical.  */
14689 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
14690 tree type
, tree op
)
14695 result
= fold_build1_loc (loc
, code
, type
, op
);
/* fold_build2_loc variant for static initializers: same folding but
   with run-time trap/rounding concerns suppressed.  Presumably
   bracketed by START_FOLD_INIT/END_FOLD_INIT and returning `result' —
   those lines were dropped by the extraction; confirm against
   upstream.  Code kept byte-identical.  */
14702 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
14703 tree type
, tree op0
, tree op1
)
14708 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
/* fold_build_call_array_loc variant for static initializers: same
   folding but with run-time trap/rounding concerns suppressed.
   Presumably bracketed by START_FOLD_INIT/END_FOLD_INIT and returning
   `result' — those lines were dropped by the extraction; confirm
   against upstream.  Code kept byte-identical.  */
14715 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
14716 int nargs
, tree
*argarray
)
14721 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
14727 #undef START_FOLD_INIT
14728 #undef END_FOLD_INIT
14730 /* Determine if first argument is a multiple of second argument. Return 0 if
14731 it is not, or we cannot easily determine it to be.
14733 An example of the sort of thing we care about (at this point; this routine
14734 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14735 fold cases do now) is discovering that
14737 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14743 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14745 This code also handles discovering that
14747 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14749 is a multiple of 8 so we don't have to worry about dealing with a
14750 possible remainder.
14752 Note that we *look* inside a SAVE_EXPR only to determine how it was
14753 calculated; it is not safe for fold to do much of anything else with the
14754 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14755 at run time. For example, the latter example above *cannot* be implemented
14756 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14757 evaluation time of the original SAVE_EXPR is not necessarily the same at
14758 the time the new expression is evaluated. The only optimization of this
14759 sort that would be valid is changing
14761 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14765 SAVE_EXPR (I) * SAVE_EXPR (J)
14767 (where the same SAVE_EXPR (J) is used in the original and the
14768 transformed version). */
/* See the long comment block above for the full contract: return
   nonzero when TOP is known to be an integral multiple of BOTTOM
   (conservatively 0 when that cannot be determined cheaply).  Only
   INTEGER_TYPE is handled; the switch dispatches on TREE_CODE (TOP)
   for BIT_AND, IOR/XOR, PLUS/MINUS, LSHIFT, conversions, SAVE_EXPR,
   COND_EXPR and INTEGER_CST.
   NOTE(review): the extraction dropped most case labels, braces and
   several returns (e.g. the `return 1;' / `return 0;' arms); the code
   below is kept byte-identical to the garbled original.  */
14771 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
14773 if (operand_equal_p (top
, bottom
, 0))
14776 if (TREE_CODE (type
) != INTEGER_TYPE
)
14779 switch (TREE_CODE (top
))
14782 /* Bitwise and provides a power of two multiple. If the mask is
14783 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14784 if (!integer_pow2p (bottom
))
14789 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14790 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
14794 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14795 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
14798 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
14802 op1
= TREE_OPERAND (top
, 1);
14803 /* const_binop may not detect overflow correctly,
14804 so check for it explicitly here. */
14805 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
14806 && 0 != (t1
= fold_convert (type
,
14807 const_binop (LSHIFT_EXPR
,
14810 && !TREE_OVERFLOW (t1
))
14811 return multiple_of_p (type
, t1
, bottom
);
14816 /* Can't handle conversions from non-integral or wider integral type. */
14817 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
14818 || (TYPE_PRECISION (type
)
14819 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
14822 /* .. fall through ... */
14825 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
14828 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
14829 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
/* INTEGER_CST: unsigned types with a negative-looking operand, a zero
   BOTTOM, or a non-constant BOTTOM are conservatively rejected.  */
14832 if (TREE_CODE (bottom
) != INTEGER_CST
14833 || integer_zerop (bottom
)
14834 || (TYPE_UNSIGNED (type
)
14835 && (tree_int_cst_sgn (top
) < 0
14836 || tree_int_cst_sgn (bottom
) < 0)))
14838 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
14846 /* Return true if CODE or TYPE is known to be non-negative. */
/* Return true when an expression with tree code CODE of TYPE is known
   non-negative from the code/type alone: truth-valued codes yield 0 or
   1, except for a signed 1-bit type whose values are -1 and 0.
   NOTE(review): fragmented extraction; the `return true;'/`return
   false;' lines and braces were dropped.  Code kept byte-identical.  */
14849 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
14851 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
14852 && truth_value_p (code
))
14853 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14854 have a signed:1 type (where the value is -1 and 0). */
14859 /* Return true if (CODE OP0) is known to be non-negative. If the return
14860 value is based on the assumption that signed overflow is undefined,
14861 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14862 *STRICT_OVERFLOW_P. */
/* Return true when the unary expression (CODE OP0) of TYPE is known
   non-negative.  Sets *STRICT_OVERFLOW_P when the answer relies on
   signed overflow being undefined (ABS_EXPR case).  Conversion cases
   inspect the inner/outer type pair (real<->real, integral->real,
   integral->integral with widening from unsigned).
   NOTE(review): fragmented extraction; the switch header, several case
   labels (ABS_EXPR, the conversion CASE_CONVERT), braces and `return
   true;' lines were dropped.  Code kept byte-identical.  */
14865 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14866 bool *strict_overflow_p
)
14868 if (TYPE_UNSIGNED (type
))
14874 /* We can't return 1 if flag_wrapv is set because
14875 ABS_EXPR<INT_MIN> = INT_MIN. */
14876 if (!INTEGRAL_TYPE_P (type
))
14878 if (TYPE_OVERFLOW_UNDEFINED (type
))
14880 *strict_overflow_p
= true;
14885 case NON_LVALUE_EXPR
:
14887 case FIX_TRUNC_EXPR
:
14888 return tree_expr_nonnegative_warnv_p (op0
,
14889 strict_overflow_p
);
14893 tree inner_type
= TREE_TYPE (op0
);
14894 tree outer_type
= type
;
14896 if (TREE_CODE (outer_type
) == REAL_TYPE
)
14898 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14899 return tree_expr_nonnegative_warnv_p (op0
,
14900 strict_overflow_p
);
14901 if (INTEGRAL_TYPE_P (inner_type
))
14903 if (TYPE_UNSIGNED (inner_type
))
14905 return tree_expr_nonnegative_warnv_p (op0
,
14906 strict_overflow_p
);
14909 else if (INTEGRAL_TYPE_P (outer_type
))
14911 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14912 return tree_expr_nonnegative_warnv_p (op0
,
14913 strict_overflow_p
);
14914 if (INTEGRAL_TYPE_P (inner_type
))
14915 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
14916 && TYPE_UNSIGNED (inner_type
);
14922 return tree_simple_nonnegative_warnv_p (code
, type
);
14925 /* We don't know sign of `t', so be conservative and return false. */
14929 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14930 value is based on the assumption that signed overflow is undefined,
14931 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14932 *STRICT_OVERFLOW_P. */
14935 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14936 tree op1
, bool *strict_overflow_p
)
14938 if (TYPE_UNSIGNED (type
))
14943 case POINTER_PLUS_EXPR
:
14945 if (FLOAT_TYPE_P (type
))
14946 return (tree_expr_nonnegative_warnv_p (op0
,
14948 && tree_expr_nonnegative_warnv_p (op1
,
14949 strict_overflow_p
));
14951 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14952 both unsigned and at least 2 bits shorter than the result. */
14953 if (TREE_CODE (type
) == INTEGER_TYPE
14954 && TREE_CODE (op0
) == NOP_EXPR
14955 && TREE_CODE (op1
) == NOP_EXPR
)
14957 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
14958 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
14959 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
14960 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
14962 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
14963 TYPE_PRECISION (inner2
)) + 1;
14964 return prec
< TYPE_PRECISION (type
);
14970 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
14972 /* x * x is always non-negative for floating point x
14973 or without overflow. */
14974 if (operand_equal_p (op0
, op1
, 0)
14975 || (tree_expr_nonnegative_warnv_p (op0
, strict_overflow_p
)
14976 && tree_expr_nonnegative_warnv_p (op1
, strict_overflow_p
)))
14978 if (TYPE_OVERFLOW_UNDEFINED (type
))
14979 *strict_overflow_p
= true;
14984 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14985 both unsigned and their total bits is shorter than the result. */
14986 if (TREE_CODE (type
) == INTEGER_TYPE
14987 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
14988 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
14990 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
14991 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
14993 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
14994 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
14997 bool unsigned0
= TYPE_UNSIGNED (inner0
);
14998 bool unsigned1
= TYPE_UNSIGNED (inner1
);
15000 if (TREE_CODE (op0
) == INTEGER_CST
)
15001 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
15003 if (TREE_CODE (op1
) == INTEGER_CST
)
15004 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
15006 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
15007 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
15009 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
15010 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
15011 : TYPE_PRECISION (inner0
);
15013 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
15014 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
15015 : TYPE_PRECISION (inner1
);
15017 return precision0
+ precision1
< TYPE_PRECISION (type
);
15024 return (tree_expr_nonnegative_warnv_p (op0
,
15026 || tree_expr_nonnegative_warnv_p (op1
,
15027 strict_overflow_p
));
15033 case TRUNC_DIV_EXPR
:
15034 case CEIL_DIV_EXPR
:
15035 case FLOOR_DIV_EXPR
:
15036 case ROUND_DIV_EXPR
:
15037 return (tree_expr_nonnegative_warnv_p (op0
,
15039 && tree_expr_nonnegative_warnv_p (op1
,
15040 strict_overflow_p
));
15042 case TRUNC_MOD_EXPR
:
15043 case CEIL_MOD_EXPR
:
15044 case FLOOR_MOD_EXPR
:
15045 case ROUND_MOD_EXPR
:
15046 return tree_expr_nonnegative_warnv_p (op0
,
15047 strict_overflow_p
);
15049 return tree_simple_nonnegative_warnv_p (code
, type
);
15052 /* We don't know sign of `t', so be conservative and return false. */
15056 /* Return true if T is known to be non-negative. If the return
15057 value is based on the assumption that signed overflow is undefined,
15058 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15059 *STRICT_OVERFLOW_P. */
15062 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15064 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
15067 switch (TREE_CODE (t
))
15070 return tree_int_cst_sgn (t
) >= 0;
15073 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
15076 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
15079 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15081 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 2),
15082 strict_overflow_p
));
15084 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
15087 /* We don't know sign of `t', so be conservative and return false. */
15091 /* Return true if T is known to be non-negative. If the return
15092 value is based on the assumption that signed overflow is undefined,
15093 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15094 *STRICT_OVERFLOW_P. */
15097 tree_call_nonnegative_warnv_p (tree type
, tree fndecl
,
15098 tree arg0
, tree arg1
, bool *strict_overflow_p
)
15100 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
15101 switch (DECL_FUNCTION_CODE (fndecl
))
15103 CASE_FLT_FN (BUILT_IN_ACOS
):
15104 CASE_FLT_FN (BUILT_IN_ACOSH
):
15105 CASE_FLT_FN (BUILT_IN_CABS
):
15106 CASE_FLT_FN (BUILT_IN_COSH
):
15107 CASE_FLT_FN (BUILT_IN_ERFC
):
15108 CASE_FLT_FN (BUILT_IN_EXP
):
15109 CASE_FLT_FN (BUILT_IN_EXP10
):
15110 CASE_FLT_FN (BUILT_IN_EXP2
):
15111 CASE_FLT_FN (BUILT_IN_FABS
):
15112 CASE_FLT_FN (BUILT_IN_FDIM
):
15113 CASE_FLT_FN (BUILT_IN_HYPOT
):
15114 CASE_FLT_FN (BUILT_IN_POW10
):
15115 CASE_INT_FN (BUILT_IN_FFS
):
15116 CASE_INT_FN (BUILT_IN_PARITY
):
15117 CASE_INT_FN (BUILT_IN_POPCOUNT
):
15118 CASE_INT_FN (BUILT_IN_CLZ
):
15119 CASE_INT_FN (BUILT_IN_CLRSB
):
15120 case BUILT_IN_BSWAP32
:
15121 case BUILT_IN_BSWAP64
:
15125 CASE_FLT_FN (BUILT_IN_SQRT
):
15126 /* sqrt(-0.0) is -0.0. */
15127 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
15129 return tree_expr_nonnegative_warnv_p (arg0
,
15130 strict_overflow_p
);
15132 CASE_FLT_FN (BUILT_IN_ASINH
):
15133 CASE_FLT_FN (BUILT_IN_ATAN
):
15134 CASE_FLT_FN (BUILT_IN_ATANH
):
15135 CASE_FLT_FN (BUILT_IN_CBRT
):
15136 CASE_FLT_FN (BUILT_IN_CEIL
):
15137 CASE_FLT_FN (BUILT_IN_ERF
):
15138 CASE_FLT_FN (BUILT_IN_EXPM1
):
15139 CASE_FLT_FN (BUILT_IN_FLOOR
):
15140 CASE_FLT_FN (BUILT_IN_FMOD
):
15141 CASE_FLT_FN (BUILT_IN_FREXP
):
15142 CASE_FLT_FN (BUILT_IN_ICEIL
):
15143 CASE_FLT_FN (BUILT_IN_IFLOOR
):
15144 CASE_FLT_FN (BUILT_IN_IRINT
):
15145 CASE_FLT_FN (BUILT_IN_IROUND
):
15146 CASE_FLT_FN (BUILT_IN_LCEIL
):
15147 CASE_FLT_FN (BUILT_IN_LDEXP
):
15148 CASE_FLT_FN (BUILT_IN_LFLOOR
):
15149 CASE_FLT_FN (BUILT_IN_LLCEIL
):
15150 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
15151 CASE_FLT_FN (BUILT_IN_LLRINT
):
15152 CASE_FLT_FN (BUILT_IN_LLROUND
):
15153 CASE_FLT_FN (BUILT_IN_LRINT
):
15154 CASE_FLT_FN (BUILT_IN_LROUND
):
15155 CASE_FLT_FN (BUILT_IN_MODF
):
15156 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
15157 CASE_FLT_FN (BUILT_IN_RINT
):
15158 CASE_FLT_FN (BUILT_IN_ROUND
):
15159 CASE_FLT_FN (BUILT_IN_SCALB
):
15160 CASE_FLT_FN (BUILT_IN_SCALBLN
):
15161 CASE_FLT_FN (BUILT_IN_SCALBN
):
15162 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
15163 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
15164 CASE_FLT_FN (BUILT_IN_SINH
):
15165 CASE_FLT_FN (BUILT_IN_TANH
):
15166 CASE_FLT_FN (BUILT_IN_TRUNC
):
15167 /* True if the 1st argument is nonnegative. */
15168 return tree_expr_nonnegative_warnv_p (arg0
,
15169 strict_overflow_p
);
15171 CASE_FLT_FN (BUILT_IN_FMAX
):
15172 /* True if the 1st OR 2nd arguments are nonnegative. */
15173 return (tree_expr_nonnegative_warnv_p (arg0
,
15175 || (tree_expr_nonnegative_warnv_p (arg1
,
15176 strict_overflow_p
)));
15178 CASE_FLT_FN (BUILT_IN_FMIN
):
15179 /* True if the 1st AND 2nd arguments are nonnegative. */
15180 return (tree_expr_nonnegative_warnv_p (arg0
,
15182 && (tree_expr_nonnegative_warnv_p (arg1
,
15183 strict_overflow_p
)));
15185 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
15186 /* True if the 2nd argument is nonnegative. */
15187 return tree_expr_nonnegative_warnv_p (arg1
,
15188 strict_overflow_p
);
15190 CASE_FLT_FN (BUILT_IN_POWI
):
15191 /* True if the 1st argument is nonnegative or the second
15192 argument is an even integer. */
15193 if (TREE_CODE (arg1
) == INTEGER_CST
15194 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
15196 return tree_expr_nonnegative_warnv_p (arg0
,
15197 strict_overflow_p
);
15199 CASE_FLT_FN (BUILT_IN_POW
):
15200 /* True if the 1st argument is nonnegative or the second
15201 argument is an even integer valued real. */
15202 if (TREE_CODE (arg1
) == REAL_CST
)
15207 c
= TREE_REAL_CST (arg1
);
15208 n
= real_to_integer (&c
);
15211 REAL_VALUE_TYPE cint
;
15212 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
15213 if (real_identical (&c
, &cint
))
15217 return tree_expr_nonnegative_warnv_p (arg0
,
15218 strict_overflow_p
);
15223 return tree_simple_nonnegative_warnv_p (CALL_EXPR
,
15227 /* Return true if T is known to be non-negative. If the return
15228 value is based on the assumption that signed overflow is undefined,
15229 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15230 *STRICT_OVERFLOW_P. */
15233 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15235 enum tree_code code
= TREE_CODE (t
);
15236 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
15243 tree temp
= TARGET_EXPR_SLOT (t
);
15244 t
= TARGET_EXPR_INITIAL (t
);
15246 /* If the initializer is non-void, then it's a normal expression
15247 that will be assigned to the slot. */
15248 if (!VOID_TYPE_P (t
))
15249 return tree_expr_nonnegative_warnv_p (t
, strict_overflow_p
);
15251 /* Otherwise, the initializer sets the slot in some way. One common
15252 way is an assignment statement at the end of the initializer. */
15255 if (TREE_CODE (t
) == BIND_EXPR
)
15256 t
= expr_last (BIND_EXPR_BODY (t
));
15257 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
15258 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
15259 t
= expr_last (TREE_OPERAND (t
, 0));
15260 else if (TREE_CODE (t
) == STATEMENT_LIST
)
15265 if (TREE_CODE (t
) == MODIFY_EXPR
15266 && TREE_OPERAND (t
, 0) == temp
)
15267 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15268 strict_overflow_p
);
15275 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
15276 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
15278 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
15279 get_callee_fndecl (t
),
15282 strict_overflow_p
);
15284 case COMPOUND_EXPR
:
15286 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
15287 strict_overflow_p
);
15289 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t
, 1)),
15290 strict_overflow_p
);
15292 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
15293 strict_overflow_p
);
15296 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
15300 /* We don't know sign of `t', so be conservative and return false. */
15304 /* Return true if T is known to be non-negative. If the return
15305 value is based on the assumption that signed overflow is undefined,
15306 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15307 *STRICT_OVERFLOW_P. */
15310 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
15312 enum tree_code code
;
15313 if (t
== error_mark_node
)
15316 code
= TREE_CODE (t
);
15317 switch (TREE_CODE_CLASS (code
))
15320 case tcc_comparison
:
15321 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15323 TREE_OPERAND (t
, 0),
15324 TREE_OPERAND (t
, 1),
15325 strict_overflow_p
);
15328 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15330 TREE_OPERAND (t
, 0),
15331 strict_overflow_p
);
15334 case tcc_declaration
:
15335 case tcc_reference
:
15336 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15344 case TRUTH_AND_EXPR
:
15345 case TRUTH_OR_EXPR
:
15346 case TRUTH_XOR_EXPR
:
15347 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
15349 TREE_OPERAND (t
, 0),
15350 TREE_OPERAND (t
, 1),
15351 strict_overflow_p
);
15352 case TRUTH_NOT_EXPR
:
15353 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
15355 TREE_OPERAND (t
, 0),
15356 strict_overflow_p
);
15363 case WITH_SIZE_EXPR
:
15365 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
15368 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
);
15372 /* Return true if `t' is known to be non-negative. Handle warnings
15373 about undefined signed overflow. */
15376 tree_expr_nonnegative_p (tree t
)
15378 bool ret
, strict_overflow_p
;
15380 strict_overflow_p
= false;
15381 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
15382 if (strict_overflow_p
)
15383 fold_overflow_warning (("assuming signed overflow does not occur when "
15384 "determining that expression is always "
15386 WARN_STRICT_OVERFLOW_MISC
);
15391 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15392 For floating point we further ensure that T is not denormal.
15393 Similar logic is present in nonzero_address in rtlanal.h.
15395 If the return value is based on the assumption that signed overflow
15396 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15397 change *STRICT_OVERFLOW_P. */
15400 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
15401 bool *strict_overflow_p
)
15406 return tree_expr_nonzero_warnv_p (op0
,
15407 strict_overflow_p
);
15411 tree inner_type
= TREE_TYPE (op0
);
15412 tree outer_type
= type
;
15414 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
15415 && tree_expr_nonzero_warnv_p (op0
,
15416 strict_overflow_p
));
15420 case NON_LVALUE_EXPR
:
15421 return tree_expr_nonzero_warnv_p (op0
,
15422 strict_overflow_p
);
15431 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15432 For floating point we further ensure that T is not denormal.
15433 Similar logic is present in nonzero_address in rtlanal.h.
15435 If the return value is based on the assumption that signed overflow
15436 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15437 change *STRICT_OVERFLOW_P. */
15440 tree_binary_nonzero_warnv_p (enum tree_code code
,
15443 tree op1
, bool *strict_overflow_p
)
15445 bool sub_strict_overflow_p
;
15448 case POINTER_PLUS_EXPR
:
15450 if (TYPE_OVERFLOW_UNDEFINED (type
))
15452 /* With the presence of negative values it is hard
15453 to say something. */
15454 sub_strict_overflow_p
= false;
15455 if (!tree_expr_nonnegative_warnv_p (op0
,
15456 &sub_strict_overflow_p
)
15457 || !tree_expr_nonnegative_warnv_p (op1
,
15458 &sub_strict_overflow_p
))
15460 /* One of operands must be positive and the other non-negative. */
15461 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15462 overflows, on a twos-complement machine the sum of two
15463 nonnegative numbers can never be zero. */
15464 return (tree_expr_nonzero_warnv_p (op0
,
15466 || tree_expr_nonzero_warnv_p (op1
,
15467 strict_overflow_p
));
15472 if (TYPE_OVERFLOW_UNDEFINED (type
))
15474 if (tree_expr_nonzero_warnv_p (op0
,
15476 && tree_expr_nonzero_warnv_p (op1
,
15477 strict_overflow_p
))
15479 *strict_overflow_p
= true;
15486 sub_strict_overflow_p
= false;
15487 if (tree_expr_nonzero_warnv_p (op0
,
15488 &sub_strict_overflow_p
)
15489 && tree_expr_nonzero_warnv_p (op1
,
15490 &sub_strict_overflow_p
))
15492 if (sub_strict_overflow_p
)
15493 *strict_overflow_p
= true;
15498 sub_strict_overflow_p
= false;
15499 if (tree_expr_nonzero_warnv_p (op0
,
15500 &sub_strict_overflow_p
))
15502 if (sub_strict_overflow_p
)
15503 *strict_overflow_p
= true;
15505 /* When both operands are nonzero, then MAX must be too. */
15506 if (tree_expr_nonzero_warnv_p (op1
,
15507 strict_overflow_p
))
15510 /* MAX where operand 0 is positive is positive. */
15511 return tree_expr_nonnegative_warnv_p (op0
,
15512 strict_overflow_p
);
15514 /* MAX where operand 1 is positive is positive. */
15515 else if (tree_expr_nonzero_warnv_p (op1
,
15516 &sub_strict_overflow_p
)
15517 && tree_expr_nonnegative_warnv_p (op1
,
15518 &sub_strict_overflow_p
))
15520 if (sub_strict_overflow_p
)
15521 *strict_overflow_p
= true;
15527 return (tree_expr_nonzero_warnv_p (op1
,
15529 || tree_expr_nonzero_warnv_p (op0
,
15530 strict_overflow_p
));
15539 /* Return true when T is an address and is known to be nonzero.
15540 For floating point we further ensure that T is not denormal.
15541 Similar logic is present in nonzero_address in rtlanal.h.
15543 If the return value is based on the assumption that signed overflow
15544 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15545 change *STRICT_OVERFLOW_P. */
15548 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15550 bool sub_strict_overflow_p
;
15551 switch (TREE_CODE (t
))
15554 return !integer_zerop (t
);
15558 tree base
= TREE_OPERAND (t
, 0);
15560 if (!DECL_P (base
))
15561 base
= get_base_address (base
);
15566 /* For objects in symbol table check if we know they are non-zero.
15567 Don't do anything for variables and functions before symtab is built;
15568 it is quite possible that they will be declared weak later. */
15569 if (DECL_P (base
) && decl_in_symtab_p (base
))
15571 struct symtab_node
*symbol
;
15573 symbol
= symtab_node::get_create (base
);
15575 return symbol
->nonzero_address ();
15580 /* Function local objects are never NULL. */
15582 && (DECL_CONTEXT (base
)
15583 && TREE_CODE (DECL_CONTEXT (base
)) == FUNCTION_DECL
15584 && auto_var_in_fn_p (base
, DECL_CONTEXT (base
))))
15587 /* Constants are never weak. */
15588 if (CONSTANT_CLASS_P (base
))
15595 sub_strict_overflow_p
= false;
15596 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15597 &sub_strict_overflow_p
)
15598 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
15599 &sub_strict_overflow_p
))
15601 if (sub_strict_overflow_p
)
15602 *strict_overflow_p
= true;
15613 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15614 attempt to fold the expression to a constant without modifying TYPE,
15617 If the expression could be simplified to a constant, then return
15618 the constant. If the expression would not be simplified to a
15619 constant, then return NULL_TREE. */
15622 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
15624 tree tem
= fold_binary (code
, type
, op0
, op1
);
15625 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15628 /* Given the components of a unary expression CODE, TYPE and OP0,
15629 attempt to fold the expression to a constant without modifying
15632 If the expression could be simplified to a constant, then return
15633 the constant. If the expression would not be simplified to a
15634 constant, then return NULL_TREE. */
15637 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
15639 tree tem
= fold_unary (code
, type
, op0
);
15640 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15643 /* If EXP represents referencing an element in a constant string
15644 (either via pointer arithmetic or array indexing), return the
15645 tree representing the value accessed, otherwise return NULL. */
15648 fold_read_from_constant_string (tree exp
)
15650 if ((TREE_CODE (exp
) == INDIRECT_REF
15651 || TREE_CODE (exp
) == ARRAY_REF
)
15652 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
15654 tree exp1
= TREE_OPERAND (exp
, 0);
15657 location_t loc
= EXPR_LOCATION (exp
);
15659 if (TREE_CODE (exp
) == INDIRECT_REF
)
15660 string
= string_constant (exp1
, &index
);
15663 tree low_bound
= array_ref_low_bound (exp
);
15664 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
15666 /* Optimize the special-case of a zero lower bound.
15668 We convert the low_bound to sizetype to avoid some problems
15669 with constant folding. (E.g. suppose the lower bound is 1,
15670 and its mode is QI. Without the conversion,l (ARRAY
15671 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15672 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15673 if (! integer_zerop (low_bound
))
15674 index
= size_diffop_loc (loc
, index
,
15675 fold_convert_loc (loc
, sizetype
, low_bound
));
15681 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
15682 && TREE_CODE (string
) == STRING_CST
15683 && TREE_CODE (index
) == INTEGER_CST
15684 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
15685 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
15687 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
15688 return build_int_cst_type (TREE_TYPE (exp
),
15689 (TREE_STRING_POINTER (string
)
15690 [TREE_INT_CST_LOW (index
)]));
15695 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15696 an integer constant, real, or fixed-point constant.
15698 TYPE is the type of the result. */
15701 fold_negate_const (tree arg0
, tree type
)
15703 tree t
= NULL_TREE
;
15705 switch (TREE_CODE (arg0
))
15710 wide_int val
= wi::neg (arg0
, &overflow
);
15711 t
= force_fit_type (type
, val
, 1,
15712 (overflow
| TREE_OVERFLOW (arg0
))
15713 && !TYPE_UNSIGNED (type
));
15718 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15723 FIXED_VALUE_TYPE f
;
15724 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
15725 &(TREE_FIXED_CST (arg0
)), NULL
,
15726 TYPE_SATURATING (type
));
15727 t
= build_fixed (type
, f
);
15728 /* Propagate overflow flags. */
15729 if (overflow_p
| TREE_OVERFLOW (arg0
))
15730 TREE_OVERFLOW (t
) = 1;
15735 gcc_unreachable ();
15741 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15742 an integer constant or real constant.
15744 TYPE is the type of the result. */
15747 fold_abs_const (tree arg0
, tree type
)
15749 tree t
= NULL_TREE
;
15751 switch (TREE_CODE (arg0
))
15755 /* If the value is unsigned or non-negative, then the absolute value
15756 is the same as the ordinary value. */
15757 if (!wi::neg_p (arg0
, TYPE_SIGN (type
)))
15760 /* If the value is negative, then the absolute value is
15765 wide_int val
= wi::neg (arg0
, &overflow
);
15766 t
= force_fit_type (type
, val
, -1,
15767 overflow
| TREE_OVERFLOW (arg0
));
15773 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
15774 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15780 gcc_unreachable ();
15786 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15787 constant. TYPE is the type of the result. */
15790 fold_not_const (const_tree arg0
, tree type
)
15792 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
15794 return force_fit_type (type
, wi::bit_not (arg0
), 0, TREE_OVERFLOW (arg0
));
15797 /* Given CODE, a relational operator, the target type, TYPE and two
15798 constant operands OP0 and OP1, return the result of the
15799 relational operation. If the result is not a compile time
15800 constant, then return NULL_TREE. */
15803 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
15805 int result
, invert
;
15807 /* From here on, the only cases we handle are when the result is
15808 known to be a constant. */
15810 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
15812 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
15813 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
15815 /* Handle the cases where either operand is a NaN. */
15816 if (real_isnan (c0
) || real_isnan (c1
))
15826 case UNORDERED_EXPR
:
15840 if (flag_trapping_math
)
15846 gcc_unreachable ();
15849 return constant_boolean_node (result
, type
);
15852 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
15855 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
15857 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
15858 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
15859 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
15862 /* Handle equality/inequality of complex constants. */
15863 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
15865 tree rcond
= fold_relational_const (code
, type
,
15866 TREE_REALPART (op0
),
15867 TREE_REALPART (op1
));
15868 tree icond
= fold_relational_const (code
, type
,
15869 TREE_IMAGPART (op0
),
15870 TREE_IMAGPART (op1
));
15871 if (code
== EQ_EXPR
)
15872 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
15873 else if (code
== NE_EXPR
)
15874 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
15879 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
15881 unsigned count
= VECTOR_CST_NELTS (op0
);
15882 tree
*elts
= XALLOCAVEC (tree
, count
);
15883 gcc_assert (VECTOR_CST_NELTS (op1
) == count
15884 && TYPE_VECTOR_SUBPARTS (type
) == count
);
15886 for (unsigned i
= 0; i
< count
; i
++)
15888 tree elem_type
= TREE_TYPE (type
);
15889 tree elem0
= VECTOR_CST_ELT (op0
, i
);
15890 tree elem1
= VECTOR_CST_ELT (op1
, i
);
15892 tree tem
= fold_relational_const (code
, elem_type
,
15895 if (tem
== NULL_TREE
)
15898 elts
[i
] = build_int_cst (elem_type
, integer_zerop (tem
) ? 0 : -1);
15901 return build_vector (type
, elts
);
15904 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15906 To compute GT, swap the arguments and do LT.
15907 To compute GE, do LT and invert the result.
15908 To compute LE, swap the arguments, do LT and invert the result.
15909 To compute NE, do EQ and invert the result.
15911 Therefore, the code below must handle only EQ and LT. */
15913 if (code
== LE_EXPR
|| code
== GT_EXPR
)
15918 code
= swap_tree_comparison (code
);
15921 /* Note that it is safe to invert for real values here because we
15922 have already handled the one case that it matters. */
15925 if (code
== NE_EXPR
|| code
== GE_EXPR
)
15928 code
= invert_tree_comparison (code
, false);
15931 /* Compute a result for LT or EQ if args permit;
15932 Otherwise return T. */
15933 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
15935 if (code
== EQ_EXPR
)
15936 result
= tree_int_cst_equal (op0
, op1
);
15938 result
= tree_int_cst_lt (op0
, op1
);
15945 return constant_boolean_node (result
, type
);
15948 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15949 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15953 fold_build_cleanup_point_expr (tree type
, tree expr
)
15955 /* If the expression does not have side effects then we don't have to wrap
15956 it with a cleanup point expression. */
15957 if (!TREE_SIDE_EFFECTS (expr
))
15960 /* If the expression is a return, check to see if the expression inside the
15961 return has no side effects or the right hand side of the modify expression
15962 inside the return. If either don't have side effects set we don't need to
15963 wrap the expression in a cleanup point expression. Note we don't check the
15964 left hand side of the modify because it should always be a return decl. */
15965 if (TREE_CODE (expr
) == RETURN_EXPR
)
15967 tree op
= TREE_OPERAND (expr
, 0);
15968 if (!op
|| !TREE_SIDE_EFFECTS (op
))
15970 op
= TREE_OPERAND (op
, 1);
15971 if (!TREE_SIDE_EFFECTS (op
))
15975 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
15978 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15979 of an indirection through OP0, or NULL_TREE if no simplification is
15983 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
15989 subtype
= TREE_TYPE (sub
);
15990 if (!POINTER_TYPE_P (subtype
))
15993 if (TREE_CODE (sub
) == ADDR_EXPR
)
15995 tree op
= TREE_OPERAND (sub
, 0);
15996 tree optype
= TREE_TYPE (op
);
15997 /* *&CONST_DECL -> to the value of the const decl. */
15998 if (TREE_CODE (op
) == CONST_DECL
)
15999 return DECL_INITIAL (op
);
16000 /* *&p => p; make sure to handle *&"str"[cst] here. */
16001 if (type
== optype
)
16003 tree fop
= fold_read_from_constant_string (op
);
16009 /* *(foo *)&fooarray => fooarray[0] */
16010 else if (TREE_CODE (optype
) == ARRAY_TYPE
16011 && type
== TREE_TYPE (optype
)
16012 && (!in_gimple_form
16013 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
16015 tree type_domain
= TYPE_DOMAIN (optype
);
16016 tree min_val
= size_zero_node
;
16017 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16018 min_val
= TYPE_MIN_VALUE (type_domain
);
16020 && TREE_CODE (min_val
) != INTEGER_CST
)
16022 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
16023 NULL_TREE
, NULL_TREE
);
16025 /* *(foo *)&complexfoo => __real__ complexfoo */
16026 else if (TREE_CODE (optype
) == COMPLEX_TYPE
16027 && type
== TREE_TYPE (optype
))
16028 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
16029 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16030 else if (TREE_CODE (optype
) == VECTOR_TYPE
16031 && type
== TREE_TYPE (optype
))
16033 tree part_width
= TYPE_SIZE (type
);
16034 tree index
= bitsize_int (0);
16035 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
16039 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
16040 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
16042 tree op00
= TREE_OPERAND (sub
, 0);
16043 tree op01
= TREE_OPERAND (sub
, 1);
16046 if (TREE_CODE (op00
) == ADDR_EXPR
)
16049 op00
= TREE_OPERAND (op00
, 0);
16050 op00type
= TREE_TYPE (op00
);
16052 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16053 if (TREE_CODE (op00type
) == VECTOR_TYPE
16054 && type
== TREE_TYPE (op00type
))
16056 HOST_WIDE_INT offset
= tree_to_shwi (op01
);
16057 tree part_width
= TYPE_SIZE (type
);
16058 unsigned HOST_WIDE_INT part_widthi
= tree_to_shwi (part_width
)/BITS_PER_UNIT
;
16059 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
16060 tree index
= bitsize_int (indexi
);
16062 if (offset
/ part_widthi
< TYPE_VECTOR_SUBPARTS (op00type
))
16063 return fold_build3_loc (loc
,
16064 BIT_FIELD_REF
, type
, op00
,
16065 part_width
, index
);
16068 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16069 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
16070 && type
== TREE_TYPE (op00type
))
16072 tree size
= TYPE_SIZE_UNIT (type
);
16073 if (tree_int_cst_equal (size
, op01
))
16074 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
16076 /* ((foo *)&fooarray)[1] => fooarray[1] */
16077 else if (TREE_CODE (op00type
) == ARRAY_TYPE
16078 && type
== TREE_TYPE (op00type
))
16080 tree type_domain
= TYPE_DOMAIN (op00type
);
16081 tree min_val
= size_zero_node
;
16082 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16083 min_val
= TYPE_MIN_VALUE (type_domain
);
16084 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
16085 TYPE_SIZE_UNIT (type
));
16086 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
16087 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
16088 NULL_TREE
, NULL_TREE
);
16093 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16094 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
16095 && type
== TREE_TYPE (TREE_TYPE (subtype
))
16096 && (!in_gimple_form
16097 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
16100 tree min_val
= size_zero_node
;
16101 sub
= build_fold_indirect_ref_loc (loc
, sub
);
16102 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
16103 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
16104 min_val
= TYPE_MIN_VALUE (type_domain
);
16106 && TREE_CODE (min_val
) != INTEGER_CST
)
16108 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
16115 /* Builds an expression for an indirection through T, simplifying some
16119 build_fold_indirect_ref_loc (location_t loc
, tree t
)
16121 tree type
= TREE_TYPE (TREE_TYPE (t
));
16122 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
16127 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
16130 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16133 fold_indirect_ref_loc (location_t loc
, tree t
)
16135 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
16143 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16144 whose result is ignored. The type of the returned tree need not be
16145 the same as the original expression. */
16148 fold_ignored_result (tree t
)
16150 if (!TREE_SIDE_EFFECTS (t
))
16151 return integer_zero_node
;
16154 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
16157 t
= TREE_OPERAND (t
, 0);
16161 case tcc_comparison
:
16162 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
16163 t
= TREE_OPERAND (t
, 0);
16164 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
16165 t
= TREE_OPERAND (t
, 1);
16170 case tcc_expression
:
16171 switch (TREE_CODE (t
))
16173 case COMPOUND_EXPR
:
16174 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
16176 t
= TREE_OPERAND (t
, 0);
16180 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
16181 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
16183 t
= TREE_OPERAND (t
, 0);
16196 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16199 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
16201 tree div
= NULL_TREE
;
16206 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16207 have to do anything. Only do this when we are not given a const,
16208 because in that case, this check is more expensive than just
16210 if (TREE_CODE (value
) != INTEGER_CST
)
16212 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16214 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16218 /* If divisor is a power of two, simplify this to bit manipulation. */
16219 if (divisor
== (divisor
& -divisor
))
16221 if (TREE_CODE (value
) == INTEGER_CST
)
16223 wide_int val
= value
;
16226 if ((val
& (divisor
- 1)) == 0)
16229 overflow_p
= TREE_OVERFLOW (value
);
16230 val
&= ~(divisor
- 1);
16235 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
16241 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
16242 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
16243 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16244 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16250 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16251 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
16252 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16258 /* Likewise, but round down. */
16261 round_down_loc (location_t loc
, tree value
, int divisor
)
16263 tree div
= NULL_TREE
;
16265 gcc_assert (divisor
> 0);
16269 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16270 have to do anything. Only do this when we are not given a const,
16271 because in that case, this check is more expensive than just
16273 if (TREE_CODE (value
) != INTEGER_CST
)
16275 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16277 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
16281 /* If divisor is a power of two, simplify this to bit manipulation. */
16282 if (divisor
== (divisor
& -divisor
))
16286 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
16287 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
16292 div
= build_int_cst (TREE_TYPE (value
), divisor
);
16293 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
16294 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
16300 /* Returns the pointer to the base of the object addressed by EXP and
16301 extracts the information about the offset of the access, storing it
16302 to PBITPOS and POFFSET. */
16305 split_address_to_core_and_offset (tree exp
,
16306 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
16310 int unsignedp
, volatilep
;
16311 HOST_WIDE_INT bitsize
;
16312 location_t loc
= EXPR_LOCATION (exp
);
16314 if (TREE_CODE (exp
) == ADDR_EXPR
)
16316 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
16317 poffset
, &mode
, &unsignedp
, &volatilep
,
16319 core
= build_fold_addr_expr_loc (loc
, core
);
16325 *poffset
= NULL_TREE
;
16331 /* Returns true if addresses of E1 and E2 differ by a constant, false
16332 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16335 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
16338 HOST_WIDE_INT bitpos1
, bitpos2
;
16339 tree toffset1
, toffset2
, tdiff
, type
;
16341 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
16342 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
16344 if (bitpos1
% BITS_PER_UNIT
!= 0
16345 || bitpos2
% BITS_PER_UNIT
!= 0
16346 || !operand_equal_p (core1
, core2
, 0))
16349 if (toffset1
&& toffset2
)
16351 type
= TREE_TYPE (toffset1
);
16352 if (type
!= TREE_TYPE (toffset2
))
16353 toffset2
= fold_convert (type
, toffset2
);
16355 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
16356 if (!cst_and_fits_in_hwi (tdiff
))
16359 *diff
= int_cst_value (tdiff
);
16361 else if (toffset1
|| toffset2
)
16363 /* If only one of the offsets is non-constant, the difference cannot
16370 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
16374 /* Simplify the floating point expression EXP when the sign of the
16375 result is not significant. Return NULL_TREE if no simplification
16379 fold_strip_sign_ops (tree exp
)
16382 location_t loc
= EXPR_LOCATION (exp
);
16384 switch (TREE_CODE (exp
))
16388 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16389 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
16393 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp
))))
16395 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16396 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16397 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
16398 return fold_build2_loc (loc
, TREE_CODE (exp
), TREE_TYPE (exp
),
16399 arg0
? arg0
: TREE_OPERAND (exp
, 0),
16400 arg1
? arg1
: TREE_OPERAND (exp
, 1));
16403 case COMPOUND_EXPR
:
16404 arg0
= TREE_OPERAND (exp
, 0);
16405 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16407 return fold_build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (exp
), arg0
, arg1
);
16411 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16412 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 2));
16414 return fold_build3_loc (loc
,
16415 COND_EXPR
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
16416 arg0
? arg0
: TREE_OPERAND (exp
, 1),
16417 arg1
? arg1
: TREE_OPERAND (exp
, 2));
16422 const enum built_in_function fcode
= builtin_mathfn_code (exp
);
16425 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
16426 /* Strip copysign function call, return the 1st argument. */
16427 arg0
= CALL_EXPR_ARG (exp
, 0);
16428 arg1
= CALL_EXPR_ARG (exp
, 1);
16429 return omit_one_operand_loc (loc
, TREE_TYPE (exp
), arg0
, arg1
);
16432 /* Strip sign ops from the argument of "odd" math functions. */
16433 if (negate_mathfn_p (fcode
))
16435 arg0
= fold_strip_sign_ops (CALL_EXPR_ARG (exp
, 0));
16437 return build_call_expr_loc (loc
, get_callee_fndecl (exp
), 1, arg0
);