gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
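
/* Illustrative note (not part of the original source): the encoding above
   is the bitwise OR of four primitive outcomes, LT (1), EQ (2), GT (4)
   and UNORD (8).  For example COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ
   (1|2 == 3) and COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD
   (1|4|8 == 13), which is why combining two comparisons with AND or OR
   reduces to bitwise AND/OR on these codes.  */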

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
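
/* Illustrative example (not part of the original source): with ARG1 == 12
   and ARG2 == 4 this returns a tree for 3; with ARG1 == 12 and ARG2 == 5
   the division is inexact and NULL_TREE is returned.  */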
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
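
/* Illustrative usage sketch (not part of the original source; the local
   names expr, stmt and result_was_used are hypothetical).  A caller that
   wants fold to stay quiet unless the folded result is actually used
   would bracket the call like so:

     fold_defer_overflow_warnings ();
     tree t = fold (expr);
     fold_undefer_overflow_warnings (result_was_used, stmt, 0);

   or call fold_undefer_and_ignore_overflow_warnings () to drop any
   pending warning unconditionally.  */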

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
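
/* Illustrative example (not part of the original source): sin is odd,
   sin(-x) == -sin(x), so -sin(x) may be folded to sin(-x); cos is even
   and is correctly absent from the list above.  The rint family is odd
   only when -frounding-math is off, since rounding towards a run-time
   mode is not symmetric around zero.  */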

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
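
/* Illustrative example (not part of the original source): for a 32-bit
   signed type the only value rejected here is INT_MIN (0x80000000, just
   the sign bit set), since -INT_MIN is not representable; unsigned types
   are rejected outright.  */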

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (!TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
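
/* Illustrative example (not part of the original source): for a signed
   "a - b" where reordering is allowed, negate_expr returns "b - a"; for
   an expression with no cheaper form it simply wraps the operand in a
   NEGATE_EXPR, so the caller always gets a usable tree back.  */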
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
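
/* Illustrative example (not part of the original source): splitting
   IN = "x + 3" with CODE == PLUS_EXPR yields *LITP == 3, *CONP == NULL
   and returns "x"; splitting "x - 3" the same way moves the 3 into
   *MINUS_LITP instead.  */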

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
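
/* Illustrative example (not part of the original source): folding
   int_const_binop (PLUS_EXPR, <2>, <3>) produces an INTEGER_CST for 5;
   any of the division or modulo cases with a zero ARG2 return NULL_TREE
   rather than folding, leaving the trap to run time.  */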

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may be dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     div = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
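
/* Illustrative note (not part of the original source): for two
   VECTOR_CSTs the fold is element-wise, e.g. {1,2,3,4} + {10,20,30,40}
   becomes {11,22,33,44}, and a single non-foldable element makes the
   whole fold fail with NULL_TREE.  */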

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument, put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));
  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE, with ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
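
/* Illustrative example (not part of the original source):
   size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds directly to
   the sizetype constant 12; with a non-constant operand it falls back
   to building the expression via fold_build2_loc.  */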

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
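
/* Illustrative example (not part of the original source): with sizetype
   operands 4 and 8, the result is computed as 0 - (8 - 4) so the
   subtraction stays in range of the unsigned type, and comes back as
   the ssizetype constant -4.  */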
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
1837 to an integer type. */
1838
1839 static tree
1840 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1841 {
1842 bool overflow = false;
1843 tree t;
1844
1845 /* The following code implements the floating point to integer
1846 conversion rules required by the Java Language Specification,
1847 that IEEE NaNs are mapped to zero and values that overflow
1848 the target precision saturate, i.e. values greater than
1849 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1850 are mapped to INT_MIN. These semantics are allowed by the
1851 C and C++ standards that simply state that the behavior of
1852 FP-to-integer conversion is unspecified upon overflow. */
1853
1854 wide_int val;
1855 REAL_VALUE_TYPE r;
1856 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1857
1858 switch (code)
1859 {
1860 case FIX_TRUNC_EXPR:
1861 real_trunc (&r, VOIDmode, &x);
1862 break;
1863
1864 default:
1865 gcc_unreachable ();
1866 }
1867
1868 /* If R is NaN, return zero and show we have an overflow. */
1869 if (REAL_VALUE_ISNAN (r))
1870 {
1871 overflow = true;
1872 val = wi::zero (TYPE_PRECISION (type));
1873 }
1874
1875 /* See if R is less than the lower bound or greater than the
1876 upper bound. */
1877
1878 if (! overflow)
1879 {
1880 tree lt = TYPE_MIN_VALUE (type);
1881 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1882 if (REAL_VALUES_LESS (r, l))
1883 {
1884 overflow = true;
1885 val = lt;
1886 }
1887 }
1888
1889 if (! overflow)
1890 {
1891 tree ut = TYPE_MAX_VALUE (type);
1892 if (ut)
1893 {
1894 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1895 if (REAL_VALUES_LESS (u, r))
1896 {
1897 overflow = true;
1898 val = ut;
1899 }
1900 }
1901 }
1902
1903 if (! overflow)
1904 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1905
1906 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1907 return t;
1908 }
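
/* Editorial examples, not part of the original source: with a 32-bit
   signed target TYPE the saturating semantics above give

     (int) 1e30   ->  2147483647 (INT_MAX), TREE_OVERFLOW set
     (int) -1e30  -> -2147483648 (INT_MIN), TREE_OVERFLOW set
     (int) NaN    ->  0, TREE_OVERFLOW set
     (int) 3.75   ->  3 (FIX_TRUNC_EXPR truncates toward zero).  */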
1909
1910 /* A subroutine of fold_convert_const handling conversions of a
1911 FIXED_CST to an integer type. */
1912
1913 static tree
1914 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1915 {
1916 tree t;
1917 double_int temp, temp_trunc;
1918 unsigned int mode;
1919
1920 /* Right shift FIXED_CST to temp by fbit. */
1921 temp = TREE_FIXED_CST (arg1).data;
1922 mode = TREE_FIXED_CST (arg1).mode;
1923 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1924 {
1925 temp = temp.rshift (GET_MODE_FBIT (mode),
1926 HOST_BITS_PER_DOUBLE_INT,
1927 SIGNED_FIXED_POINT_MODE_P (mode));
1928
1929 /* Left shift temp to temp_trunc by fbit. */
1930 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1931 HOST_BITS_PER_DOUBLE_INT,
1932 SIGNED_FIXED_POINT_MODE_P (mode));
1933 }
1934 else
1935 {
1936 temp = double_int_zero;
1937 temp_trunc = double_int_zero;
1938 }
1939
1940 /* If FIXED_CST is negative, we need to round the value toward 0.
1941 We do this by adding 1 to TEMP when the fractional bits are nonzero. */
1942 if (SIGNED_FIXED_POINT_MODE_P (mode)
1943 && temp_trunc.is_negative ()
1944 && TREE_FIXED_CST (arg1).data != temp_trunc)
1945 temp += double_int_one;
1946
1947 /* Given a fixed-point constant, make a new constant with the new type,
1948 appropriately sign-extended or truncated. */
1949 t = force_fit_type (type, temp, -1,
1950 (temp.is_negative ()
1951 && (TYPE_UNSIGNED (type)
1952 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1953 | TREE_OVERFLOW (arg1));
1954
1955 return t;
1956 }
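
/* Editorial worked example, not part of the original source: for a
   signed fixed-point value of -2.5 with FBIT fractional bits,

     temp       = -2.5 >> FBIT  =  -3    (arithmetic shift rounds down)
     temp_trunc = temp << FBIT  =  -3.0
     -2.5 != -3.0 and temp_trunc is negative, so temp += 1, giving -2

   i.e. the conversion truncates toward zero, not toward negative
   infinity.  */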
1957
1958 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1959 to another floating point type. */
1960
1961 static tree
1962 fold_convert_const_real_from_real (tree type, const_tree arg1)
1963 {
1964 REAL_VALUE_TYPE value;
1965 tree t;
1966
1967 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1968 t = build_real (type, value);
1969
1970 /* If converting an infinity or NAN to a representation that doesn't
1971 have one, set the overflow bit so that we can produce some kind of
1972 error message at the appropriate point if necessary. It's not the
1973 most user-friendly message, but it's better than nothing. */
1974 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1975 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1976 TREE_OVERFLOW (t) = 1;
1977 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1978 && !MODE_HAS_NANS (TYPE_MODE (type)))
1979 TREE_OVERFLOW (t) = 1;
1980 /* Regular overflow, conversion produced an infinity in a mode that
1981 can't represent them. */
1982 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1983 && REAL_VALUE_ISINF (value)
1984 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1985 TREE_OVERFLOW (t) = 1;
1986 else
1987 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1988 return t;
1989 }
1990
1991 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1992 to a floating point type. */
1993
1994 static tree
1995 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1996 {
1997 REAL_VALUE_TYPE value;
1998 tree t;
1999
2000 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2001 t = build_real (type, value);
2002
2003 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2004 return t;
2005 }
2006
2007 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2008 to another fixed-point type. */
2009
2010 static tree
2011 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2012 {
2013 FIXED_VALUE_TYPE value;
2014 tree t;
2015 bool overflow_p;
2016
2017 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2018 TYPE_SATURATING (type));
2019 t = build_fixed (type, value);
2020
2021 /* Propagate overflow flags. */
2022 if (overflow_p | TREE_OVERFLOW (arg1))
2023 TREE_OVERFLOW (t) = 1;
2024 return t;
2025 }
2026
2027 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2028 to a fixed-point type. */
2029
2030 static tree
2031 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2032 {
2033 FIXED_VALUE_TYPE value;
2034 tree t;
2035 bool overflow_p;
2036 double_int di;
2037
2038 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2039
2040 di.low = TREE_INT_CST_ELT (arg1, 0);
2041 if (TREE_INT_CST_NUNITS (arg1) == 1)
2042 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2043 else
2044 di.high = TREE_INT_CST_ELT (arg1, 1);
2045
2046 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2047 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2048 TYPE_SATURATING (type));
2049 t = build_fixed (type, value);
2050
2051 /* Propagate overflow flags. */
2052 if (overflow_p | TREE_OVERFLOW (arg1))
2053 TREE_OVERFLOW (t) = 1;
2054 return t;
2055 }
2056
2057 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2058 to a fixed-point type. */
2059
2060 static tree
2061 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2062 {
2063 FIXED_VALUE_TYPE value;
2064 tree t;
2065 bool overflow_p;
2066
2067 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2068 &TREE_REAL_CST (arg1),
2069 TYPE_SATURATING (type));
2070 t = build_fixed (type, value);
2071
2072 /* Propagate overflow flags. */
2073 if (overflow_p | TREE_OVERFLOW (arg1))
2074 TREE_OVERFLOW (t) = 1;
2075 return t;
2076 }
2077
2078 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2079 type TYPE. If no simplification can be done return NULL_TREE. */
2080
2081 static tree
2082 fold_convert_const (enum tree_code code, tree type, tree arg1)
2083 {
2084 if (TREE_TYPE (arg1) == type)
2085 return arg1;
2086
2087 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2088 || TREE_CODE (type) == OFFSET_TYPE)
2089 {
2090 if (TREE_CODE (arg1) == INTEGER_CST)
2091 return fold_convert_const_int_from_int (type, arg1);
2092 else if (TREE_CODE (arg1) == REAL_CST)
2093 return fold_convert_const_int_from_real (code, type, arg1);
2094 else if (TREE_CODE (arg1) == FIXED_CST)
2095 return fold_convert_const_int_from_fixed (type, arg1);
2096 }
2097 else if (TREE_CODE (type) == REAL_TYPE)
2098 {
2099 if (TREE_CODE (arg1) == INTEGER_CST)
2100 return build_real_from_int_cst (type, arg1);
2101 else if (TREE_CODE (arg1) == REAL_CST)
2102 return fold_convert_const_real_from_real (type, arg1);
2103 else if (TREE_CODE (arg1) == FIXED_CST)
2104 return fold_convert_const_real_from_fixed (type, arg1);
2105 }
2106 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2107 {
2108 if (TREE_CODE (arg1) == FIXED_CST)
2109 return fold_convert_const_fixed_from_fixed (type, arg1);
2110 else if (TREE_CODE (arg1) == INTEGER_CST)
2111 return fold_convert_const_fixed_from_int (type, arg1);
2112 else if (TREE_CODE (arg1) == REAL_CST)
2113 return fold_convert_const_fixed_from_real (type, arg1);
2114 }
2115 return NULL_TREE;
2116 }
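
/* Editorial sketch, not part of the original source: only
   constant-to-constant conversions are handled here, e.g.

     fold_convert_const (NOP_EXPR, integer_type_node, some_int_cst);
     fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, some_real_cst);

   where some_int_cst and some_real_cst stand for hypothetical
   INTEGER_CST and REAL_CST nodes.  Any other ARG1 falls through to the
   final return and yields NULL_TREE, leaving the caller to build an
   explicit conversion.  */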
2117
2118 /* Construct a vector of vector type TYPE whose elements are all zero. */
2119
2120 static tree
2121 build_zero_vector (tree type)
2122 {
2123 tree t;
2124
2125 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2126 return build_vector_from_val (type, t);
2127 }
2128
2129 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2130
2131 bool
2132 fold_convertible_p (const_tree type, const_tree arg)
2133 {
2134 tree orig = TREE_TYPE (arg);
2135
2136 if (type == orig)
2137 return true;
2138
2139 if (TREE_CODE (arg) == ERROR_MARK
2140 || TREE_CODE (type) == ERROR_MARK
2141 || TREE_CODE (orig) == ERROR_MARK)
2142 return false;
2143
2144 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2145 return true;
2146
2147 switch (TREE_CODE (type))
2148 {
2149 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2150 case POINTER_TYPE: case REFERENCE_TYPE:
2151 case OFFSET_TYPE:
2152 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2153 || TREE_CODE (orig) == OFFSET_TYPE)
2154 return true;
2155 return (TREE_CODE (orig) == VECTOR_TYPE
2156 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2157
2158 case REAL_TYPE:
2159 case FIXED_POINT_TYPE:
2160 case COMPLEX_TYPE:
2161 case VECTOR_TYPE:
2162 case VOID_TYPE:
2163 return TREE_CODE (type) == TREE_CODE (orig);
2164
2165 default:
2166 return false;
2167 }
2168 }
2169
2170 /* Convert expression ARG to type TYPE. Used by the middle-end for
2171 simple conversions in preference to calling the front-end's convert. */
2172
2173 tree
2174 fold_convert_loc (location_t loc, tree type, tree arg)
2175 {
2176 tree orig = TREE_TYPE (arg);
2177 tree tem;
2178
2179 if (type == orig)
2180 return arg;
2181
2182 if (TREE_CODE (arg) == ERROR_MARK
2183 || TREE_CODE (type) == ERROR_MARK
2184 || TREE_CODE (orig) == ERROR_MARK)
2185 return error_mark_node;
2186
2187 switch (TREE_CODE (type))
2188 {
2189 case POINTER_TYPE:
2190 case REFERENCE_TYPE:
2191 /* Handle conversions between pointers to different address spaces. */
2192 if (POINTER_TYPE_P (orig)
2193 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2194 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2195 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2196 /* fall through */
2197
2198 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2199 case OFFSET_TYPE:
2200 if (TREE_CODE (arg) == INTEGER_CST)
2201 {
2202 tem = fold_convert_const (NOP_EXPR, type, arg);
2203 if (tem != NULL_TREE)
2204 return tem;
2205 }
2206 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2207 || TREE_CODE (orig) == OFFSET_TYPE)
2208 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2209 if (TREE_CODE (orig) == COMPLEX_TYPE)
2210 return fold_convert_loc (loc, type,
2211 fold_build1_loc (loc, REALPART_EXPR,
2212 TREE_TYPE (orig), arg));
2213 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2214 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2215 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2216
2217 case REAL_TYPE:
2218 if (TREE_CODE (arg) == INTEGER_CST)
2219 {
2220 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2221 if (tem != NULL_TREE)
2222 return tem;
2223 }
2224 else if (TREE_CODE (arg) == REAL_CST)
2225 {
2226 tem = fold_convert_const (NOP_EXPR, type, arg);
2227 if (tem != NULL_TREE)
2228 return tem;
2229 }
2230 else if (TREE_CODE (arg) == FIXED_CST)
2231 {
2232 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2233 if (tem != NULL_TREE)
2234 return tem;
2235 }
2236
2237 switch (TREE_CODE (orig))
2238 {
2239 case INTEGER_TYPE:
2240 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2241 case POINTER_TYPE: case REFERENCE_TYPE:
2242 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2243
2244 case REAL_TYPE:
2245 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2246
2247 case FIXED_POINT_TYPE:
2248 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2249
2250 case COMPLEX_TYPE:
2251 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2252 return fold_convert_loc (loc, type, tem);
2253
2254 default:
2255 gcc_unreachable ();
2256 }
2257
2258 case FIXED_POINT_TYPE:
2259 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2260 || TREE_CODE (arg) == REAL_CST)
2261 {
2262 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2263 if (tem != NULL_TREE)
2264 goto fold_convert_exit;
2265 }
2266
2267 switch (TREE_CODE (orig))
2268 {
2269 case FIXED_POINT_TYPE:
2270 case INTEGER_TYPE:
2271 case ENUMERAL_TYPE:
2272 case BOOLEAN_TYPE:
2273 case REAL_TYPE:
2274 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2275
2276 case COMPLEX_TYPE:
2277 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2278 return fold_convert_loc (loc, type, tem);
2279
2280 default:
2281 gcc_unreachable ();
2282 }
2283
2284 case COMPLEX_TYPE:
2285 switch (TREE_CODE (orig))
2286 {
2287 case INTEGER_TYPE:
2288 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2289 case POINTER_TYPE: case REFERENCE_TYPE:
2290 case REAL_TYPE:
2291 case FIXED_POINT_TYPE:
2292 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2293 fold_convert_loc (loc, TREE_TYPE (type), arg),
2294 fold_convert_loc (loc, TREE_TYPE (type),
2295 integer_zero_node));
2296 case COMPLEX_TYPE:
2297 {
2298 tree rpart, ipart;
2299
2300 if (TREE_CODE (arg) == COMPLEX_EXPR)
2301 {
2302 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2303 TREE_OPERAND (arg, 0));
2304 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2305 TREE_OPERAND (arg, 1));
2306 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2307 }
2308
2309 arg = save_expr (arg);
2310 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2311 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2312 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2313 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2314 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2315 }
2316
2317 default:
2318 gcc_unreachable ();
2319 }
2320
2321 case VECTOR_TYPE:
2322 if (integer_zerop (arg))
2323 return build_zero_vector (type);
2324 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2325 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2326 || TREE_CODE (orig) == VECTOR_TYPE);
2327 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2328
2329 case VOID_TYPE:
2330 tem = fold_ignored_result (arg);
2331 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2332
2333 default:
2334 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2335 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2336 gcc_unreachable ();
2337 }
2338 fold_convert_exit:
2339 protected_set_expr_location_unshare (tem, loc);
2340 return tem;
2341 }
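
/* Editorial examples, not part of the original source, of what the
   dispatch above builds:

     int -> double               FLOAT_EXPR <arg>
     _Complex double -> double   REALPART_EXPR <arg>, then converted
     scalar zero -> vector       build_zero_vector (type)
     same main variant           plain NOP_EXPR

   Note that real-to-integer conversions are not handled here; callers
   use convert or build FIX_TRUNC_EXPR themselves.  */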
2342 \f
2343 /* Return false if expr can be assumed not to be an lvalue, true
2344 otherwise. */
2345
2346 static bool
2347 maybe_lvalue_p (const_tree x)
2348 {
2349 /* We only need to wrap lvalue tree codes. */
2350 switch (TREE_CODE (x))
2351 {
2352 case VAR_DECL:
2353 case PARM_DECL:
2354 case RESULT_DECL:
2355 case LABEL_DECL:
2356 case FUNCTION_DECL:
2357 case SSA_NAME:
2358
2359 case COMPONENT_REF:
2360 case MEM_REF:
2361 case INDIRECT_REF:
2362 case ARRAY_REF:
2363 case ARRAY_RANGE_REF:
2364 case BIT_FIELD_REF:
2365 case OBJ_TYPE_REF:
2366
2367 case REALPART_EXPR:
2368 case IMAGPART_EXPR:
2369 case PREINCREMENT_EXPR:
2370 case PREDECREMENT_EXPR:
2371 case SAVE_EXPR:
2372 case TRY_CATCH_EXPR:
2373 case WITH_CLEANUP_EXPR:
2374 case COMPOUND_EXPR:
2375 case MODIFY_EXPR:
2376 case TARGET_EXPR:
2377 case COND_EXPR:
2378 case BIND_EXPR:
2379 break;
2380
2381 default:
2382 /* Assume the worst for front-end tree codes. */
2383 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2384 break;
2385 return false;
2386 }
2387
2388 return true;
2389 }
2390
2391 /* Return an expr equal to X but certainly not valid as an lvalue. */
2392
2393 tree
2394 non_lvalue_loc (location_t loc, tree x)
2395 {
2396 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2397 us. */
2398 if (in_gimple_form)
2399 return x;
2400
2401 if (! maybe_lvalue_p (x))
2402 return x;
2403 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2404 }
2405
2406 /* When pedantic, return an expr equal to X but certainly not valid as a
2407 pedantic lvalue. Otherwise, return X. */
2408
2409 static tree
2410 pedantic_non_lvalue_loc (location_t loc, tree x)
2411 {
2412 return protected_set_expr_location_unshare (x, loc);
2413 }
2414 \f
2415 /* Given a tree comparison code, return the code that is the logical inverse.
2416 It is generally not safe to do this for floating-point comparisons, except
2417 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2418 ERROR_MARK in this case. */
2419
2420 enum tree_code
2421 invert_tree_comparison (enum tree_code code, bool honor_nans)
2422 {
2423 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2424 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2425 return ERROR_MARK;
2426
2427 switch (code)
2428 {
2429 case EQ_EXPR:
2430 return NE_EXPR;
2431 case NE_EXPR:
2432 return EQ_EXPR;
2433 case GT_EXPR:
2434 return honor_nans ? UNLE_EXPR : LE_EXPR;
2435 case GE_EXPR:
2436 return honor_nans ? UNLT_EXPR : LT_EXPR;
2437 case LT_EXPR:
2438 return honor_nans ? UNGE_EXPR : GE_EXPR;
2439 case LE_EXPR:
2440 return honor_nans ? UNGT_EXPR : GT_EXPR;
2441 case LTGT_EXPR:
2442 return UNEQ_EXPR;
2443 case UNEQ_EXPR:
2444 return LTGT_EXPR;
2445 case UNGT_EXPR:
2446 return LE_EXPR;
2447 case UNGE_EXPR:
2448 return LT_EXPR;
2449 case UNLT_EXPR:
2450 return GE_EXPR;
2451 case UNLE_EXPR:
2452 return GT_EXPR;
2453 case ORDERED_EXPR:
2454 return UNORDERED_EXPR;
2455 case UNORDERED_EXPR:
2456 return ORDERED_EXPR;
2457 default:
2458 gcc_unreachable ();
2459 }
2460 }
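
/* Editorial examples, not part of the original source:

     invert_tree_comparison (LT_EXPR, false)  ->  GE_EXPR
     invert_tree_comparison (LT_EXPR, true)   ->  UNGE_EXPR
       (the negation of "a < b" must hold when either operand is NaN)

   and with flag_trapping_math set, inverting LT_EXPR under NaNs yields
   ERROR_MARK: LT traps on unordered operands but UNGE does not, so the
   inversion would lose a trap.  */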
2461
2462 /* Similar, but return the comparison that results if the operands are
2463 swapped. This is safe for floating-point. */
2464
2465 enum tree_code
2466 swap_tree_comparison (enum tree_code code)
2467 {
2468 switch (code)
2469 {
2470 case EQ_EXPR:
2471 case NE_EXPR:
2472 case ORDERED_EXPR:
2473 case UNORDERED_EXPR:
2474 case LTGT_EXPR:
2475 case UNEQ_EXPR:
2476 return code;
2477 case GT_EXPR:
2478 return LT_EXPR;
2479 case GE_EXPR:
2480 return LE_EXPR;
2481 case LT_EXPR:
2482 return GT_EXPR;
2483 case LE_EXPR:
2484 return GE_EXPR;
2485 case UNGT_EXPR:
2486 return UNLT_EXPR;
2487 case UNGE_EXPR:
2488 return UNLE_EXPR;
2489 case UNLT_EXPR:
2490 return UNGT_EXPR;
2491 case UNLE_EXPR:
2492 return UNGE_EXPR;
2493 default:
2494 gcc_unreachable ();
2495 }
2496 }
2497
2498
2499 /* Convert a comparison tree code from an enum tree_code representation
2500 into a compcode bit-based encoding. This function is the inverse of
2501 compcode_to_comparison. */
2502
2503 static enum comparison_code
2504 comparison_to_compcode (enum tree_code code)
2505 {
2506 switch (code)
2507 {
2508 case LT_EXPR:
2509 return COMPCODE_LT;
2510 case EQ_EXPR:
2511 return COMPCODE_EQ;
2512 case LE_EXPR:
2513 return COMPCODE_LE;
2514 case GT_EXPR:
2515 return COMPCODE_GT;
2516 case NE_EXPR:
2517 return COMPCODE_NE;
2518 case GE_EXPR:
2519 return COMPCODE_GE;
2520 case ORDERED_EXPR:
2521 return COMPCODE_ORD;
2522 case UNORDERED_EXPR:
2523 return COMPCODE_UNORD;
2524 case UNLT_EXPR:
2525 return COMPCODE_UNLT;
2526 case UNEQ_EXPR:
2527 return COMPCODE_UNEQ;
2528 case UNLE_EXPR:
2529 return COMPCODE_UNLE;
2530 case UNGT_EXPR:
2531 return COMPCODE_UNGT;
2532 case LTGT_EXPR:
2533 return COMPCODE_LTGT;
2534 case UNGE_EXPR:
2535 return COMPCODE_UNGE;
2536 default:
2537 gcc_unreachable ();
2538 }
2539 }
2540
2541 /* Convert a compcode bit-based encoding of a comparison operator back
2542 to GCC's enum tree_code representation. This function is the
2543 inverse of comparison_to_compcode. */
2544
2545 static enum tree_code
2546 compcode_to_comparison (enum comparison_code code)
2547 {
2548 switch (code)
2549 {
2550 case COMPCODE_LT:
2551 return LT_EXPR;
2552 case COMPCODE_EQ:
2553 return EQ_EXPR;
2554 case COMPCODE_LE:
2555 return LE_EXPR;
2556 case COMPCODE_GT:
2557 return GT_EXPR;
2558 case COMPCODE_NE:
2559 return NE_EXPR;
2560 case COMPCODE_GE:
2561 return GE_EXPR;
2562 case COMPCODE_ORD:
2563 return ORDERED_EXPR;
2564 case COMPCODE_UNORD:
2565 return UNORDERED_EXPR;
2566 case COMPCODE_UNLT:
2567 return UNLT_EXPR;
2568 case COMPCODE_UNEQ:
2569 return UNEQ_EXPR;
2570 case COMPCODE_UNLE:
2571 return UNLE_EXPR;
2572 case COMPCODE_UNGT:
2573 return UNGT_EXPR;
2574 case COMPCODE_LTGT:
2575 return LTGT_EXPR;
2576 case COMPCODE_UNGE:
2577 return UNGE_EXPR;
2578 default:
2579 gcc_unreachable ();
2580 }
2581 }
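
/* Editorial note, not part of the original source: the encoding assigns
   "less", "equal", "greater" and "unordered" to separate bits, so e.g.
   COMPCODE_LE (3) == COMPCODE_LT (1) | COMPCODE_EQ (2).  This is what
   lets combine_comparisons below implement the AND/OR of two
   comparisons as a plain bitwise AND/OR of their compcodes.  */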
2582
2583 /* Return a tree for the comparison which is the combination of
2584 doing the AND or OR (depending on CODE) of the two operations LCODE
2585 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2586 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2587 if this makes the transformation invalid. */
2588
2589 tree
2590 combine_comparisons (location_t loc,
2591 enum tree_code code, enum tree_code lcode,
2592 enum tree_code rcode, tree truth_type,
2593 tree ll_arg, tree lr_arg)
2594 {
2595 bool honor_nans = HONOR_NANS (element_mode (ll_arg));
2596 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2597 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2598 int compcode;
2599
2600 switch (code)
2601 {
2602 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2603 compcode = lcompcode & rcompcode;
2604 break;
2605
2606 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2607 compcode = lcompcode | rcompcode;
2608 break;
2609
2610 default:
2611 return NULL_TREE;
2612 }
2613
2614 if (!honor_nans)
2615 {
2616 /* Eliminate unordered comparisons, as well as LTGT and ORD
2617 which are not used unless the mode has NaNs. */
2618 compcode &= ~COMPCODE_UNORD;
2619 if (compcode == COMPCODE_LTGT)
2620 compcode = COMPCODE_NE;
2621 else if (compcode == COMPCODE_ORD)
2622 compcode = COMPCODE_TRUE;
2623 }
2624 else if (flag_trapping_math)
2625 {
2626 /* Check that the original operation and the optimized ones will trap
2627 under the same condition. */
2628 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2629 && (lcompcode != COMPCODE_EQ)
2630 && (lcompcode != COMPCODE_ORD);
2631 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2632 && (rcompcode != COMPCODE_EQ)
2633 && (rcompcode != COMPCODE_ORD);
2634 bool trap = (compcode & COMPCODE_UNORD) == 0
2635 && (compcode != COMPCODE_EQ)
2636 && (compcode != COMPCODE_ORD);
2637
2638 /* In a short-circuited boolean expression the LHS might be
2639 such that the RHS, if evaluated, will never trap. For
2640 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2641 if neither x nor y is NaN. (This is a mixed blessing: for
2642 example, the expression above will never trap, hence
2643 optimizing it to x < y would be invalid). */
2644 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2645 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2646 rtrap = false;
2647
2648 /* If the comparison was short-circuited, and only the RHS
2649 trapped, we may now generate a spurious trap. */
2650 if (rtrap && !ltrap
2651 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2652 return NULL_TREE;
2653
2654 /* If we changed the conditions that cause a trap, we lose. */
2655 if ((ltrap || rtrap) != trap)
2656 return NULL_TREE;
2657 }
2658
2659 if (compcode == COMPCODE_TRUE)
2660 return constant_boolean_node (true, truth_type);
2661 else if (compcode == COMPCODE_FALSE)
2662 return constant_boolean_node (false, truth_type);
2663 else
2664 {
2665 enum tree_code tcode;
2666
2667 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2668 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2669 }
2670 }
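
/* Editorial worked example, not part of the original source: combining
   "a < b || a == b" gives

     compcode = COMPCODE_LT | COMPCODE_EQ = COMPCODE_LE

   so the pair folds to the single comparison "a <= b".  This holds even
   under -ftrapping-math: the combined LE traps on unordered operands
   exactly as the original LT did.  */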
2671 \f
2672 /* Return nonzero if two operands (typically of the same tree node)
2673 are necessarily equal. If either argument has side-effects this
2674 function returns zero. FLAGS modifies behavior as follows:
2675
2676 If OEP_ONLY_CONST is set, only return nonzero for constants.
2677 This function tests whether the operands are indistinguishable;
2678 it does not test whether they are equal using C's == operation.
2679 The distinction is important for IEEE floating point, because
2680 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2681 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2682
2683 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2684 even though it may hold multiple values during a function.
2685 This is because a GCC tree node guarantees that nothing else is
2686 executed between the evaluation of its "operands" (which may often
2687 be evaluated in arbitrary order). Hence if the operands themselves
2688 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2689 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2690 unset means assuming isochronic (or instantaneous) tree equivalence.
2691 Unless comparing arbitrary expression trees, such as from different
2692 statements, this flag can usually be left unset.
2693
2694 If OEP_PURE_SAME is set, then pure functions with identical arguments
2695 are considered the same. It is used when the caller has other ways
2696 to ensure that global memory is unchanged in between. */
2697
2698 int
2699 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2700 {
2701 /* If either is ERROR_MARK, they aren't equal. */
2702 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2703 || TREE_TYPE (arg0) == error_mark_node
2704 || TREE_TYPE (arg1) == error_mark_node)
2705 return 0;
2706
2707 /* Similar, if either does not have a type (like a released SSA name),
2708 they aren't equal. */
2709 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2710 return 0;
2711
2712 /* Check equality of integer constants before bailing out due to
2713 precision differences. */
2714 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2715 return tree_int_cst_equal (arg0, arg1);
2716
2717 /* If the two types don't have the same signedness, then we can't consider
2718 them equal. We must check this before the STRIP_NOPS calls
2719 because they may change the signedness of the arguments. As pointers
2720 strictly don't have a signedness, require either two pointers or
2721 two non-pointers as well. */
2722 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2723 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2724 return 0;
2725
2726 /* We cannot consider pointers to different address spaces equal. */
2727 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2728 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2729 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2730 return 0;
2731
2732 /* If the two types don't have the same precision, then it is not safe
2733 to strip NOPs. */
2734 if (element_precision (TREE_TYPE (arg0))
2735 != element_precision (TREE_TYPE (arg1)))
2736 return 0;
2737
2738 STRIP_NOPS (arg0);
2739 STRIP_NOPS (arg1);
2740
2741 /* In case both args are comparisons but with different comparison
2742 code, try to swap the comparison operands of one arg to produce
2743 a match and compare that variant. */
2744 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2745 && COMPARISON_CLASS_P (arg0)
2746 && COMPARISON_CLASS_P (arg1))
2747 {
2748 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2749
2750 if (TREE_CODE (arg0) == swap_code)
2751 return operand_equal_p (TREE_OPERAND (arg0, 0),
2752 TREE_OPERAND (arg1, 1), flags)
2753 && operand_equal_p (TREE_OPERAND (arg0, 1),
2754 TREE_OPERAND (arg1, 0), flags);
2755 }
2756
2757 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2758 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2759 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2760 return 0;
2761
2762 /* This is needed for conversions and for COMPONENT_REF.
2763 Might as well play it safe and always test this. */
2764 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2765 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2766 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2767 return 0;
2768
2769 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2770 We don't care about side effects in that case because the SAVE_EXPR
2771 takes care of that for us. In all other cases, two expressions are
2772 equal if they have no side effects. If we have two identical
2773 expressions with side effects that should be treated the same due
2774 to the only side effects being identical SAVE_EXPR's, that will
2775 be detected in the recursive calls below.
2776 If we are taking an invariant address of two identical objects
2777 they are necessarily equal as well. */
2778 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2779 && (TREE_CODE (arg0) == SAVE_EXPR
2780 || (flags & OEP_CONSTANT_ADDRESS_OF)
2781 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2782 return 1;
2783
2784 /* Next handle constant cases, those for which we can return 1 even
2785 if ONLY_CONST is set. */
2786 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2787 switch (TREE_CODE (arg0))
2788 {
2789 case INTEGER_CST:
2790 return tree_int_cst_equal (arg0, arg1);
2791
2792 case FIXED_CST:
2793 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2794 TREE_FIXED_CST (arg1));
2795
2796 case REAL_CST:
2797 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2798 TREE_REAL_CST (arg1)))
2799 return 1;
2800
2801
2802 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2803 {
2804 /* If we do not distinguish between signed and unsigned zero,
2805 consider them equal. */
2806 if (real_zerop (arg0) && real_zerop (arg1))
2807 return 1;
2808 }
2809 return 0;
2810
2811 case VECTOR_CST:
2812 {
2813 unsigned i;
2814
2815 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2816 return 0;
2817
2818 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2819 {
2820 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2821 VECTOR_CST_ELT (arg1, i), flags))
2822 return 0;
2823 }
2824 return 1;
2825 }
2826
2827 case COMPLEX_CST:
2828 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2829 flags)
2830 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2831 flags));
2832
2833 case STRING_CST:
2834 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2835 && ! memcmp (TREE_STRING_POINTER (arg0),
2836 TREE_STRING_POINTER (arg1),
2837 TREE_STRING_LENGTH (arg0)));
2838
2839 case ADDR_EXPR:
2840 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2841 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2842 ? OEP_CONSTANT_ADDRESS_OF : 0);
2843 default:
2844 break;
2845 }
2846
2847 if (flags & OEP_ONLY_CONST)
2848 return 0;
2849
2850 /* Define macros to test an operand from arg0 and arg1 for equality and a
2851 variant that allows null and views null as being different from any
2852 non-null value. In the latter case, if either is null, then both
2853 must be; otherwise, do the normal comparison. */
2854 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2855 TREE_OPERAND (arg1, N), flags)
2856
2857 #define OP_SAME_WITH_NULL(N) \
2858 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2859 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2860
2861 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2862 {
2863 case tcc_unary:
2864 /* Two conversions are equal only if signedness and modes match. */
2865 switch (TREE_CODE (arg0))
2866 {
2867 CASE_CONVERT:
2868 case FIX_TRUNC_EXPR:
2869 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2870 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2871 return 0;
2872 break;
2873 default:
2874 break;
2875 }
2876
2877 return OP_SAME (0);
2878
2879
2880 case tcc_comparison:
2881 case tcc_binary:
2882 if (OP_SAME (0) && OP_SAME (1))
2883 return 1;
2884
2885 /* For commutative ops, allow the other order. */
2886 return (commutative_tree_code (TREE_CODE (arg0))
2887 && operand_equal_p (TREE_OPERAND (arg0, 0),
2888 TREE_OPERAND (arg1, 1), flags)
2889 && operand_equal_p (TREE_OPERAND (arg0, 1),
2890 TREE_OPERAND (arg1, 0), flags));
2891
2892 case tcc_reference:
2893 /* If either of the pointer (or reference) expressions we are
2894 dereferencing contain a side effect, these cannot be equal,
2895 but their addresses can be. */
2896 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2897 && (TREE_SIDE_EFFECTS (arg0)
2898 || TREE_SIDE_EFFECTS (arg1)))
2899 return 0;
2900
2901 switch (TREE_CODE (arg0))
2902 {
2903 case INDIRECT_REF:
2904 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2905 return OP_SAME (0);
2906
2907 case REALPART_EXPR:
2908 case IMAGPART_EXPR:
2909 return OP_SAME (0);
2910
2911 case TARGET_MEM_REF:
2912 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2913 /* Require equal extra operands and then fall through to MEM_REF
2914 handling of the two common operands. */
2915 if (!OP_SAME_WITH_NULL (2)
2916 || !OP_SAME_WITH_NULL (3)
2917 || !OP_SAME_WITH_NULL (4))
2918 return 0;
2919 /* Fallthru. */
2920 case MEM_REF:
2921 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2922 /* Require equal access sizes, and similar pointer types.
2923 We can have incomplete types for array references of
2924 variable-sized arrays from the Fortran frontend
2925 though. Also verify the types are compatible. */
2926 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2927 || (TYPE_SIZE (TREE_TYPE (arg0))
2928 && TYPE_SIZE (TREE_TYPE (arg1))
2929 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2930 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2931 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2932 && alias_ptr_types_compatible_p
2933 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2934 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2935 && OP_SAME (0) && OP_SAME (1));
2936
2937 case ARRAY_REF:
2938 case ARRAY_RANGE_REF:
2939 /* Operands 2 and 3 may be null.
2940 Compare the array index by value if it is constant first as we
2941 may have different types but same value here. */
2942 if (!OP_SAME (0))
2943 return 0;
2944 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2945 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2946 TREE_OPERAND (arg1, 1))
2947 || OP_SAME (1))
2948 && OP_SAME_WITH_NULL (2)
2949 && OP_SAME_WITH_NULL (3));
2950
2951 case COMPONENT_REF:
2952 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2953 may be NULL when we're called to compare MEM_EXPRs. */
2954 if (!OP_SAME_WITH_NULL (0)
2955 || !OP_SAME (1))
2956 return 0;
2957 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2958 return OP_SAME_WITH_NULL (2);
2959
2960 case BIT_FIELD_REF:
2961 if (!OP_SAME (0))
2962 return 0;
2963 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2964 return OP_SAME (1) && OP_SAME (2);
2965
2966 default:
2967 return 0;
2968 }
2969
2970 case tcc_expression:
2971 switch (TREE_CODE (arg0))
2972 {
2973 case ADDR_EXPR:
2974 case TRUTH_NOT_EXPR:
2975 return OP_SAME (0);
2976
2977 case TRUTH_ANDIF_EXPR:
2978 case TRUTH_ORIF_EXPR:
2979 return OP_SAME (0) && OP_SAME (1);
2980
2981 case FMA_EXPR:
2982 case WIDEN_MULT_PLUS_EXPR:
2983 case WIDEN_MULT_MINUS_EXPR:
2984 if (!OP_SAME (2))
2985 return 0;
2986 /* The multiplication operands are commutative. */
2987 /* FALLTHRU */
2988
2989 case TRUTH_AND_EXPR:
2990 case TRUTH_OR_EXPR:
2991 case TRUTH_XOR_EXPR:
2992 if (OP_SAME (0) && OP_SAME (1))
2993 return 1;
2994
2995 /* Otherwise take into account this is a commutative operation. */
2996 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2997 TREE_OPERAND (arg1, 1), flags)
2998 && operand_equal_p (TREE_OPERAND (arg0, 1),
2999 TREE_OPERAND (arg1, 0), flags));
3000
3001 case COND_EXPR:
3002 case VEC_COND_EXPR:
3003 case DOT_PROD_EXPR:
3004 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3005
3006 default:
3007 return 0;
3008 }
3009
3010 case tcc_vl_exp:
3011 switch (TREE_CODE (arg0))
3012 {
3013 case CALL_EXPR:
3014 /* If the CALL_EXPRs call different functions, then they
3015 clearly cannot be equal. */
3016 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3017 flags))
3018 return 0;
3019
3020 {
3021 unsigned int cef = call_expr_flags (arg0);
3022 if (flags & OEP_PURE_SAME)
3023 cef &= ECF_CONST | ECF_PURE;
3024 else
3025 cef &= ECF_CONST;
3026 if (!cef)
3027 return 0;
3028 }
3029
3030 /* Now see if all the arguments are the same. */
3031 {
3032 const_call_expr_arg_iterator iter0, iter1;
3033 const_tree a0, a1;
3034 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3035 a1 = first_const_call_expr_arg (arg1, &iter1);
3036 a0 && a1;
3037 a0 = next_const_call_expr_arg (&iter0),
3038 a1 = next_const_call_expr_arg (&iter1))
3039 if (! operand_equal_p (a0, a1, flags))
3040 return 0;
3041
3042 /* If we get here and both argument lists are exhausted
3043 then the CALL_EXPRs are equal. */
3044 return ! (a0 || a1);
3045 }
3046 default:
3047 return 0;
3048 }
3049
3050 case tcc_declaration:
3051 /* Consider __builtin_sqrt equal to sqrt. */
3052 return (TREE_CODE (arg0) == FUNCTION_DECL
3053 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3054 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3055 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3056
3057 default:
3058 return 0;
3059 }
3060
3061 #undef OP_SAME
3062 #undef OP_SAME_WITH_NULL
3063 }
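
/* Editorial examples, not part of the original source:

     operand_equal_p on 0.0 and -0.0 returns 0 when signed zeros are
       honored: the constants are distinguishable even though the C
       expression 0.0 == -0.0 is true;
     operand_equal_p on "x + y" and "y + x" returns 1, since PLUS_EXPR
       is commutative;
     operand_equal_p on two identical calls "f (a)" returns 1 only if f
       is ECF_CONST (or ECF_PURE with OEP_PURE_SAME).  */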
3064 \f
3065 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3066 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3067
3068 When in doubt, return 0. */
3069
3070 static int
3071 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3072 {
3073 int unsignedp1, unsignedpo;
3074 tree primarg0, primarg1, primother;
3075 unsigned int correct_width;
3076
3077 if (operand_equal_p (arg0, arg1, 0))
3078 return 1;
3079
3080 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3081 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3082 return 0;
3083
3084 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3085 and see if the inner values are the same. This removes any
3086 signedness comparison, which doesn't matter here. */
3087 primarg0 = arg0, primarg1 = arg1;
3088 STRIP_NOPS (primarg0);
3089 STRIP_NOPS (primarg1);
3090 if (operand_equal_p (primarg0, primarg1, 0))
3091 return 1;
3092
3093 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3094 actual comparison operand, ARG0.
3095
3096 First throw away any conversions to wider types
3097 already present in the operands. */
3098
3099 primarg1 = get_narrower (arg1, &unsignedp1);
3100 primother = get_narrower (other, &unsignedpo);
3101
3102 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3103 if (unsignedp1 == unsignedpo
3104 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3105 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3106 {
3107 tree type = TREE_TYPE (arg0);
3108
3109 /* Make sure the shorter operand is extended the right way
3110 to match the longer operand. */
3111 primarg1 = fold_convert (signed_or_unsigned_type_for
3112 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3113
3114 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3115 return 1;
3116 }
3117
3118 return 0;
3119 }
3120 \f
3121 /* See if ARG is an expression that is either a comparison or is performing
3122 arithmetic on comparisons. The comparisons must only be comparing
3123 two different values, which will be stored in *CVAL1 and *CVAL2; if
3124 they are nonzero it means that some operands have already been found.
3125 No variables may be used anywhere else in the expression except in the
3126 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3127 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3128
3129 If this is true, return 1. Otherwise, return zero. */
3130
3131 static int
3132 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3133 {
3134 enum tree_code code = TREE_CODE (arg);
3135 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3136
3137 /* We can handle some of the tcc_expression cases here. */
3138 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3139 tclass = tcc_unary;
3140 else if (tclass == tcc_expression
3141 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3142 || code == COMPOUND_EXPR))
3143 tclass = tcc_binary;
3144
3145 else if (tclass == tcc_expression && code == SAVE_EXPR
3146 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3147 {
3148 /* If we've already found a CVAL1 or CVAL2, this expression is
3149 too complex to handle. */
3150 if (*cval1 || *cval2)
3151 return 0;
3152
3153 tclass = tcc_unary;
3154 *save_p = 1;
3155 }
3156
3157 switch (tclass)
3158 {
3159 case tcc_unary:
3160 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3161
3162 case tcc_binary:
3163 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3164 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3165 cval1, cval2, save_p));
3166
3167 case tcc_constant:
3168 return 1;
3169
3170 case tcc_expression:
3171 if (code == COND_EXPR)
3172 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3173 cval1, cval2, save_p)
3174 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3175 cval1, cval2, save_p)
3176 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3177 cval1, cval2, save_p));
3178 return 0;
3179
3180 case tcc_comparison:
3181 /* First see if we can handle the first operand, then the second. For
3182 the second operand, we know *CVAL1 can't be zero. It must be that
3183 one side of the comparison is each of the values; test for the
3184 case where this isn't true by failing if the two operands
3185 are the same. */
3186
3187 if (operand_equal_p (TREE_OPERAND (arg, 0),
3188 TREE_OPERAND (arg, 1), 0))
3189 return 0;
3190
3191 if (*cval1 == 0)
3192 *cval1 = TREE_OPERAND (arg, 0);
3193 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3194 ;
3195 else if (*cval2 == 0)
3196 *cval2 = TREE_OPERAND (arg, 0);
3197 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3198 ;
3199 else
3200 return 0;
3201
3202 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3203 ;
3204 else if (*cval2 == 0)
3205 *cval2 = TREE_OPERAND (arg, 1);
3206 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3207 ;
3208 else
3209 return 0;
3210
3211 return 1;
3212
3213 default:
3214 return 0;
3215 }
3216 }
3217 \f
3218 /* ARG is a tree that is known to contain just arithmetic operations and
3219 comparisons. Evaluate the operations in the tree substituting NEW0 for
3220 any occurrence of OLD0 as an operand of a comparison and likewise for
3221 NEW1 and OLD1. */
3222
3223 static tree
3224 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3225 tree old1, tree new1)
3226 {
3227 tree type = TREE_TYPE (arg);
3228 enum tree_code code = TREE_CODE (arg);
3229 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3230
3231 /* We can handle some of the tcc_expression cases here. */
3232 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3233 tclass = tcc_unary;
3234 else if (tclass == tcc_expression
3235 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3236 tclass = tcc_binary;
3237
3238 switch (tclass)
3239 {
3240 case tcc_unary:
3241 return fold_build1_loc (loc, code, type,
3242 eval_subst (loc, TREE_OPERAND (arg, 0),
3243 old0, new0, old1, new1));
3244
3245 case tcc_binary:
3246 return fold_build2_loc (loc, code, type,
3247 eval_subst (loc, TREE_OPERAND (arg, 0),
3248 old0, new0, old1, new1),
3249 eval_subst (loc, TREE_OPERAND (arg, 1),
3250 old0, new0, old1, new1));
3251
3252 case tcc_expression:
3253 switch (code)
3254 {
3255 case SAVE_EXPR:
3256 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3257 old1, new1);
3258
3259 case COMPOUND_EXPR:
3260 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3261 old1, new1);
3262
3263 case COND_EXPR:
3264 return fold_build3_loc (loc, code, type,
3265 eval_subst (loc, TREE_OPERAND (arg, 0),
3266 old0, new0, old1, new1),
3267 eval_subst (loc, TREE_OPERAND (arg, 1),
3268 old0, new0, old1, new1),
3269 eval_subst (loc, TREE_OPERAND (arg, 2),
3270 old0, new0, old1, new1));
3271 default:
3272 break;
3273 }
3274 /* Fall through - ??? */
3275
3276 case tcc_comparison:
3277 {
3278 tree arg0 = TREE_OPERAND (arg, 0);
3279 tree arg1 = TREE_OPERAND (arg, 1);
3280
3281 /* We need to check both for exact equality and tree equality. The
3282 former will be true if the operand has a side-effect. In that
3283 case, we know the operand occurred exactly once. */
3284
3285 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3286 arg0 = new0;
3287 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3288 arg0 = new1;
3289
3290 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3291 arg1 = new0;
3292 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3293 arg1 = new1;
3294
3295 return fold_build2_loc (loc, code, type, arg0, arg1);
3296 }
3297
3298 default:
3299 return arg;
3300 }
3301 }
3302 \f
3303 /* Return a tree for the case when the result of an expression is RESULT
3304 converted to TYPE and OMITTED was previously an operand of the expression
3305 but is now not needed (e.g., we folded OMITTED * 0).
3306
3307 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3308 the conversion of RESULT to TYPE. */
3309
3310 tree
3311 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3312 {
3313 tree t = fold_convert_loc (loc, type, result);
3314
3315 /* If the resulting operand is an empty statement, just return the omitted
3316 statement cast to void. */
3317 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3318 return build1_loc (loc, NOP_EXPR, void_type_node,
3319 fold_ignored_result (omitted));
3320
3321 if (TREE_SIDE_EFFECTS (omitted))
3322 return build2_loc (loc, COMPOUND_EXPR, type,
3323 fold_ignored_result (omitted), t);
3324
3325 return non_lvalue_loc (loc, t);
3326 }
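
/* Editorial sketch, not part of the original source: when "f () * 0" is
   folded, the product is 0 but the call must still be evaluated, so
   omit_one_operand_loc builds the equivalent of

     (void) f (), 0

   as a COMPOUND_EXPR; a side-effect-free OMITTED simply disappears.  */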
3327
3328 /* Return a tree for the case when the result of an expression is RESULT
3329 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3330 of the expression but are now not needed.
3331
3332 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3333 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3334 evaluated before OMITTED2. Otherwise, if neither has side effects,
3335 just do the conversion of RESULT to TYPE. */
3336
3337 tree
3338 omit_two_operands_loc (location_t loc, tree type, tree result,
3339 tree omitted1, tree omitted2)
3340 {
3341 tree t = fold_convert_loc (loc, type, result);
3342
3343 if (TREE_SIDE_EFFECTS (omitted2))
3344 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3345 if (TREE_SIDE_EFFECTS (omitted1))
3346 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3347
3348 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3349 }
3350
3351 \f
3352 /* Return a simplified tree node for the truth-negation of ARG. This
3353 never alters ARG itself. We assume that ARG is an operation that
3354 returns a truth value (0 or 1).
3355
3356 FIXME: one would think we would fold the result, but it causes
3357 problems with the dominator optimizer. */
3358
3359 static tree
3360 fold_truth_not_expr (location_t loc, tree arg)
3361 {
3362 tree type = TREE_TYPE (arg);
3363 enum tree_code code = TREE_CODE (arg);
3364 location_t loc1, loc2;
3365
3366 /* If this is a comparison, we can simply invert it, except for
3367 floating-point non-equality comparisons, in which case we just
3368 enclose a TRUTH_NOT_EXPR around what we have. */
3369
3370 if (TREE_CODE_CLASS (code) == tcc_comparison)
3371 {
3372 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3373 if (FLOAT_TYPE_P (op_type)
3374 && flag_trapping_math
3375 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3376 && code != NE_EXPR && code != EQ_EXPR)
3377 return NULL_TREE;
3378
3379 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3380 if (code == ERROR_MARK)
3381 return NULL_TREE;
3382
3383 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3384 TREE_OPERAND (arg, 1));
3385 }
3386
3387 switch (code)
3388 {
3389 case INTEGER_CST:
3390 return constant_boolean_node (integer_zerop (arg), type);
3391
3392 case TRUTH_AND_EXPR:
3393 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3394 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3395 return build2_loc (loc, TRUTH_OR_EXPR, type,
3396 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3397 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3398
3399 case TRUTH_OR_EXPR:
3400 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3401 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3402 return build2_loc (loc, TRUTH_AND_EXPR, type,
3403 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3404 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3405
3406 case TRUTH_XOR_EXPR:
3407 /* Here we can invert either operand. We invert the first operand
3408 unless the second operand is a TRUTH_NOT_EXPR in which case our
3409 result is the XOR of the first operand with the inside of the
3410 negation of the second operand. */
3411
3412 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3413 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3414 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3415 else
3416 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3417 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3418 TREE_OPERAND (arg, 1));
3419
3420 case TRUTH_ANDIF_EXPR:
3421 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3422 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3423 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3424 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3425 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3426
3427 case TRUTH_ORIF_EXPR:
3428 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3429 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3430 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3431 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3432 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3433
3434 case TRUTH_NOT_EXPR:
3435 return TREE_OPERAND (arg, 0);
3436
3437 case COND_EXPR:
3438 {
3439 tree arg1 = TREE_OPERAND (arg, 1);
3440 tree arg2 = TREE_OPERAND (arg, 2);
3441
3442 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3443 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3444
3445 /* A COND_EXPR may have a throw as one operand, which
3446 then has void type. Just leave void operands
3447 as they are. */
3448 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3449 VOID_TYPE_P (TREE_TYPE (arg1))
3450 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3451 VOID_TYPE_P (TREE_TYPE (arg2))
3452 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3453 }
3454
3455 case COMPOUND_EXPR:
3456 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3457 return build2_loc (loc, COMPOUND_EXPR, type,
3458 TREE_OPERAND (arg, 0),
3459 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3460
3461 case NON_LVALUE_EXPR:
3462 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3463 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3464
3465 CASE_CONVERT:
3466 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3467 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3468
3469 /* ... fall through ... */
3470
3471 case FLOAT_EXPR:
3472 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3473 return build1_loc (loc, TREE_CODE (arg), type,
3474 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3475
3476 case BIT_AND_EXPR:
3477 if (!integer_onep (TREE_OPERAND (arg, 1)))
3478 return NULL_TREE;
3479 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3480
3481 case SAVE_EXPR:
3482 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3483
3484 case CLEANUP_POINT_EXPR:
3485 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3486 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3487 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3488
3489 default:
3490 return NULL_TREE;
3491 }
3492 }
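
/* Editorial examples, not part of the original source:

     !(a && b)  ->  !a || !b     (De Morgan)
     !(a < b)   ->  a >= b       for integral operands
     !(x < y)   ->  NULL_TREE    for floating point under
                                 -ftrapping-math; the caller then keeps
                                 an explicit TRUTH_NOT_EXPR.  */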
3493
3494 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3495 assume that ARG is an operation that returns a truth value (0 or 1
3496 for scalars, 0 or -1 for vectors). Return the folded expression if
3497 folding is successful. Otherwise, return NULL_TREE. */
3498
3499 static tree
3500 fold_invert_truthvalue (location_t loc, tree arg)
3501 {
3502 tree type = TREE_TYPE (arg);
3503 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3504 ? BIT_NOT_EXPR
3505 : TRUTH_NOT_EXPR,
3506 type, arg);
3507 }
3508
3509 /* Return a simplified tree node for the truth-negation of ARG. This
3510 never alters ARG itself. We assume that ARG is an operation that
3511 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3512
3513 tree
3514 invert_truthvalue_loc (location_t loc, tree arg)
3515 {
3516 if (TREE_CODE (arg) == ERROR_MARK)
3517 return arg;
3518
3519 tree type = TREE_TYPE (arg);
3520 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3521 ? BIT_NOT_EXPR
3522 : TRUTH_NOT_EXPR,
3523 type, arg);
3524 }
3525
3526 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3527 operands are another bit-wise operation with a common input. If so,
3528 distribute the bit operations to save an operation and possibly two if
3529 constants are involved. For example, convert
3530 (A | B) & (A | C) into A | (B & C)
3531 Further simplification will occur if B and C are constants.
3532
3533 If this optimization cannot be done, 0 will be returned. */
3534
3535 static tree
3536 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3537 tree arg0, tree arg1)
3538 {
3539 tree common;
3540 tree left, right;
3541
3542 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3543 || TREE_CODE (arg0) == code
3544 || (TREE_CODE (arg0) != BIT_AND_EXPR
3545 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3546 return 0;
3547
3548 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3549 {
3550 common = TREE_OPERAND (arg0, 0);
3551 left = TREE_OPERAND (arg0, 1);
3552 right = TREE_OPERAND (arg1, 1);
3553 }
3554 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3555 {
3556 common = TREE_OPERAND (arg0, 0);
3557 left = TREE_OPERAND (arg0, 1);
3558 right = TREE_OPERAND (arg1, 0);
3559 }
3560 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3561 {
3562 common = TREE_OPERAND (arg0, 1);
3563 left = TREE_OPERAND (arg0, 0);
3564 right = TREE_OPERAND (arg1, 1);
3565 }
3566 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3567 {
3568 common = TREE_OPERAND (arg0, 1);
3569 left = TREE_OPERAND (arg0, 0);
3570 right = TREE_OPERAND (arg1, 0);
3571 }
3572 else
3573 return 0;
3574
3575 common = fold_convert_loc (loc, type, common);
3576 left = fold_convert_loc (loc, type, left);
3577 right = fold_convert_loc (loc, type, right);
3578 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3579 fold_build2_loc (loc, code, type, left, right));
3580 }
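
/* Editorial worked example, not part of the original source:

     (x | 3) & (x | 5)  ->  x | (3 & 5)  ->  x | 1

   saving one operation outright and letting the inner operation fold to
   a constant.  */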
3581
3582 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3583 with code CODE. This optimization is unsafe. */
3584 static tree
3585 distribute_real_division (location_t loc, enum tree_code code, tree type,
3586 tree arg0, tree arg1)
3587 {
3588 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3589 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3590
3591 /* (A / C) +- (B / C) -> (A +- B) / C. */
3592 if (mul0 == mul1
3593 && operand_equal_p (TREE_OPERAND (arg0, 1),
3594 TREE_OPERAND (arg1, 1), 0))
3595 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3596 fold_build2_loc (loc, code, type,
3597 TREE_OPERAND (arg0, 0),
3598 TREE_OPERAND (arg1, 0)),
3599 TREE_OPERAND (arg0, 1));
3600
3601 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3602 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3603 TREE_OPERAND (arg1, 0), 0)
3604 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3605 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3606 {
3607 REAL_VALUE_TYPE r0, r1;
3608 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3609 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3610 if (!mul0)
3611 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3612 if (!mul1)
3613 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3614 real_arithmetic (&r0, code, &r0, &r1);
3615 return fold_build2_loc (loc, MULT_EXPR, type,
3616 TREE_OPERAND (arg0, 0),
3617 build_real (type, r0));
3618 }
3619
3620 return NULL_TREE;
3621 }
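
/* Editorial worked example, not part of the original source, of the
   second pattern (unsafe for IEEE semantics because it changes
   rounding, hence the caveat above):

     a/2.0 + a/4.0  ->  a * (1/2.0 + 1/4.0)  ->  a * 0.75

   trading two divisions for one multiplication.  */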
3622 \f
3623 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3624 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3625
3626 static tree
3627 make_bit_field_ref (location_t loc, tree inner, tree type,
3628 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3629 {
3630 tree result, bftype;
3631
3632 if (bitpos == 0)
3633 {
3634 tree size = TYPE_SIZE (TREE_TYPE (inner));
3635 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3636 || POINTER_TYPE_P (TREE_TYPE (inner)))
3637 && tree_fits_shwi_p (size)
3638 && tree_to_shwi (size) == bitsize)
3639 return fold_convert_loc (loc, type, inner);
3640 }
3641
3642 bftype = type;
3643 if (TYPE_PRECISION (bftype) != bitsize
3644 || TYPE_UNSIGNED (bftype) == !unsignedp)
3645 bftype = build_nonstandard_integer_type (bitsize, 0);
3646
3647 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3648 size_int (bitsize), bitsize_int (bitpos));
3649
3650 if (bftype != type)
3651 result = fold_convert_loc (loc, type, result);
3652
3653 return result;
3654 }
3655
3656 /* Optimize a bit-field compare.
3657
3658 There are two cases: First is a compare against a constant and the
3659 second is a comparison of two items where the fields are at the same
3660 bit position relative to the start of a chunk (byte, halfword, word)
3661 large enough to contain it. In these cases we can avoid the shift
3662 implicit in bitfield extractions.
3663
3664 For constants, we emit a compare of the shifted constant with the
3665 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3666 compared. For two fields at the same position, we do the ANDs with the
3667 similar mask and compare the result of the ANDs.
3668
3669 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3670 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3671 are the left and right operands of the comparison, respectively.
3672
3673 If the optimization described above can be done, we return the resulting
3674 tree. Otherwise we return zero. */
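/* As a sketch of the constant case (layout purely illustrative): given

       struct s { unsigned int f : 3; };   -- f occupies bits 0..2

   the comparison x.f == 5 becomes, roughly,

       (word_containing_f & 0x7) == 5

   i.e. a mask-and-compare on a word-sized load with no shift;
   word_containing_f stands for the BIT_FIELD_REF built below.  */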
3675
3676 static tree
3677 optimize_bit_field_compare (location_t loc, enum tree_code code,
3678 tree compare_type, tree lhs, tree rhs)
3679 {
3680 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3681 tree type = TREE_TYPE (lhs);
3682 tree unsigned_type;
3683 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3684 machine_mode lmode, rmode, nmode;
3685 int lunsignedp, runsignedp;
3686 int lvolatilep = 0, rvolatilep = 0;
3687 tree linner, rinner = NULL_TREE;
3688 tree mask;
3689 tree offset;
3690
3691 /* Get all the information about the extractions being done. If the bit size
3692 is the same as the size of the underlying object, we aren't doing an
3693 extraction at all and so can do nothing. We also don't want to
3694 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3695 then will no longer be able to replace it. */
3696 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3697 &lunsignedp, &lvolatilep, false);
3698 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3699 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3700 return 0;
3701
3702 if (!const_p)
3703 {
3704 /* If this is not a constant, we can only do something if bit positions,
3705 sizes, and signedness are the same. */
3706 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3707 &runsignedp, &rvolatilep, false);
3708
3709 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3710 || lunsignedp != runsignedp || offset != 0
3711 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3712 return 0;
3713 }
3714
3715 /* See if we can find a mode to refer to this field. We should be able to,
3716 but fail if we can't. */
3717 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3718 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3719 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3720 TYPE_ALIGN (TREE_TYPE (rinner))),
3721 word_mode, false);
3722 if (nmode == VOIDmode)
3723 return 0;
3724
3725 /* Set the unsigned type of the precision of this mode for the
3726 shifts below. */
3727 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3728
3729 /* Compute the bit position and size for the new reference and our offset
3730 within it. If the new reference is the same size as the original, we
3731 won't optimize anything, so return zero. */
3732 nbitsize = GET_MODE_BITSIZE (nmode);
3733 nbitpos = lbitpos & ~ (nbitsize - 1);
3734 lbitpos -= nbitpos;
3735 if (nbitsize == lbitsize)
3736 return 0;
3737
3738 if (BYTES_BIG_ENDIAN)
3739 lbitpos = nbitsize - lbitsize - lbitpos;
3740
3741 /* Make the mask to be used against the extracted field. */
3742 mask = build_int_cst_type (unsigned_type, -1);
3743 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3744 mask = const_binop (RSHIFT_EXPR, mask,
3745 size_int (nbitsize - lbitsize - lbitpos));
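  /* Worked example (numbers purely illustrative): with nbitsize == 32,
     lbitsize == 5 and lbitpos == 3 the two shifts compute
	 ~0 << (32 - 5)             == 0xf8000000
	 0xf8000000 >> (32 - 5 - 3) == 0x000000f8
     i.e. five one bits starting at bit 3.  */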
3746
3747 if (! const_p)
3748 /* If not comparing with constant, just rework the comparison
3749 and return. */
3750 return fold_build2_loc (loc, code, compare_type,
3751 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3752 make_bit_field_ref (loc, linner,
3753 unsigned_type,
3754 nbitsize, nbitpos,
3755 1),
3756 mask),
3757 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3758 make_bit_field_ref (loc, rinner,
3759 unsigned_type,
3760 nbitsize, nbitpos,
3761 1),
3762 mask));
3763
3764 /* Otherwise, we are handling the constant case. See if the constant is too
3765 big for the field. Warn and return a tree for 0 (false) if so. We do
3766 this not only for its own sake, but to avoid having to test for this
3767 error case below. If we didn't, we might generate wrong code.
3768
3769 For unsigned fields, the constant shifted right by the field length should
3770 be all zero. For signed fields, the high-order bits should agree with
3771 the sign bit. */
3772
3773 if (lunsignedp)
3774 {
3775 if (wi::lrshift (rhs, lbitsize) != 0)
3776 {
3777 warning (0, "comparison is always %d due to width of bit-field",
3778 code == NE_EXPR);
3779 return constant_boolean_node (code == NE_EXPR, compare_type);
3780 }
3781 }
3782 else
3783 {
3784 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3785 if (tem != 0 && tem != -1)
3786 {
3787 warning (0, "comparison is always %d due to width of bit-field",
3788 code == NE_EXPR);
3789 return constant_boolean_node (code == NE_EXPR, compare_type);
3790 }
3791 }
3792
3793 /* Single-bit compares should always be against zero. */
3794 if (lbitsize == 1 && ! integer_zerop (rhs))
3795 {
3796 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3797 rhs = build_int_cst (type, 0);
3798 }
3799
3800 /* Make a new bitfield reference, shift the constant over the
3801 appropriate number of bits and mask it with the computed mask
3802 (in case this was a signed field). */
3803 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3804
3805 rhs = const_binop (BIT_AND_EXPR,
3806 const_binop (LSHIFT_EXPR,
3807 fold_convert_loc (loc, unsigned_type, rhs),
3808 size_int (lbitpos)),
3809 mask);
3810
3811 lhs = build2_loc (loc, code, compare_type,
3812 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3813 return lhs;
3814 }
3815 \f
3816 /* Subroutine for fold_truth_andor_1: decode a field reference.
3817
3818 If EXP is a comparison reference, we return the innermost reference.
3819
3820 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3821 set to the starting bit number.
3822
3823 If the innermost field can be completely contained in a mode-sized
3824 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3825
3826 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3827 otherwise it is not changed.
3828
3829 *PUNSIGNEDP is set to the signedness of the field.
3830
3831 *PMASK is set to the mask used. This is either contained in a
3832 BIT_AND_EXPR or derived from the width of the field.
3833
3834 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3835
3836 Return 0 if this is not a component reference or is one that we can't
3837 do anything with. */
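/* For example (purely illustrative): for an EXP of the form

       (unsigned int) x.f & 0x3

   where x.f is an unsigned 8-bit field, this returns the underlying
   object with *PBITSIZE == 8, *PAND_MASK == 0x3 and *PMASK == 0x3
   (the BIT_AND_EXPR mask merged with the field mask 0xff).  */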
3838
3839 static tree
3840 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3841 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3842 int *punsignedp, int *pvolatilep,
3843 tree *pmask, tree *pand_mask)
3844 {
3845 tree outer_type = 0;
3846 tree and_mask = 0;
3847 tree mask, inner, offset;
3848 tree unsigned_type;
3849 unsigned int precision;
3850
3851 /* All the optimizations using this function assume integer fields.
3852 There are problems with FP fields since the type_for_size call
3853 below can fail for, e.g., XFmode. */
3854 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3855 return 0;
3856
3857 /* We are interested in the bare arrangement of bits, so strip everything
3858 that doesn't affect the machine mode. However, record the type of the
3859 outermost expression if it may matter below. */
3860 if (CONVERT_EXPR_P (exp)
3861 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3862 outer_type = TREE_TYPE (exp);
3863 STRIP_NOPS (exp);
3864
3865 if (TREE_CODE (exp) == BIT_AND_EXPR)
3866 {
3867 and_mask = TREE_OPERAND (exp, 1);
3868 exp = TREE_OPERAND (exp, 0);
3869 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3870 if (TREE_CODE (and_mask) != INTEGER_CST)
3871 return 0;
3872 }
3873
3874 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3875 punsignedp, pvolatilep, false);
3876 if ((inner == exp && and_mask == 0)
3877 || *pbitsize < 0 || offset != 0
3878 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3879 return 0;
3880
3881 /* If the number of bits in the reference is the same as the bitsize of
3882 the outer type, then the outer type gives the signedness. Otherwise
3883 (in case of a small bitfield) the signedness is unchanged. */
3884 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3885 *punsignedp = TYPE_UNSIGNED (outer_type);
3886
3887 /* Compute the mask to access the bitfield. */
3888 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3889 precision = TYPE_PRECISION (unsigned_type);
3890
3891 mask = build_int_cst_type (unsigned_type, -1);
3892
3893 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3894 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3895
3896 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3897 if (and_mask != 0)
3898 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3899 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3900
3901 *pmask = mask;
3902 *pand_mask = and_mask;
3903 return inner;
3904 }
3905
3906 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3907 bit positions and the type of MASK is signed. */
3908
3909 static int
3910 all_ones_mask_p (const_tree mask, unsigned int size)
3911 {
3912 tree type = TREE_TYPE (mask);
3913 unsigned int precision = TYPE_PRECISION (type);
3914
3915 /* If this function returns true when the type of the mask is
3916 UNSIGNED, then there will be errors. In particular see
3917 gcc.c-torture/execute/990326-1.c. There does not appear to be
3918 any documentation paper trail as to why this is so. But the pre
3919 wide-int worked with that restriction and it has been preserved
3920 here. */
3921 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3922 return false;
3923
3924 return wi::mask (size, false, precision) == mask;
3925 }
3926
3927 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3928 represents the sign bit of EXP's type. If EXP represents a sign
3929 or zero extension, also test VAL against the unextended type.
3930 The return value is the (sub)expression whose sign bit is VAL,
3931 or NULL_TREE otherwise. */
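/* E.g. (illustrative): for a 32-bit int EXP, a VAL of 0x80000000,
   i.e. INT_MIN, is the sign bit and EXP itself is returned.  */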
3932
3933 tree
3934 sign_bit_p (tree exp, const_tree val)
3935 {
3936 int width;
3937 tree t;
3938
3939 /* Tree EXP must have an integral type. */
3940 t = TREE_TYPE (exp);
3941 if (! INTEGRAL_TYPE_P (t))
3942 return NULL_TREE;
3943
3944 /* Tree VAL must be an integer constant. */
3945 if (TREE_CODE (val) != INTEGER_CST
3946 || TREE_OVERFLOW (val))
3947 return NULL_TREE;
3948
3949 width = TYPE_PRECISION (t);
3950 if (wi::only_sign_bit_p (val, width))
3951 return exp;
3952
3953 /* Handle extension from a narrower type. */
3954 if (TREE_CODE (exp) == NOP_EXPR
3955 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3956 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3957
3958 return NULL_TREE;
3959 }
3960
3961 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3962 to be evaluated unconditionally. */
3963
3964 static int
3965 simple_operand_p (const_tree exp)
3966 {
3967 /* Strip any conversions that don't change the machine mode. */
3968 STRIP_NOPS (exp);
3969
3970 return (CONSTANT_CLASS_P (exp)
3971 || TREE_CODE (exp) == SSA_NAME
3972 || (DECL_P (exp)
3973 && ! TREE_ADDRESSABLE (exp)
3974 && ! TREE_THIS_VOLATILE (exp)
3975 && ! DECL_NONLOCAL (exp)
3976 /* Don't regard global variables as simple. They may be
3977 allocated in ways unknown to the compiler (shared memory,
3978 #pragma weak, etc). */
3979 && ! TREE_PUBLIC (exp)
3980 && ! DECL_EXTERNAL (exp)
3981 /* Weakrefs are not safe to be read, since they can be NULL.
3982 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3983 have DECL_WEAK flag set. */
3984 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3985 /* Loading a static variable is unduly expensive, but global
3986 registers aren't expensive. */
3987 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3988 }
3989
3990 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3991 to be evaluated unconditionally.
3992 In addition to simple_operand_p, we assume that comparisons, conversions,
3993 and logic-not operations are simple, if their operands are simple, too. */
3994
3995 static bool
3996 simple_operand_p_2 (tree exp)
3997 {
3998 enum tree_code code;
3999
4000 if (TREE_SIDE_EFFECTS (exp)
4001 || tree_could_trap_p (exp))
4002 return false;
4003
4004 while (CONVERT_EXPR_P (exp))
4005 exp = TREE_OPERAND (exp, 0);
4006
4007 code = TREE_CODE (exp);
4008
4009 if (TREE_CODE_CLASS (code) == tcc_comparison)
4010 return (simple_operand_p (TREE_OPERAND (exp, 0))
4011 && simple_operand_p (TREE_OPERAND (exp, 1)));
4012
4013 if (code == TRUTH_NOT_EXPR)
4014 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4015
4016 return simple_operand_p (exp);
4017 }
4018
4019 \f
4020 /* The following functions are subroutines to fold_range_test and allow it to
4021 try to change a logical combination of comparisons into a range test.
4022
4023 For example, both
4024 X == 2 || X == 3 || X == 4 || X == 5
4025 and
4026 X >= 2 && X <= 5
4027 are converted to
4028 (unsigned) (X - 2) <= 3
4029
4030 We describe each set of comparisons as being either inside or outside
4031 a range, using a variable named like IN_P, and then describe the
4032 range with a lower and upper bound. If one of the bounds is omitted,
4033 it represents either the highest or lowest value of the type.
4034
4035 In the comments below, we represent a range by two numbers in brackets
4036 preceded by a "+" to designate being inside that range, or a "-" to
4037 designate being outside that range, so the condition can be inverted by
4038 flipping the prefix. An omitted bound is represented by a "-". For
4039 example, "- [-, 10]" means being outside the range starting at the lowest
4040 possible value and ending at 10, in other words, being greater than 10.
4041 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4042 always false.
4043
4044 We set up things so that the missing bounds are handled in a consistent
4045 manner so neither a missing bound nor "true" and "false" need to be
4046 handled using a special case. */
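/* As a plain-C illustration of the example above (all three functions
   are ours and compute the same predicate):

       int f1 (int x) { return x == 2 || x == 3 || x == 4 || x == 5; }
       int f2 (int x) { return x >= 2 && x <= 5; }
       int f3 (int x) { return (unsigned) (x - 2) <= 3; }

   f3 is the folded form: subtracting the low bound and doing a single
   unsigned comparison covers the whole range, because values below 2
   wrap around to very large unsigned numbers.  */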
4047
4048 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4049 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4050 and UPPER1_P are nonzero if the respective argument is an upper bound
4051 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4052 must be specified for a comparison. ARG1 will be converted to ARG0's
4053 type if both are specified. */
4054
4055 static tree
4056 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4057 tree arg1, int upper1_p)
4058 {
4059 tree tem;
4060 int result;
4061 int sgn0, sgn1;
4062
4063 /* If neither arg represents infinity, do the normal operation.
4064 Else, if not a comparison, return infinity. Else handle the special
4065 comparison rules. Note that most of the cases below won't occur, but
4066 are handled for consistency. */
4067
4068 if (arg0 != 0 && arg1 != 0)
4069 {
4070 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4071 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4072 STRIP_NOPS (tem);
4073 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4074 }
4075
4076 if (TREE_CODE_CLASS (code) != tcc_comparison)
4077 return 0;
4078
4079 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4080 for neither. In real maths, we cannot assume open ended ranges are
4081 the same. But, this is computer arithmetic, where numbers are finite.
4082 We can therefore model a missing upper bound with a value Z greater
4083 than any representable number, and a missing lower bound with -Z. This permits
4084 us to treat unbounded ranges as equal. */
4085 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4086 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4087 switch (code)
4088 {
4089 case EQ_EXPR:
4090 result = sgn0 == sgn1;
4091 break;
4092 case NE_EXPR:
4093 result = sgn0 != sgn1;
4094 break;
4095 case LT_EXPR:
4096 result = sgn0 < sgn1;
4097 break;
4098 case LE_EXPR:
4099 result = sgn0 <= sgn1;
4100 break;
4101 case GT_EXPR:
4102 result = sgn0 > sgn1;
4103 break;
4104 case GE_EXPR:
4105 result = sgn0 >= sgn1;
4106 break;
4107 default:
4108 gcc_unreachable ();
4109 }
4110
4111 return constant_boolean_node (result, type);
4112 }
4113 \f
4114 /* Helper routine for make_range. Perform one step for it, return
4115 new expression if the loop should continue or NULL_TREE if it should
4116 stop. */
4117
4118 tree
4119 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4120 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4121 bool *strict_overflow_p)
4122 {
4123 tree arg0_type = TREE_TYPE (arg0);
4124 tree n_low, n_high, low = *p_low, high = *p_high;
4125 int in_p = *p_in_p, n_in_p;
4126
4127 switch (code)
4128 {
4129 case TRUTH_NOT_EXPR:
4130 /* We can only do something if the range is testing for zero. */
4131 if (low == NULL_TREE || high == NULL_TREE
4132 || ! integer_zerop (low) || ! integer_zerop (high))
4133 return NULL_TREE;
4134 *p_in_p = ! in_p;
4135 return arg0;
4136
4137 case EQ_EXPR: case NE_EXPR:
4138 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4139 /* We can only do something if the range is testing for zero
4140 and if the second operand is an integer constant. Note that
4141 saying something is "in" the range we make is done by
4142 complementing IN_P, since the initial range already encodes
4143 being not equal to zero; "out" is leaving it alone. */
4144 if (low == NULL_TREE || high == NULL_TREE
4145 || ! integer_zerop (low) || ! integer_zerop (high)
4146 || TREE_CODE (arg1) != INTEGER_CST)
4147 return NULL_TREE;
4148
4149 switch (code)
4150 {
4151 case NE_EXPR: /* - [c, c] */
4152 low = high = arg1;
4153 break;
4154 case EQ_EXPR: /* + [c, c] */
4155 in_p = ! in_p, low = high = arg1;
4156 break;
4157 case GT_EXPR: /* - [-, c] */
4158 low = 0, high = arg1;
4159 break;
4160 case GE_EXPR: /* + [c, -] */
4161 in_p = ! in_p, low = arg1, high = 0;
4162 break;
4163 case LT_EXPR: /* - [c, -] */
4164 low = arg1, high = 0;
4165 break;
4166 case LE_EXPR: /* + [-, c] */
4167 in_p = ! in_p, low = 0, high = arg1;
4168 break;
4169 default:
4170 gcc_unreachable ();
4171 }
4172
4173 /* If this is an unsigned comparison, we also know that EXP is
4174 greater than or equal to zero. We base the range tests we make
4175 on that fact, so we record it here so we can parse existing
4176 range tests. We test arg0_type since often the return type
4177 of, e.g. EQ_EXPR, is boolean. */
4178 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4179 {
4180 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4181 in_p, low, high, 1,
4182 build_int_cst (arg0_type, 0),
4183 NULL_TREE))
4184 return NULL_TREE;
4185
4186 in_p = n_in_p, low = n_low, high = n_high;
4187
4188 /* If the high bound is missing, but we have a nonzero low
4189 bound, reverse the range so it goes from zero to the low bound
4190 minus 1. */
4191 if (high == 0 && low && ! integer_zerop (low))
4192 {
4193 in_p = ! in_p;
4194 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4195 build_int_cst (TREE_TYPE (low), 1), 0);
4196 low = build_int_cst (arg0_type, 0);
4197 }
4198 }
4199
4200 *p_low = low;
4201 *p_high = high;
4202 *p_in_p = in_p;
4203 return arg0;
4204
4205 case NEGATE_EXPR:
4206 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4207 low and high are non-NULL, then normalize will DTRT. */
4208 if (!TYPE_UNSIGNED (arg0_type)
4209 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4210 {
4211 if (low == NULL_TREE)
4212 low = TYPE_MIN_VALUE (arg0_type);
4213 if (high == NULL_TREE)
4214 high = TYPE_MAX_VALUE (arg0_type);
4215 }
4216
4217 /* (-x) IN [a,b] -> x in [-b, -a] */
4218 n_low = range_binop (MINUS_EXPR, exp_type,
4219 build_int_cst (exp_type, 0),
4220 0, high, 1);
4221 n_high = range_binop (MINUS_EXPR, exp_type,
4222 build_int_cst (exp_type, 0),
4223 0, low, 0);
4224 if (n_high != 0 && TREE_OVERFLOW (n_high))
4225 return NULL_TREE;
4226 goto normalize;
4227
4228 case BIT_NOT_EXPR:
4229 /* ~ X -> -X - 1 */
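	  /* This is the two's complement identity, e.g. ~5 == -5 - 1 == -6;
	     the rewritten tree is then refined further by the NEGATE_EXPR
	     and MINUS_EXPR cases on a later iteration of make_range.  */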
4230 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4231 build_int_cst (exp_type, 1));
4232
4233 case PLUS_EXPR:
4234 case MINUS_EXPR:
4235 if (TREE_CODE (arg1) != INTEGER_CST)
4236 return NULL_TREE;
4237
4238 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4239 move a constant to the other side. */
4240 if (!TYPE_UNSIGNED (arg0_type)
4241 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4242 return NULL_TREE;
4243
4244 /* If EXP is signed, any overflow in the computation is undefined,
4245 so we don't worry about it so long as our computations on
4246 the bounds don't overflow. For unsigned, overflow is defined
4247 and this is exactly the right thing. */
4248 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4249 arg0_type, low, 0, arg1, 0);
4250 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4251 arg0_type, high, 1, arg1, 0);
4252 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4253 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4254 return NULL_TREE;
4255
4256 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4257 *strict_overflow_p = true;
4258
4259 normalize:
4260 /* Check for an unsigned range which has wrapped around the maximum
4261 value thus making n_high < n_low, and normalize it. */
4262 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4263 {
4264 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4265 build_int_cst (TREE_TYPE (n_high), 1), 0);
4266 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4267 build_int_cst (TREE_TYPE (n_low), 1), 0);
4268
4269 /* If the range is of the form +/- [ x+1, x ], we won't
4270 be able to normalize it. But then, it represents the
4271 whole range or the empty set, so make it
4272 +/- [ -, - ]. */
4273 if (tree_int_cst_equal (n_low, low)
4274 && tree_int_cst_equal (n_high, high))
4275 low = high = 0;
4276 else
4277 in_p = ! in_p;
4278 }
4279 else
4280 low = n_low, high = n_high;
4281
4282 *p_low = low;
4283 *p_high = high;
4284 *p_in_p = in_p;
4285 return arg0;
4286
4287 CASE_CONVERT:
4288 case NON_LVALUE_EXPR:
4289 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4290 return NULL_TREE;
4291
4292 if (! INTEGRAL_TYPE_P (arg0_type)
4293 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4294 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4295 return NULL_TREE;
4296
4297 n_low = low, n_high = high;
4298
4299 if (n_low != 0)
4300 n_low = fold_convert_loc (loc, arg0_type, n_low);
4301
4302 if (n_high != 0)
4303 n_high = fold_convert_loc (loc, arg0_type, n_high);
4304
4305 /* If we're converting arg0 from an unsigned type to exp's
4306 signed type, we will be doing the comparison as unsigned.
4307 The tests above have already verified that LOW and HIGH
4308 are both positive.
4309
4310 So we have to ensure that we will handle large unsigned
4311 values the same way that the current signed bounds treat
4312 negative values. */
4313
4314 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4315 {
4316 tree high_positive;
4317 tree equiv_type;
4318 /* For fixed-point modes, we need to pass the saturating flag
4319 as the 2nd parameter. */
4320 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4321 equiv_type
4322 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4323 TYPE_SATURATING (arg0_type));
4324 else
4325 equiv_type
4326 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4327
4328 /* A range without an upper bound is, naturally, unbounded.
4329 Since convert would have cropped a very large value, use
4330 the max value for the destination type. */
4331 high_positive
4332 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4333 : TYPE_MAX_VALUE (arg0_type);
4334
4335 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4336 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4337 fold_convert_loc (loc, arg0_type,
4338 high_positive),
4339 build_int_cst (arg0_type, 1));
4340
4341 /* If the low bound is specified, "and" the range with the
4342 range for which the original unsigned value will be
4343 positive. */
4344 if (low != 0)
4345 {
4346 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4347 1, fold_convert_loc (loc, arg0_type,
4348 integer_zero_node),
4349 high_positive))
4350 return NULL_TREE;
4351
4352 in_p = (n_in_p == in_p);
4353 }
4354 else
4355 {
4356 /* Otherwise, "or" the range with the range of the input
4357 that will be interpreted as negative. */
4358 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4359 1, fold_convert_loc (loc, arg0_type,
4360 integer_zero_node),
4361 high_positive))
4362 return NULL_TREE;
4363
4364 in_p = (in_p != n_in_p);
4365 }
4366 }
4367
4368 *p_low = n_low;
4369 *p_high = n_high;
4370 *p_in_p = in_p;
4371 return arg0;
4372
4373 default:
4374 return NULL_TREE;
4375 }
4376 }
4377
4378 /* Given EXP, a logical expression, set the range it is testing into
4379 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4380 actually being tested. *PLOW and *PHIGH will be made of the same
4381 type as the returned expression. If EXP is not a comparison, we
4382 will most likely not be returning a useful value and range. Set
4383 *STRICT_OVERFLOW_P to true if the return value is only valid
4384 because signed overflow is undefined; otherwise, do not change
4385 *STRICT_OVERFLOW_P. */
4386
4387 tree
4388 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4389 bool *strict_overflow_p)
4390 {
4391 enum tree_code code;
4392 tree arg0, arg1 = NULL_TREE;
4393 tree exp_type, nexp;
4394 int in_p;
4395 tree low, high;
4396 location_t loc = EXPR_LOCATION (exp);
4397
4398 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4399 and see if we can refine the range. Some of the cases below may not
4400 happen, but it doesn't seem worth worrying about this. We keep
4401 iterating as long as make_range_step refines the range; once it
4402 returns NULL_TREE we stop. */
4403
4404 in_p = 0;
4405 low = high = build_int_cst (TREE_TYPE (exp), 0);
4406
4407 while (1)
4408 {
4409 code = TREE_CODE (exp);
4410 exp_type = TREE_TYPE (exp);
4411 arg0 = NULL_TREE;
4412
4413 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4414 {
4415 if (TREE_OPERAND_LENGTH (exp) > 0)
4416 arg0 = TREE_OPERAND (exp, 0);
4417 if (TREE_CODE_CLASS (code) == tcc_binary
4418 || TREE_CODE_CLASS (code) == tcc_comparison
4419 || (TREE_CODE_CLASS (code) == tcc_expression
4420 && TREE_OPERAND_LENGTH (exp) > 1))
4421 arg1 = TREE_OPERAND (exp, 1);
4422 }
4423 if (arg0 == NULL_TREE)
4424 break;
4425
4426 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4427 &high, &in_p, strict_overflow_p);
4428 if (nexp == NULL_TREE)
4429 break;
4430 exp = nexp;
4431 }
4432
4433 /* If EXP is a constant, we can evaluate whether this is true or false. */
4434 if (TREE_CODE (exp) == INTEGER_CST)
4435 {
4436 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4437 exp, 0, low, 0))
4438 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4439 exp, 1, high, 1)));
4440 low = high = 0;
4441 exp = 0;
4442 }
4443
4444 *pin_p = in_p, *plow = low, *phigh = high;
4445 return exp;
4446 }
4447 \f
4448 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4449 type, TYPE, return an expression to test if EXP is in (or out of, depending
4450 on IN_P) the range. Return 0 if the test couldn't be created. */
4451
4452 tree
4453 build_range_check (location_t loc, tree type, tree exp, int in_p,
4454 tree low, tree high)
4455 {
4456 tree etype = TREE_TYPE (exp), value;
4457
4458 #ifdef HAVE_canonicalize_funcptr_for_compare
4459 /* Disable this optimization for function pointer expressions
4460 on targets that require function pointer canonicalization. */
4461 if (HAVE_canonicalize_funcptr_for_compare
4462 && TREE_CODE (etype) == POINTER_TYPE
4463 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4464 return NULL_TREE;
4465 #endif
4466
4467 if (! in_p)
4468 {
4469 value = build_range_check (loc, type, exp, 1, low, high);
4470 if (value != 0)
4471 return invert_truthvalue_loc (loc, value);
4472
4473 return 0;
4474 }
4475
4476 if (low == 0 && high == 0)
4477 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4478
4479 if (low == 0)
4480 return fold_build2_loc (loc, LE_EXPR, type, exp,
4481 fold_convert_loc (loc, etype, high));
4482
4483 if (high == 0)
4484 return fold_build2_loc (loc, GE_EXPR, type, exp,
4485 fold_convert_loc (loc, etype, low));
4486
4487 if (operand_equal_p (low, high, 0))
4488 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4489 fold_convert_loc (loc, etype, low));
4490
4491 if (integer_zerop (low))
4492 {
4493 if (! TYPE_UNSIGNED (etype))
4494 {
4495 etype = unsigned_type_for (etype);
4496 high = fold_convert_loc (loc, etype, high);
4497 exp = fold_convert_loc (loc, etype, exp);
4498 }
4499 return build_range_check (loc, type, exp, 1, 0, high);
4500 }
4501
4502 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4503 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4504 {
4505 int prec = TYPE_PRECISION (etype);
4506
4507 if (wi::mask (prec - 1, false, prec) == high)
4508 {
4509 if (TYPE_UNSIGNED (etype))
4510 {
4511 tree signed_etype = signed_type_for (etype);
4512 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4513 etype
4514 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4515 else
4516 etype = signed_etype;
4517 exp = fold_convert_loc (loc, etype, exp);
4518 }
4519 return fold_build2_loc (loc, GT_EXPR, type, exp,
4520 build_int_cst (etype, 0));
4521 }
4522 }
4523
4524 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4525 This requires wrap-around arithmetic for the type of the expression.
4526 First make sure that arithmetic in this type is valid, then make sure
4527 that it wraps around. */
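  /* Worked example (values purely illustrative): for unsigned char C,
     C >= 0x20 && C <= 0x7e becomes
	 (unsigned char) (C - 0x20) <= 0x5e
     a single comparison, relying on C - 0x20 wrapping modulo 256.  */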
4528 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4529 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4530 TYPE_UNSIGNED (etype));
4531
4532 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4533 {
4534 tree utype, minv, maxv;
4535
4536 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4537 for the type in question, as we rely on this here. */
4538 utype = unsigned_type_for (etype);
4539 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4540 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4541 build_int_cst (TREE_TYPE (maxv), 1), 1);
4542 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4543
4544 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4545 minv, 1, maxv, 1)))
4546 etype = utype;
4547 else
4548 return 0;
4549 }
4550
4551 high = fold_convert_loc (loc, etype, high);
4552 low = fold_convert_loc (loc, etype, low);
4553 exp = fold_convert_loc (loc, etype, exp);
4554
4555 value = const_binop (MINUS_EXPR, high, low);
4556
4557
4558 if (POINTER_TYPE_P (etype))
4559 {
4560 if (value != 0 && !TREE_OVERFLOW (value))
4561 {
4562 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4563 return build_range_check (loc, type,
4564 fold_build_pointer_plus_loc (loc, exp, low),
4565 1, build_int_cst (etype, 0), value);
4566 }
4567 return 0;
4568 }
4569
4570 if (value != 0 && !TREE_OVERFLOW (value))
4571 return build_range_check (loc, type,
4572 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4573 1, build_int_cst (etype, 0), value);
4574
4575 return 0;
4576 }
4577 \f
4578 /* Return the predecessor of VAL in its type, handling the infinite case. */
4579
4580 static tree
4581 range_predecessor (tree val)
4582 {
4583 tree type = TREE_TYPE (val);
4584
4585 if (INTEGRAL_TYPE_P (type)
4586 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4587 return 0;
4588 else
4589 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4590 build_int_cst (TREE_TYPE (val), 1), 0);
4591 }
4592
4593 /* Return the successor of VAL in its type, handling the infinite case. */
4594
4595 static tree
4596 range_successor (tree val)
4597 {
4598 tree type = TREE_TYPE (val);
4599
4600 if (INTEGRAL_TYPE_P (type)
4601 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4602 return 0;
4603 else
4604 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4605 build_int_cst (TREE_TYPE (val), 1), 0);
4606 }
4607
4608 /* Given two ranges, see if we can merge them into one. Return 1 if we
4609 can, 0 if we can't. Set the output range into the specified parameters. */
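/* For example (values illustrative): merging + [2, 5] and + [4, 9] for
   an AND yields + [4, 5]; an OR is handled by the callers inverting both
   ranges, merging - [2, 5] with - [4, 9] into - [2, 9], and inverting
   the result back to + [2, 9].  */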
4610
4611 bool
4612 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4613 tree high0, int in1_p, tree low1, tree high1)
4614 {
4615 int no_overlap;
4616 int subset;
4617 int temp;
4618 tree tem;
4619 int in_p;
4620 tree low, high;
4621 int lowequal = ((low0 == 0 && low1 == 0)
4622 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4623 low0, 0, low1, 0)));
4624 int highequal = ((high0 == 0 && high1 == 0)
4625 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4626 high0, 1, high1, 1)));
4627
4628 /* Make range 0 be the range that starts first, or ends last if they
4629 start at the same value. Swap them if it isn't. */
4630 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4631 low0, 0, low1, 0))
4632 || (lowequal
4633 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4634 high1, 1, high0, 1))))
4635 {
4636 temp = in0_p, in0_p = in1_p, in1_p = temp;
4637 tem = low0, low0 = low1, low1 = tem;
4638 tem = high0, high0 = high1, high1 = tem;
4639 }
4640
4641 /* Now flag two cases, whether the ranges are disjoint or whether the
4642 second range is totally subsumed in the first. Note that the tests
4643 below are simplified by the ones above. */
4644 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4645 high0, 1, low1, 0));
4646 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4647 high1, 1, high0, 1));
4648
4649 /* We now have four cases, depending on whether we are including or
4650 excluding the two ranges. */
4651 if (in0_p && in1_p)
4652 {
4653 /* If they don't overlap, the result is false. If the second range
4654 is a subset it is the result. Otherwise, the range is from the start
4655 of the second to the end of the first. */
4656 if (no_overlap)
4657 in_p = 0, low = high = 0;
4658 else if (subset)
4659 in_p = 1, low = low1, high = high1;
4660 else
4661 in_p = 1, low = low1, high = high0;
4662 }
4663
4664 else if (in0_p && ! in1_p)
4665 {
4666 /* If they don't overlap, the result is the first range. If they are
4667 equal, the result is false. If the second range is a subset of the
4668 first, and the ranges begin at the same place, we go from just after
4669 the end of the second range to the end of the first. If the second
4670 range is not a subset of the first, or if it is a subset and both
4671 ranges end at the same place, the range starts at the start of the
4672 first range and ends just before the second range.
4673 Otherwise, we can't describe this as a single range. */
4674 if (no_overlap)
4675 in_p = 1, low = low0, high = high0;
4676 else if (lowequal && highequal)
4677 in_p = 0, low = high = 0;
4678 else if (subset && lowequal)
4679 {
4680 low = range_successor (high1);
4681 high = high0;
4682 in_p = 1;
4683 if (low == 0)
4684 {
4685 /* We are in the weird situation where high0 > high1 but
4686 high1 has no successor. Punt. */
4687 return 0;
4688 }
4689 }
4690 else if (! subset || highequal)
4691 {
4692 low = low0;
4693 high = range_predecessor (low1);
4694 in_p = 1;
4695 if (high == 0)
4696 {
4697 /* low0 < low1 but low1 has no predecessor. Punt. */
4698 return 0;
4699 }
4700 }
4701 else
4702 return 0;
4703 }
4704
4705 else if (! in0_p && in1_p)
4706 {
4707 /* If they don't overlap, the result is the second range. If the second
4708 is a subset of the first, the result is false. Otherwise,
4709 the range starts just after the first range and ends at the
4710 end of the second. */
4711 if (no_overlap)
4712 in_p = 1, low = low1, high = high1;
4713 else if (subset || highequal)
4714 in_p = 0, low = high = 0;
4715 else
4716 {
4717 low = range_successor (high0);
4718 high = high1;
4719 in_p = 1;
4720 if (low == 0)
4721 {
4722 /* high1 > high0 but high0 has no successor. Punt. */
4723 return 0;
4724 }
4725 }
4726 }
4727
4728 else
4729 {
4730 /* The case where we are excluding both ranges. Here the complex case
4731 is if they don't overlap. In that case, the only time we have a
4732 range is if they are adjacent. If the second is a subset of the
4733 first, the result is the first. Otherwise, the range to exclude
4734 starts at the beginning of the first range and ends at the end of the
4735 second. */
4736 if (no_overlap)
4737 {
4738 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4739 range_successor (high0),
4740 1, low1, 0)))
4741 in_p = 0, low = low0, high = high1;
4742 else
4743 {
4744 /* Canonicalize - [min, x] into - [-, x]. */
4745 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4746 switch (TREE_CODE (TREE_TYPE (low0)))
4747 {
4748 case ENUMERAL_TYPE:
4749 if (TYPE_PRECISION (TREE_TYPE (low0))
4750 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4751 break;
4752 /* FALLTHROUGH */
4753 case INTEGER_TYPE:
4754 if (tree_int_cst_equal (low0,
4755 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4756 low0 = 0;
4757 break;
4758 case POINTER_TYPE:
4759 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4760 && integer_zerop (low0))
4761 low0 = 0;
4762 break;
4763 default:
4764 break;
4765 }
4766
4767 /* Canonicalize - [x, max] into - [x, -]. */
4768 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4769 switch (TREE_CODE (TREE_TYPE (high1)))
4770 {
4771 case ENUMERAL_TYPE:
4772 if (TYPE_PRECISION (TREE_TYPE (high1))
4773 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4774 break;
4775 /* FALLTHROUGH */
4776 case INTEGER_TYPE:
4777 if (tree_int_cst_equal (high1,
4778 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4779 high1 = 0;
4780 break;
4781 case POINTER_TYPE:
4782 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4783 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4784 high1, 1,
4785 build_int_cst (TREE_TYPE (high1), 1),
4786 1)))
4787 high1 = 0;
4788 break;
4789 default:
4790 break;
4791 }
4792
4793 /* The ranges might be also adjacent between the maximum and
4794 minimum values of the given type. For
4795 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4796 return + [x + 1, y - 1]. */
4797 if (low0 == 0 && high1 == 0)
4798 {
4799 low = range_successor (high0);
4800 high = range_predecessor (low1);
4801 if (low == 0 || high == 0)
4802 return 0;
4803
4804 in_p = 1;
4805 }
4806 else
4807 return 0;
4808 }
4809 }
4810 else if (subset)
4811 in_p = 0, low = low0, high = high0;
4812 else
4813 in_p = 0, low = low0, high = high1;
4814 }
4815
4816 *pin_p = in_p, *plow = low, *phigh = high;
4817 return 1;
4818 }
4819 \f
4820
4821 /* Subroutine of fold, looking inside expressions of the form
4822 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4823 of the COND_EXPR. This function is being used also to optimize
4824 A op B ? C : A, by reversing the comparison first.
4825
4826 Return a folded expression whose code is not a COND_EXPR
4827 anymore, or NULL_TREE if no folding opportunity is found. */
4828
4829 static tree
4830 fold_cond_expr_with_comparison (location_t loc, tree type,
4831 tree arg0, tree arg1, tree arg2)
4832 {
4833 enum tree_code comp_code = TREE_CODE (arg0);
4834 tree arg00 = TREE_OPERAND (arg0, 0);
4835 tree arg01 = TREE_OPERAND (arg0, 1);
4836 tree arg1_type = TREE_TYPE (arg1);
4837 tree tem;
4838
4839 STRIP_NOPS (arg1);
4840 STRIP_NOPS (arg2);
4841
4842 /* If we have A op 0 ? A : -A, consider applying the following
4843 transformations:
4844
4845 A == 0? A : -A same as -A
4846 A != 0? A : -A same as A
4847 A >= 0? A : -A same as abs (A)
4848 A > 0? A : -A same as abs (A)
4849 A <= 0? A : -A same as -abs (A)
4850 A < 0? A : -A same as -abs (A)
4851
4852 None of these transformations work for modes with signed
4853 zeros. If A is +/-0, the first two transformations will
4854 change the sign of the result (from +0 to -0, or vice
4855 versa). The last four will fix the sign of the result,
4856 even though the original expressions could be positive or
4857 negative, depending on the sign of A.
4858
4859 Note that all these transformations are correct if A is
4860 NaN, since the two alternatives (A and -A) are also NaNs. */
4861 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4862 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4863 ? real_zerop (arg01)
4864 : integer_zerop (arg01))
4865 && ((TREE_CODE (arg2) == NEGATE_EXPR
4866 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4867 /* In the case that A is of the form X-Y, '-A' (arg2) may
4868 have already been folded to Y-X, check for that. */
4869 || (TREE_CODE (arg1) == MINUS_EXPR
4870 && TREE_CODE (arg2) == MINUS_EXPR
4871 && operand_equal_p (TREE_OPERAND (arg1, 0),
4872 TREE_OPERAND (arg2, 1), 0)
4873 && operand_equal_p (TREE_OPERAND (arg1, 1),
4874 TREE_OPERAND (arg2, 0), 0))))
4875 switch (comp_code)
4876 {
4877 case EQ_EXPR:
4878 case UNEQ_EXPR:
4879 tem = fold_convert_loc (loc, arg1_type, arg1);
4880 return pedantic_non_lvalue_loc (loc,
4881 fold_convert_loc (loc, type,
4882 negate_expr (tem)));
4883 case NE_EXPR:
4884 case LTGT_EXPR:
4885 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4886 case UNGE_EXPR:
4887 case UNGT_EXPR:
4888 if (flag_trapping_math)
4889 break;
4890 /* Fall through. */
4891 case GE_EXPR:
4892 case GT_EXPR:
4893 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4894 arg1 = fold_convert_loc (loc, signed_type_for
4895 (TREE_TYPE (arg1)), arg1);
4896 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4897 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4898 case UNLE_EXPR:
4899 case UNLT_EXPR:
4900 if (flag_trapping_math)
4901 break;
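	/* Fall through.  */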
4902 case LE_EXPR:
4903 case LT_EXPR:
4904 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4905 arg1 = fold_convert_loc (loc, signed_type_for
4906 (TREE_TYPE (arg1)), arg1);
4907 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4908 return negate_expr (fold_convert_loc (loc, type, tem));
4909 default:
4910 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4911 break;
4912 }
4913
4914 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4915 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4916 both transformations are correct when A is NaN: A != 0
4917 is then true, and A == 0 is false. */
4918
4919 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4920 && integer_zerop (arg01) && integer_zerop (arg2))
4921 {
4922 if (comp_code == NE_EXPR)
4923 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4924 else if (comp_code == EQ_EXPR)
4925 return build_zero_cst (type);
4926 }
4927
4928 /* Try some transformations of A op B ? A : B.
4929
4930 A == B? A : B same as B
4931 A != B? A : B same as A
4932 A >= B? A : B same as max (A, B)
4933 A > B? A : B same as max (B, A)
4934 A <= B? A : B same as min (A, B)
4935 A < B? A : B same as min (B, A)
4936
4937 As above, these transformations don't work in the presence
4938 of signed zeros. For example, if A and B are zeros of
4939 opposite sign, the first two transformations will change
4940 the sign of the result. In the last four, the original
4941 expressions give different results for (A=+0, B=-0) and
4942 (A=-0, B=+0), but the transformed expressions do not.
4943
4944 The first two transformations are correct if either A or B
4945 is a NaN. In the first transformation, the condition will
4946 be false, and B will indeed be chosen. In the case of the
4947 second transformation, the condition A != B will be true,
4948 and A will be chosen.
4949
4950 The conversions to max() and min() are not correct if B is
4951 a number and A is not. The conditions in the original
4952 expressions will be false, so all four give B. The min()
4953 and max() versions would give a NaN instead. */
4954 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4955 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4956 /* Avoid these transformations if the COND_EXPR may be used
4957 as an lvalue in the C++ front-end. PR c++/19199. */
4958 && (in_gimple_form
4959 || VECTOR_TYPE_P (type)
4960 || (! lang_GNU_CXX ()
4961 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4962 || ! maybe_lvalue_p (arg1)
4963 || ! maybe_lvalue_p (arg2)))
4964 {
4965 tree comp_op0 = arg00;
4966 tree comp_op1 = arg01;
4967 tree comp_type = TREE_TYPE (comp_op0);
4968
4969 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4970 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4971 {
4972 comp_type = type;
4973 comp_op0 = arg1;
4974 comp_op1 = arg2;
4975 }
4976
4977 switch (comp_code)
4978 {
4979 case EQ_EXPR:
4980 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4981 case NE_EXPR:
4982 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4983 case LE_EXPR:
4984 case LT_EXPR:
4985 case UNLE_EXPR:
4986 case UNLT_EXPR:
4987 /* In C++ a ?: expression can be an lvalue, so put the
4988 operand which will be used if they are equal first
4989 so that we can convert this back to the
4990 corresponding COND_EXPR. */
4991 if (!HONOR_NANS (element_mode (arg1)))
4992 {
4993 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4994 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4995 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4996 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4997 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4998 comp_op1, comp_op0);
4999 return pedantic_non_lvalue_loc (loc,
5000 fold_convert_loc (loc, type, tem));
5001 }
5002 break;
5003 case GE_EXPR:
5004 case GT_EXPR:
5005 case UNGE_EXPR:
5006 case UNGT_EXPR:
5007 if (!HONOR_NANS (element_mode (arg1)))
5008 {
5009 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5010 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5011 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5012 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5013 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5014 comp_op1, comp_op0);
5015 return pedantic_non_lvalue_loc (loc,
5016 fold_convert_loc (loc, type, tem));
5017 }
5018 break;
5019 case UNEQ_EXPR:
5020 if (!HONOR_NANS (element_mode (arg1)))
5021 return pedantic_non_lvalue_loc (loc,
5022 fold_convert_loc (loc, type, arg2));
5023 break;
5024 case LTGT_EXPR:
5025 if (!HONOR_NANS (element_mode (arg1)))
5026 return pedantic_non_lvalue_loc (loc,
5027 fold_convert_loc (loc, type, arg1));
5028 break;
5029 default:
5030 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5031 break;
5032 }
5033 }
5034
5035 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5036 we might still be able to simplify this. For example,
5037 if C1 is one less or one more than C2, this might have started
5038 out as a MIN or MAX and been transformed by this function.
5039 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5040
5041 if (INTEGRAL_TYPE_P (type)
5042 && TREE_CODE (arg01) == INTEGER_CST
5043 && TREE_CODE (arg2) == INTEGER_CST)
5044 switch (comp_code)
5045 {
5046 case EQ_EXPR:
5047 if (TREE_CODE (arg1) == INTEGER_CST)
5048 break;
5049 /* We can replace A with C1 in this case. */
5050 arg1 = fold_convert_loc (loc, type, arg01);
5051 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5052
5053 case LT_EXPR:
5054 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5055 MIN_EXPR, to preserve the signedness of the comparison. */
5056 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5057 OEP_ONLY_CONST)
5058 && operand_equal_p (arg01,
5059 const_binop (PLUS_EXPR, arg2,
5060 build_int_cst (type, 1)),
5061 OEP_ONLY_CONST))
5062 {
5063 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5064 fold_convert_loc (loc, TREE_TYPE (arg00),
5065 arg2));
5066 return pedantic_non_lvalue_loc (loc,
5067 fold_convert_loc (loc, type, tem));
5068 }
5069 break;
5070
5071 case LE_EXPR:
5072 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5073 as above. */
5074 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5075 OEP_ONLY_CONST)
5076 && operand_equal_p (arg01,
5077 const_binop (MINUS_EXPR, arg2,
5078 build_int_cst (type, 1)),
5079 OEP_ONLY_CONST))
5080 {
5081 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5082 fold_convert_loc (loc, TREE_TYPE (arg00),
5083 arg2));
5084 return pedantic_non_lvalue_loc (loc,
5085 fold_convert_loc (loc, type, tem));
5086 }
5087 break;
5088
5089 case GT_EXPR:
5090 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5091 MAX_EXPR, to preserve the signedness of the comparison. */
5092 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5093 OEP_ONLY_CONST)
5094 && operand_equal_p (arg01,
5095 const_binop (MINUS_EXPR, arg2,
5096 build_int_cst (type, 1)),
5097 OEP_ONLY_CONST))
5098 {
5099 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5100 fold_convert_loc (loc, TREE_TYPE (arg00),
5101 arg2));
5102 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5103 }
5104 break;
5105
5106 case GE_EXPR:
5107 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5108 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5109 OEP_ONLY_CONST)
5110 && operand_equal_p (arg01,
5111 const_binop (PLUS_EXPR, arg2,
5112 build_int_cst (type, 1)),
5113 OEP_ONLY_CONST))
5114 {
5115 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5116 fold_convert_loc (loc, TREE_TYPE (arg00),
5117 arg2));
5118 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5119 }
5120 break;
5121 case NE_EXPR:
5122 break;
5123 default:
5124 gcc_unreachable ();
5125 }
5126
5127 return NULL_TREE;
5128 }
5129
5130
5131 \f
5132 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5133 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5134 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5135 false) >= 2)
5136 #endif
5137
5138 /* EXP is some logical combination of boolean tests. See if we can
5139 merge it into some range test. Return the new tree if so. */
5140
5141 static tree
5142 fold_range_test (location_t loc, enum tree_code code, tree type,
5143 tree op0, tree op1)
5144 {
5145 int or_op = (code == TRUTH_ORIF_EXPR
5146 || code == TRUTH_OR_EXPR);
5147 int in0_p, in1_p, in_p;
5148 tree low0, low1, low, high0, high1, high;
5149 bool strict_overflow_p = false;
5150 tree tem, lhs, rhs;
5151 const char * const warnmsg = G_("assuming signed overflow does not occur "
5152 "when simplifying range test");
5153
5154 if (!INTEGRAL_TYPE_P (type))
5155 return 0;
5156
5157 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5158 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5159
5160 /* If this is an OR operation, invert both sides; we will invert
5161 again at the end. */
5162 if (or_op)
5163 in0_p = ! in0_p, in1_p = ! in1_p;
5164
5165 /* If both expressions are the same, if we can merge the ranges, and we
5166 can build the range test, return it or its inversion. If one of the
5167 ranges is always true or always false, consider it to be the same
5168 expression as the other. */
5169 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5170 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5171 in1_p, low1, high1)
5172 && 0 != (tem = (build_range_check (loc, type,
5173 lhs != 0 ? lhs
5174 : rhs != 0 ? rhs : integer_zero_node,
5175 in_p, low, high))))
5176 {
5177 if (strict_overflow_p)
5178 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5179 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5180 }
5181
5182 /* On machines where the branch cost is expensive, if this is a
5183 short-circuited branch and the underlying object on both sides
5184 is the same, make a non-short-circuit operation. */
5185 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5186 && lhs != 0 && rhs != 0
5187 && (code == TRUTH_ANDIF_EXPR
5188 || code == TRUTH_ORIF_EXPR)
5189 && operand_equal_p (lhs, rhs, 0))
5190 {
5191 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5192 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5193 which cases we can't do this. */
5194 if (simple_operand_p (lhs))
5195 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5196 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5197 type, op0, op1);
5198
5199 else if (!lang_hooks.decls.global_bindings_p ()
5200 && !CONTAINS_PLACEHOLDER_P (lhs))
5201 {
5202 tree common = save_expr (lhs);
5203
5204 if (0 != (lhs = build_range_check (loc, type, common,
5205 or_op ? ! in0_p : in0_p,
5206 low0, high0))
5207 && (0 != (rhs = build_range_check (loc, type, common,
5208 or_op ? ! in1_p : in1_p,
5209 low1, high1))))
5210 {
5211 if (strict_overflow_p)
5212 fold_overflow_warning (warnmsg,
5213 WARN_STRICT_OVERFLOW_COMPARISON);
5214 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5215 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5216 type, lhs, rhs);
5217 }
5218 }
5219 }
5220
5221 return 0;
5222 }
5223 \f
5224 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5225 bit value. Arrange things so the extra bits will be set to zero if and
5226 only if C is sign-extended to its full width. If MASK is nonzero,
5227 it is an INTEGER_CST that should be AND'ed with the extra bits. */
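/* Worked 8-bit example (illustrative): with P == 4, C == 0xf9 is the
   sign-extension of the 4-bit value -7, and the XOR below leaves the
   extra bits zero: 0xf9 ^ 0xf0 == 0x09.  For C == 0x09, which is not
   sign-extended, 0x09 ^ 0xf0 == 0xf9 has the extra bits set.  */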
5228
5229 static tree
5230 unextend (tree c, int p, int unsignedp, tree mask)
5231 {
5232 tree type = TREE_TYPE (c);
5233 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5234 tree temp;
5235
5236 if (p == modesize || unsignedp)
5237 return c;
5238
5239 /* We work by getting just the sign bit into the low-order bit, then
5240 into the high-order bit, then sign-extend. We then XOR that value
5241 with C. */
5242 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5243
5244 /* We must use a signed type in order to get an arithmetic right shift.
5245 However, we must also avoid introducing accidental overflows, so that
5246 a subsequent call to integer_zerop will work. Hence we must
5247 do the type conversion here. At this point, the constant is either
5248 zero or one, and the conversion to a signed type can never overflow.
5249 We could get an overflow if this conversion is done anywhere else. */
5250 if (TYPE_UNSIGNED (type))
5251 temp = fold_convert (signed_type_for (type), temp);
5252
5253 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5254 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5255 if (mask != 0)
5256 temp = const_binop (BIT_AND_EXPR, temp,
5257 fold_convert (TREE_TYPE (c), mask));
5258 /* If necessary, convert the type back to match the type of C. */
5259 if (TYPE_UNSIGNED (type))
5260 temp = fold_convert (type, temp);
5261
5262 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5263 }
5264 \f
5265 /* For an expression that has the form
5266 (A && B) || ~B
5267 or
5268 (A || B) && ~B,
5269 we can drop one of the inner expressions and simplify to
5270 A || ~B
5271 or
5272 A && ~B
5273 LOC is the location of the resulting expression. OP is the inner
5274 logical operation; the left-hand side in the examples above, while CMPOP
5275 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5276 removing a condition that guards another, as in
5277 (A != NULL && A->...) || A == NULL
5278 which we must not transform. If RHS_ONLY is true, only eliminate the
5279 right-most operand of the inner logical operation. */
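/* For example (names illustrative): with RHS_ONLY false,

       (p > 0 && q > 0) || p <= 0

   folds to q > 0 || p <= 0, since p <= 0 is the inverse of the inner
   p > 0.  For (a != 0 && a->x > 0) || a == 0 the RHS_ONLY restriction
   keeps the guarding a != 0 from being removed.  */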
5280
5281 static tree
5282 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5283 bool rhs_only)
5284 {
5285 tree type = TREE_TYPE (cmpop);
5286 enum tree_code code = TREE_CODE (cmpop);
5287 enum tree_code truthop_code = TREE_CODE (op);
5288 tree lhs = TREE_OPERAND (op, 0);
5289 tree rhs = TREE_OPERAND (op, 1);
5290 tree orig_lhs = lhs, orig_rhs = rhs;
5291 enum tree_code rhs_code = TREE_CODE (rhs);
5292 enum tree_code lhs_code = TREE_CODE (lhs);
5293 enum tree_code inv_code;
5294
5295 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5296 return NULL_TREE;
5297
5298 if (TREE_CODE_CLASS (code) != tcc_comparison)
5299 return NULL_TREE;
5300
5301 if (rhs_code == truthop_code)
5302 {
5303 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5304 if (newrhs != NULL_TREE)
5305 {
5306 rhs = newrhs;
5307 rhs_code = TREE_CODE (rhs);
5308 }
5309 }
5310 if (lhs_code == truthop_code && !rhs_only)
5311 {
5312 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5313 if (newlhs != NULL_TREE)
5314 {
5315 lhs = newlhs;
5316 lhs_code = TREE_CODE (lhs);
5317 }
5318 }
5319
5320 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5321 if (inv_code == rhs_code
5322 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5323 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5324 return lhs;
5325 if (!rhs_only && inv_code == lhs_code
5326 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5327 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5328 return rhs;
5329 if (rhs != orig_rhs || lhs != orig_lhs)
5330 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5331 lhs, rhs);
5332 return NULL_TREE;
5333 }
5334
5335 /* Find ways of folding logical expressions of LHS and RHS:
5336 Try to merge two comparisons to the same innermost item.
5337 Look for range tests like "ch >= '0' && ch <= '9'".
5338 Look for combinations of simple terms on machines with expensive branches
5339 and evaluate the RHS unconditionally.
5340
5341 For example, if we have p->a == 2 && p->b == 4 and we can make an
5342 object large enough to span both A and B, we can do this with a comparison
5343 against the object ANDed with a mask.
5344
5345 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5346 operations to do this with one comparison.
5347
5348 We check for both normal comparisons and the BIT_AND_EXPRs made by
5349 this function and the one above.
5350
5351 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5352 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5353
5354 TRUTH_TYPE is the type of the logical operation and LHS and RHS are
5355 its two operands.
5356
5357 We return the simplified tree or 0 if no optimization is possible. */
5358
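/* Illustrative sketch (hypothetical struct, little-endian layout
   assumed): given
     struct S { unsigned char a; unsigned char b; } *p;
   the test "p->a == 2 && p->b == 4" spans two adjacent byte fields, so
   it can become a single 16-bit load, mask and compare, roughly
     (*(unsigned short *) p) == 0x0402
   The code below computes the covering mode, the masks and the merged
   constant that make such a transformation safe.  */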
5359 static tree
5360 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5361 tree lhs, tree rhs)
5362 {
5363 /* If this is the "or" of two comparisons, we can do something if
5364 the comparisons are NE_EXPR. If this is the "and", we can do something
5365 if the comparisons are EQ_EXPR. I.e.,
5366 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5367
5368 WANTED_CODE is the comparison code we require on both sides. For
5369 single-bit fields, we can convert EQ_EXPR to NE_EXPR so we need not
5370 reject the "wrong" comparison for one-bit fields. */
5371
5372 enum tree_code wanted_code;
5373 enum tree_code lcode, rcode;
5374 tree ll_arg, lr_arg, rl_arg, rr_arg;
5375 tree ll_inner, lr_inner, rl_inner, rr_inner;
5376 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5377 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5378 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5379 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5380 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5381 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5382 machine_mode lnmode, rnmode;
5383 tree ll_mask, lr_mask, rl_mask, rr_mask;
5384 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5385 tree l_const, r_const;
5386 tree lntype, rntype, result;
5387 HOST_WIDE_INT first_bit, end_bit;
5388 int volatilep;
5389
5390 /* Start by getting the comparison codes. Fail if anything is volatile.
5391 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5392 it were surrounded with a NE_EXPR. */
5393
5394 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5395 return 0;
5396
5397 lcode = TREE_CODE (lhs);
5398 rcode = TREE_CODE (rhs);
5399
5400 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5401 {
5402 lhs = build2 (NE_EXPR, truth_type, lhs,
5403 build_int_cst (TREE_TYPE (lhs), 0));
5404 lcode = NE_EXPR;
5405 }
5406
5407 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5408 {
5409 rhs = build2 (NE_EXPR, truth_type, rhs,
5410 build_int_cst (TREE_TYPE (rhs), 0));
5411 rcode = NE_EXPR;
5412 }
5413
5414 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5415 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5416 return 0;
5417
5418 ll_arg = TREE_OPERAND (lhs, 0);
5419 lr_arg = TREE_OPERAND (lhs, 1);
5420 rl_arg = TREE_OPERAND (rhs, 0);
5421 rr_arg = TREE_OPERAND (rhs, 1);
5422
5423 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5424 if (simple_operand_p (ll_arg)
5425 && simple_operand_p (lr_arg))
5426 {
5427 if (operand_equal_p (ll_arg, rl_arg, 0)
5428 && operand_equal_p (lr_arg, rr_arg, 0))
5429 {
5430 result = combine_comparisons (loc, code, lcode, rcode,
5431 truth_type, ll_arg, lr_arg);
5432 if (result)
5433 return result;
5434 }
5435 else if (operand_equal_p (ll_arg, rr_arg, 0)
5436 && operand_equal_p (lr_arg, rl_arg, 0))
5437 {
5438 result = combine_comparisons (loc, code, lcode,
5439 swap_tree_comparison (rcode),
5440 truth_type, ll_arg, lr_arg);
5441 if (result)
5442 return result;
5443 }
5444 }
5445
5446 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5447 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5448
5449 /* If the RHS can be evaluated unconditionally and its operands are
5450 simple, it wins to evaluate the RHS unconditionally on machines
5451 with expensive branches. In this case, this isn't a comparison
5452 that can be merged. */
5453
5454 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5455 false) >= 2
5456 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5457 && simple_operand_p (rl_arg)
5458 && simple_operand_p (rr_arg))
5459 {
5460 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5461 if (code == TRUTH_OR_EXPR
5462 && lcode == NE_EXPR && integer_zerop (lr_arg)
5463 && rcode == NE_EXPR && integer_zerop (rr_arg)
5464 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5465 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5466 return build2_loc (loc, NE_EXPR, truth_type,
5467 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5468 ll_arg, rl_arg),
5469 build_int_cst (TREE_TYPE (ll_arg), 0));
5470
5471 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5472 if (code == TRUTH_AND_EXPR
5473 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5474 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5475 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5476 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5477 return build2_loc (loc, EQ_EXPR, truth_type,
5478 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5479 ll_arg, rl_arg),
5480 build_int_cst (TREE_TYPE (ll_arg), 0));
5481 }
5482
5483 /* See if the comparisons can be merged. Then get all the parameters for
5484 each side. */
5485
5486 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5487 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5488 return 0;
5489
5490 volatilep = 0;
5491 ll_inner = decode_field_reference (loc, ll_arg,
5492 &ll_bitsize, &ll_bitpos, &ll_mode,
5493 &ll_unsignedp, &volatilep, &ll_mask,
5494 &ll_and_mask);
5495 lr_inner = decode_field_reference (loc, lr_arg,
5496 &lr_bitsize, &lr_bitpos, &lr_mode,
5497 &lr_unsignedp, &volatilep, &lr_mask,
5498 &lr_and_mask);
5499 rl_inner = decode_field_reference (loc, rl_arg,
5500 &rl_bitsize, &rl_bitpos, &rl_mode,
5501 &rl_unsignedp, &volatilep, &rl_mask,
5502 &rl_and_mask);
5503 rr_inner = decode_field_reference (loc, rr_arg,
5504 &rr_bitsize, &rr_bitpos, &rr_mode,
5505 &rr_unsignedp, &volatilep, &rr_mask,
5506 &rr_and_mask);
5507
5508 /* The inner references on the lhs of each comparison must be the
5509 same if we are to be able to do anything. Then see if we have
5510 constants. If not, the same must be true for the rhs's. */
5512 if (volatilep || ll_inner == 0 || rl_inner == 0
5513 || ! operand_equal_p (ll_inner, rl_inner, 0))
5514 return 0;
5515
5516 if (TREE_CODE (lr_arg) == INTEGER_CST
5517 && TREE_CODE (rr_arg) == INTEGER_CST)
5518 l_const = lr_arg, r_const = rr_arg;
5519 else if (lr_inner == 0 || rr_inner == 0
5520 || ! operand_equal_p (lr_inner, rr_inner, 0))
5521 return 0;
5522 else
5523 l_const = r_const = 0;
5524
5525 /* If either comparison code is not correct for our logical operation,
5526 fail. However, we can convert a one-bit comparison against zero into
5527 the opposite comparison against that bit being set in the field. */
5528
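  /* E.g. with TRUTH_AND_EXPR (wanted code EQ_EXPR), a left-hand test
     such as "(x & 4) != 0" can be recast as the equality
     "(x & 4) == 4" (illustrative).  */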
5529 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5530 if (lcode != wanted_code)
5531 {
5532 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5533 {
5534 /* Make the left operand unsigned, since we are only interested
5535 in the value of one bit. Otherwise we are doing the wrong
5536 thing below. */
5537 ll_unsignedp = 1;
5538 l_const = ll_mask;
5539 }
5540 else
5541 return 0;
5542 }
5543
5544 /* This is analogous to the code for l_const above. */
5545 if (rcode != wanted_code)
5546 {
5547 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5548 {
5549 rl_unsignedp = 1;
5550 r_const = rl_mask;
5551 }
5552 else
5553 return 0;
5554 }
5555
5556 /* See if we can find a mode that contains both fields being compared on
5557 the left. If we can't, fail. Otherwise, update all constants and masks
5558 to be relative to a field of that size. */
5559 first_bit = MIN (ll_bitpos, rl_bitpos);
5560 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5561 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5562 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5563 volatilep);
5564 if (lnmode == VOIDmode)
5565 return 0;
5566
5567 lnbitsize = GET_MODE_BITSIZE (lnmode);
5568 lnbitpos = first_bit & ~ (lnbitsize - 1);
5569 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5570 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5571
5572 if (BYTES_BIG_ENDIAN)
5573 {
5574 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5575 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5576 }
5577
5578 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5579 size_int (xll_bitpos));
5580 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5581 size_int (xrl_bitpos));
5582
5583 if (l_const)
5584 {
5585 l_const = fold_convert_loc (loc, lntype, l_const);
5586 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5587 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5588 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5589 fold_build1_loc (loc, BIT_NOT_EXPR,
5590 lntype, ll_mask))))
5591 {
5592 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5593
5594 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5595 }
5596 }
5597 if (r_const)
5598 {
5599 r_const = fold_convert_loc (loc, lntype, r_const);
5600 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5601 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5602 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5603 fold_build1_loc (loc, BIT_NOT_EXPR,
5604 lntype, rl_mask))))
5605 {
5606 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5607
5608 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5609 }
5610 }
5611
5612 /* If the right sides are not constant, do the same for them. Also,
5613 disallow this optimization if a size or signedness mismatch occurs
5614 between the left and right sides. */
5615 if (l_const == 0)
5616 {
5617 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5618 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5619 /* Make sure the two fields on the right
5620 correspond to the left without being swapped. */
5621 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5622 return 0;
5623
5624 first_bit = MIN (lr_bitpos, rr_bitpos);
5625 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5626 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5627 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5628 volatilep);
5629 if (rnmode == VOIDmode)
5630 return 0;
5631
5632 rnbitsize = GET_MODE_BITSIZE (rnmode);
5633 rnbitpos = first_bit & ~ (rnbitsize - 1);
5634 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5635 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5636
5637 if (BYTES_BIG_ENDIAN)
5638 {
5639 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5640 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5641 }
5642
5643 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5644 rntype, lr_mask),
5645 size_int (xlr_bitpos));
5646 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5647 rntype, rr_mask),
5648 size_int (xrr_bitpos));
5649
5650 /* Make a mask that corresponds to both fields being compared.
5651 Do this for both items being compared. If the operands are the
5652 same size and the bits being compared are in the same position
5653 then we can do this by masking both and comparing the masked
5654 results. */
5655 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5656 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5657 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5658 {
5659 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5660 ll_unsignedp || rl_unsignedp);
5661 if (! all_ones_mask_p (ll_mask, lnbitsize))
5662 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5663
5664 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5665 lr_unsignedp || rr_unsignedp);
5666 if (! all_ones_mask_p (lr_mask, rnbitsize))
5667 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5668
5669 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5670 }
5671
5672 /* There is still another way we can do something: If both pairs of
5673 fields being compared are adjacent, we may be able to make a wider
5674 field containing them both.
5675
5676 Note that we still must mask the lhs/rhs expressions. Furthermore,
5677 the mask must be shifted to account for the shift done by
5678 make_bit_field_ref. */
5679 if ((ll_bitsize + ll_bitpos == rl_bitpos
5680 && lr_bitsize + lr_bitpos == rr_bitpos)
5681 || (ll_bitpos == rl_bitpos + rl_bitsize
5682 && lr_bitpos == rr_bitpos + rr_bitsize))
5683 {
5684 tree type;
5685
5686 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5687 ll_bitsize + rl_bitsize,
5688 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5689 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5690 lr_bitsize + rr_bitsize,
5691 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5692
5693 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5694 size_int (MIN (xll_bitpos, xrl_bitpos)));
5695 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5696 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5697
5698 /* Convert to the smaller type before masking out unwanted bits. */
5699 type = lntype;
5700 if (lntype != rntype)
5701 {
5702 if (lnbitsize > rnbitsize)
5703 {
5704 lhs = fold_convert_loc (loc, rntype, lhs);
5705 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5706 type = rntype;
5707 }
5708 else if (lnbitsize < rnbitsize)
5709 {
5710 rhs = fold_convert_loc (loc, lntype, rhs);
5711 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5712 type = lntype;
5713 }
5714 }
5715
5716 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5717 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5718
5719 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5720 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5721
5722 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5723 }
5724
5725 return 0;
5726 }
5727
5728 /* Handle the case of comparisons with constants. If there is something in
5729 common between the masks, those bits of the constants must be the same.
5730 If not, the condition is always false. Test for this to avoid generating
5731 incorrect code below. */
5732 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5733 if (! integer_zerop (result)
5734 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5735 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5736 {
5737 if (wanted_code == NE_EXPR)
5738 {
5739 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5740 return constant_boolean_node (true, truth_type);
5741 }
5742 else
5743 {
5744 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5745 return constant_boolean_node (false, truth_type);
5746 }
5747 }
5748
5749 /* Construct the expression we will return. First get the component
5750 reference we will make. Unless the mask is all ones for the width of
5751 that field, perform the mask operation. Then compare with the
5752 merged constant. */
5753 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5754 ll_unsignedp || rl_unsignedp);
5755
5756 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5757 if (! all_ones_mask_p (ll_mask, lnbitsize))
5758 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5759
5760 return build2_loc (loc, wanted_code, truth_type, result,
5761 const_binop (BIT_IOR_EXPR, l_const, r_const));
5762 }
5763 \f
5764 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5765 constant. */
5766
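/* For example, "MAX (x, 4) > 7" reduces to "x > 7", while
   "MIN (x, 4) > 7" is always false (illustrative).  */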
5767 static tree
5768 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5769 tree op0, tree op1)
5770 {
5771 tree arg0 = op0;
5772 enum tree_code op_code;
5773 tree comp_const;
5774 tree minmax_const;
5775 int consts_equal, consts_lt;
5776 tree inner;
5777
5778 STRIP_SIGN_NOPS (arg0);
5779
5780 op_code = TREE_CODE (arg0);
5781 minmax_const = TREE_OPERAND (arg0, 1);
5782 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5783 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5784 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5785 inner = TREE_OPERAND (arg0, 0);
5786
5787 /* If something does not permit us to optimize, return the original tree. */
5788 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5789 || TREE_CODE (comp_const) != INTEGER_CST
5790 || TREE_OVERFLOW (comp_const)
5791 || TREE_CODE (minmax_const) != INTEGER_CST
5792 || TREE_OVERFLOW (minmax_const))
5793 return NULL_TREE;
5794
5795 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5796 and GT_EXPR, doing the rest with recursive calls using logical
5797 simplifications. */
5798 switch (code)
5799 {
5800 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5801 {
5802 tree tem
5803 = optimize_minmax_comparison (loc,
5804 invert_tree_comparison (code, false),
5805 type, op0, op1);
5806 if (tem)
5807 return invert_truthvalue_loc (loc, tem);
5808 return NULL_TREE;
5809 }
5810
5811 case GE_EXPR:
5812 return
5813 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5814 optimize_minmax_comparison
5815 (loc, EQ_EXPR, type, arg0, comp_const),
5816 optimize_minmax_comparison
5817 (loc, GT_EXPR, type, arg0, comp_const));
5818
5819 case EQ_EXPR:
5820 if (op_code == MAX_EXPR && consts_equal)
5821 /* MAX (X, 0) == 0 -> X <= 0 */
5822 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5823
5824 else if (op_code == MAX_EXPR && consts_lt)
5825 /* MAX (X, 0) == 5 -> X == 5 */
5826 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5827
5828 else if (op_code == MAX_EXPR)
5829 /* MAX (X, 0) == -1 -> false */
5830 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5831
5832 else if (consts_equal)
5833 /* MIN (X, 0) == 0 -> X >= 0 */
5834 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5835
5836 else if (consts_lt)
5837 /* MIN (X, 0) == 5 -> false */
5838 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5839
5840 else
5841 /* MIN (X, 0) == -1 -> X == -1 */
5842 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5843
5844 case GT_EXPR:
5845 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5846 /* MAX (X, 0) > 0 -> X > 0
5847 MAX (X, 0) > 5 -> X > 5 */
5848 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5849
5850 else if (op_code == MAX_EXPR)
5851 /* MAX (X, 0) > -1 -> true */
5852 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5853
5854 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5855 /* MIN (X, 0) > 0 -> false
5856 MIN (X, 0) > 5 -> false */
5857 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5858
5859 else
5860 /* MIN (X, 0) > -1 -> X > -1 */
5861 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5862
5863 default:
5864 return NULL_TREE;
5865 }
5866 }
5867 \f
5868 /* T is an integer expression that is being multiplied, divided, or taken a
5869 modulus (CODE says which and what kind of divide or modulus) by a
5870 constant C. See if we can eliminate that operation by folding it with
5871 other operations already in T. WIDE_TYPE, if non-null, is a type that
5872 should be used for the computation if wider than our type.
5873
5874 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5875 (X * 2) + (Y * 4). We must, however, be assured that either the original
5876 expression would not overflow or that overflow is undefined for the type
5877 in the language in question.
5878
5879 If we return a non-null expression, it is an equivalent form of the
5880 original computation, but need not be in the original type.
5881
5882 We set *STRICT_OVERFLOW_P to true if the return value depends on
5883 signed overflow being undefined. Otherwise we do not change
5884 *STRICT_OVERFLOW_P. */
5885
5886 static tree
5887 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5888 bool *strict_overflow_p)
5889 {
5890 /* To avoid exponential search depth, refuse to allow recursion past
5891 three levels. Beyond that (1) it's highly unlikely that we'll find
5892 something interesting and (2) we've probably processed it before
5893 when we built the inner expression. */
5894
5895 static int depth;
5896 tree ret;
5897
5898 if (depth > 3)
5899 return NULL;
5900
5901 depth++;
5902 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5903 depth--;
5904
5905 return ret;
5906 }
5907
5908 static tree
5909 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5910 bool *strict_overflow_p)
5911 {
5912 tree type = TREE_TYPE (t);
5913 enum tree_code tcode = TREE_CODE (t);
5914 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5915 > GET_MODE_SIZE (TYPE_MODE (type)))
5916 ? wide_type : type);
5917 tree t1, t2;
5918 int same_p = tcode == code;
5919 tree op0 = NULL_TREE, op1 = NULL_TREE;
5920 bool sub_strict_overflow_p;
5921
5922 /* Don't deal with constants of zero here; they confuse the code below. */
5923 if (integer_zerop (c))
5924 return NULL_TREE;
5925
5926 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5927 op0 = TREE_OPERAND (t, 0);
5928
5929 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5930 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5931
5932 /* Note that we need not handle conditional operations here since fold
5933 already handles those cases. So just do arithmetic here. */
5934 switch (tcode)
5935 {
5936 case INTEGER_CST:
5937 /* For a constant, we can always simplify if we are a multiply
5938 or (for divide and modulus) if it is a multiple of our constant. */
5939 if (code == MULT_EXPR
5940 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5941 return const_binop (code, fold_convert (ctype, t),
5942 fold_convert (ctype, c));
5943 break;
5944
5945 CASE_CONVERT: case NON_LVALUE_EXPR:
5946 /* If op0 is an expression ... */
5947 if ((COMPARISON_CLASS_P (op0)
5948 || UNARY_CLASS_P (op0)
5949 || BINARY_CLASS_P (op0)
5950 || VL_EXP_CLASS_P (op0)
5951 || EXPRESSION_CLASS_P (op0))
5952 /* ... and has wrapping overflow, and its type is smaller
5953 than ctype, then we cannot pass through as widening. */
5954 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5955 && (TYPE_PRECISION (ctype)
5956 > TYPE_PRECISION (TREE_TYPE (op0))))
5957 /* ... or this is a truncation (t is narrower than op0),
5958 then we cannot pass through this narrowing. */
5959 || (TYPE_PRECISION (type)
5960 < TYPE_PRECISION (TREE_TYPE (op0)))
5961 /* ... or signedness changes for division or modulus,
5962 then we cannot pass through this conversion. */
5963 || (code != MULT_EXPR
5964 && (TYPE_UNSIGNED (ctype)
5965 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5966 /* ... or has undefined overflow while the converted to
5967 type has not, we cannot do the operation in the inner type
5968 as that would introduce undefined overflow. */
5969 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5970 && !TYPE_OVERFLOW_UNDEFINED (type))))
5971 break;
5972
5973 /* Pass the constant down and see if we can make a simplification. If
5974 we can, replace this expression with the inner simplification for
5975 possible later conversion to our or some other type. */
5976 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5977 && TREE_CODE (t2) == INTEGER_CST
5978 && !TREE_OVERFLOW (t2)
5979 && (0 != (t1 = extract_muldiv (op0, t2, code,
5980 code == MULT_EXPR
5981 ? ctype : NULL_TREE,
5982 strict_overflow_p))))
5983 return t1;
5984 break;
5985
5986 case ABS_EXPR:
5987 /* If widening the type changes it from signed to unsigned, then we
5988 must avoid building ABS_EXPR itself as unsigned. */
5989 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5990 {
5991 tree cstype = (*signed_type_for) (ctype);
5992 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5993 != 0)
5994 {
5995 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5996 return fold_convert (ctype, t1);
5997 }
5998 break;
5999 }
6000 /* If the constant is negative, we cannot simplify this. */
6001 if (tree_int_cst_sgn (c) == -1)
6002 break;
6003 /* FALLTHROUGH */
6004 case NEGATE_EXPR:
6005 /* For division and modulus, type can't be unsigned, as e.g.
6006 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6007 For signed types, even with wrapping overflow, this is fine. */
6008 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6009 break;
6010 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6011 != 0)
6012 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6013 break;
6014
6015 case MIN_EXPR: case MAX_EXPR:
6016 /* If widening the type changes the signedness, then we can't perform
6017 this optimization as that changes the result. */
6018 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6019 break;
6020
6021 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6022 sub_strict_overflow_p = false;
6023 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6024 &sub_strict_overflow_p)) != 0
6025 && (t2 = extract_muldiv (op1, c, code, wide_type,
6026 &sub_strict_overflow_p)) != 0)
6027 {
6028 if (tree_int_cst_sgn (c) < 0)
6029 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6030 if (sub_strict_overflow_p)
6031 *strict_overflow_p = true;
6032 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6033 fold_convert (ctype, t2));
6034 }
6035 break;
6036
6037 case LSHIFT_EXPR: case RSHIFT_EXPR:
6038 /* If the second operand is constant, this is a multiplication
6039 or floor division, by a power of two, so we can treat it that
6040 way unless the multiplier or divisor overflows. Signed
6041 left-shift overflow is implementation-defined rather than
6042 undefined in C90, so do not convert signed left shift into
6043 multiplication. */
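      /* E.g. an unsigned "x << 3" is rewritten below as "x * 8", and
	 "x >> 3" as the floor division "x / 8", so that the
	 multiply/divide logic can try to combine it with C
	 (illustrative).  */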
6044 if (TREE_CODE (op1) == INTEGER_CST
6045 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6046 /* const_binop may not detect overflow correctly,
6047 so check for it explicitly here. */
6048 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6049 && 0 != (t1 = fold_convert (ctype,
6050 const_binop (LSHIFT_EXPR,
6051 size_one_node,
6052 op1)))
6053 && !TREE_OVERFLOW (t1))
6054 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6055 ? MULT_EXPR : FLOOR_DIV_EXPR,
6056 ctype,
6057 fold_convert (ctype, op0),
6058 t1),
6059 c, code, wide_type, strict_overflow_p);
6060 break;
6061
6062 case PLUS_EXPR: case MINUS_EXPR:
6063 /* See if we can eliminate the operation on both sides. If we can, we
6064 can return a new PLUS or MINUS. If we can't, the only remaining
6065 cases where we can do anything are if the second operand is a
6066 constant. */
6067 sub_strict_overflow_p = false;
6068 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6069 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6070 if (t1 != 0 && t2 != 0
6071 && (code == MULT_EXPR
6072 /* If not multiplication, we can only do this if both operands
6073 are divisible by c. */
6074 || (multiple_of_p (ctype, op0, c)
6075 && multiple_of_p (ctype, op1, c))))
6076 {
6077 if (sub_strict_overflow_p)
6078 *strict_overflow_p = true;
6079 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6080 fold_convert (ctype, t2));
6081 }
6082
6083 /* If this was a subtraction, negate OP1 and set it to be an addition.
6084 This simplifies the logic below. */
6085 if (tcode == MINUS_EXPR)
6086 {
6087 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6088 /* If OP1 was not easily negatable, the constant may be OP0. */
6089 if (TREE_CODE (op0) == INTEGER_CST)
6090 {
6091 tree tem = op0;
6092 op0 = op1;
6093 op1 = tem;
6094 tem = t1;
6095 t1 = t2;
6096 t2 = tem;
6097 }
6098 }
6099
6100 if (TREE_CODE (op1) != INTEGER_CST)
6101 break;
6102
6103 /* If either OP1 or C is negative, this optimization is not safe for
6104 some of the division and remainder types while for others we need
6105 to change the code. */
6106 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6107 {
6108 if (code == CEIL_DIV_EXPR)
6109 code = FLOOR_DIV_EXPR;
6110 else if (code == FLOOR_DIV_EXPR)
6111 code = CEIL_DIV_EXPR;
6112 else if (code != MULT_EXPR
6113 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6114 break;
6115 }
6116
6117 /* If it's a multiply or a division/modulus operation of a multiple
6118 of our constant, do the operation and verify it doesn't overflow. */
6119 if (code == MULT_EXPR
6120 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6121 {
6122 op1 = const_binop (code, fold_convert (ctype, op1),
6123 fold_convert (ctype, c));
6124 /* We allow the constant to overflow with wrapping semantics. */
6125 if (op1 == 0
6126 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6127 break;
6128 }
6129 else
6130 break;
6131
6132 /* If we have an unsigned type, we cannot widen the operation since it
6133 will change the result if the original computation overflowed. */
6134 if (TYPE_UNSIGNED (ctype) && ctype != type)
6135 break;
6136
6137 /* If we were able to eliminate our operation from the first side,
6138 apply our operation to the second side and reform the PLUS. */
6139 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6140 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6141
6142 /* The last case is if we are a multiply. In that case, we can
6143 apply the distributive law to commute the multiply and addition
6144 if the multiplication of the constants doesn't overflow
6145 and overflow is defined. With undefined overflow
6146 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6147 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6148 return fold_build2 (tcode, ctype,
6149 fold_build2 (code, ctype,
6150 fold_convert (ctype, op0),
6151 fold_convert (ctype, c)),
6152 op1);
6153
6154 break;
6155
6156 case MULT_EXPR:
6157 /* We have a special case here if we are doing something like
6158 (X * 8) % 4 since we know that's zero. */
6159 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6160 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6161 /* If the multiplication can overflow we cannot optimize this. */
6162 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6163 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6164 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6165 {
6166 *strict_overflow_p = true;
6167 return omit_one_operand (type, integer_zero_node, op0);
6168 }
6169
6170 /* ... fall through ... */
6171
6172 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6173 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6174 /* If we can extract our operation from the LHS, do so and return a
6175 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6176 do something only if the second operand is a constant. */
6177 if (same_p
6178 && (t1 = extract_muldiv (op0, c, code, wide_type,
6179 strict_overflow_p)) != 0)
6180 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6181 fold_convert (ctype, op1));
6182 else if (tcode == MULT_EXPR && code == MULT_EXPR
6183 && (t1 = extract_muldiv (op1, c, code, wide_type,
6184 strict_overflow_p)) != 0)
6185 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6186 fold_convert (ctype, t1));
6187 else if (TREE_CODE (op1) != INTEGER_CST)
6188 return 0;
6189
6190 /* If these are the same operation types, we can associate them
6191 assuming no overflow. */
6192 if (tcode == code)
6193 {
6194 bool overflow_p = false;
6195 bool overflow_mul_p;
6196 signop sign = TYPE_SIGN (ctype);
6197 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6198 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6199 if (overflow_mul_p
6200 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6201 overflow_p = true;
6202 if (!overflow_p)
6203 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6204 wide_int_to_tree (ctype, mul));
6205 }
6206
6207 /* If these operations "cancel" each other, we have the main
6208 optimizations of this pass, which occur when either constant is a
6209 multiple of the other, in which case we replace this with either an
6210 operation of CODE or TCODE.
6211
6212 If we have an unsigned type, we cannot do this since it will change
6213 the result if the original computation overflowed. */
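      /* E.g. with undefined signed overflow, "(x * 8) / 4" folds to
	 "x * 2" and "(x * 4) / 8" folds to "x / 2" (illustrative).  */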
6214 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6215 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6216 || (tcode == MULT_EXPR
6217 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6218 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6219 && code != MULT_EXPR)))
6220 {
6221 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6222 {
6223 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6224 *strict_overflow_p = true;
6225 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6226 fold_convert (ctype,
6227 const_binop (TRUNC_DIV_EXPR,
6228 op1, c)));
6229 }
6230 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6231 {
6232 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6233 *strict_overflow_p = true;
6234 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6235 fold_convert (ctype,
6236 const_binop (TRUNC_DIV_EXPR,
6237 c, op1)));
6238 }
6239 }
6240 break;
6241
6242 default:
6243 break;
6244 }
6245
6246 return 0;
6247 }
6248 \f
6249 /* Return a node which has the indicated constant VALUE (either 0 or
6250 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6251 and is of the indicated TYPE. */
6252
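/* E.g. constant_boolean_node (true, boolean_type_node) yields
   boolean_true_node, and for a vector type it yields a vector whose
   elements are all ones (illustrative usage).  */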
6253 tree
6254 constant_boolean_node (bool value, tree type)
6255 {
6256 if (type == integer_type_node)
6257 return value ? integer_one_node : integer_zero_node;
6258 else if (type == boolean_type_node)
6259 return value ? boolean_true_node : boolean_false_node;
6260 else if (TREE_CODE (type) == VECTOR_TYPE)
6261 return build_vector_from_val (type,
6262 build_int_cst (TREE_TYPE (type),
6263 value ? -1 : 0));
6264 else
6265 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6266 }
6267
6268
6269 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6270 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6271 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6272 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6273 COND is the first argument to CODE; otherwise (as in the example
6274 given here), it is the second argument. TYPE is the type of the
6275 original expression. Return NULL_TREE if no simplification is
6276 possible. */
6277
6278 static tree
6279 fold_binary_op_with_conditional_arg (location_t loc,
6280 enum tree_code code,
6281 tree type, tree op0, tree op1,
6282 tree cond, tree arg, int cond_first_p)
6283 {
6284 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6285 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6286 tree test, true_value, false_value;
6287 tree lhs = NULL_TREE;
6288 tree rhs = NULL_TREE;
6289 enum tree_code cond_code = COND_EXPR;
6290
6291 if (TREE_CODE (cond) == COND_EXPR
6292 || TREE_CODE (cond) == VEC_COND_EXPR)
6293 {
6294 test = TREE_OPERAND (cond, 0);
6295 true_value = TREE_OPERAND (cond, 1);
6296 false_value = TREE_OPERAND (cond, 2);
6297 /* If this arm is a throw expression (its type is void), then it
6298 does not make sense to try to perform a logical or arithmetic
6299 operation involving it. */
6300 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6301 lhs = true_value;
6302 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6303 rhs = false_value;
6304 }
6305 else
6306 {
6307 tree testtype = TREE_TYPE (cond);
6308 test = cond;
6309 true_value = constant_boolean_node (true, testtype);
6310 false_value = constant_boolean_node (false, testtype);
6311 }
6312
6313 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6314 cond_code = VEC_COND_EXPR;
6315
6316 /* This transformation is only worthwhile if we don't have to wrap ARG
6317 in a SAVE_EXPR and the operation can be simplified without recursing
6318 on at least one of the branches once it is pushed inside the COND_EXPR. */
6319 if (!TREE_CONSTANT (arg)
6320 && (TREE_SIDE_EFFECTS (arg)
6321 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6322 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6323 return NULL_TREE;
6324
6325 arg = fold_convert_loc (loc, arg_type, arg);
6326 if (lhs == 0)
6327 {
6328 true_value = fold_convert_loc (loc, cond_type, true_value);
6329 if (cond_first_p)
6330 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6331 else
6332 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6333 }
6334 if (rhs == 0)
6335 {
6336 false_value = fold_convert_loc (loc, cond_type, false_value);
6337 if (cond_first_p)
6338 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6339 else
6340 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6341 }
6342
6343 /* Check that we have simplified at least one of the branches. */
6344 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6345 return NULL_TREE;
6346
6347 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6348 }
6349
6350 \f
6351 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6352
6353 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6354 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6355 ADDEND is the same as X.
6356
6357 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6358 and finite. The problematic cases are when X is zero, and its mode
6359 has signed zeros. In the case of rounding towards -infinity,
6360 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6361 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6362
6363 bool
6364 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6365 {
6366 if (!real_zerop (addend))
6367 return false;
6368
6369 /* Don't allow the fold with -fsignaling-nans. */
6370 if (HONOR_SNANS (element_mode (type)))
6371 return false;
6372
6373 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6374 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6375 return true;
6376
6377 /* In a vector or complex, we would need to check the sign of all zeros. */
6378 if (TREE_CODE (addend) != REAL_CST)
6379 return false;
6380
6381 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6382 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6383 negate = !negate;
6384
6385 /* The mode has signed zeros, and we have to honor their sign.
6386 In this situation, there is only one case we can return true for.
6387 X - 0 is the same as X unless rounding towards -infinity is
6388 supported. */
6389 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6390 }
6391
6392 /* Subroutine of fold() that checks comparisons of built-in math
6393 functions against real constants.
6394
6395 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6396 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6397 is the type of the result and ARG0 and ARG1 are the operands of the
6398 comparison. ARG1 must be a TREE_REAL_CST.
6399
6400 The function returns the constant folded tree if a simplification
6401 can be made, and NULL_TREE otherwise. */
6402
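/* For example, with a nonnegative constant, "sqrt (x) > 2.0" can fold
   to "x > 4.0"; with a negative constant, "sqrt (x) > -1.0" reduces to
   "x >= 0.0" (illustrative, subject to the NaN/Inf handling below).  */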
6403 static tree
6404 fold_mathfn_compare (location_t loc,
6405 enum built_in_function fcode, enum tree_code code,
6406 tree type, tree arg0, tree arg1)
6407 {
6408 REAL_VALUE_TYPE c;
6409
6410 if (BUILTIN_SQRT_P (fcode))
6411 {
6412 tree arg = CALL_EXPR_ARG (arg0, 0);
6413 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6414
6415 c = TREE_REAL_CST (arg1);
6416 if (REAL_VALUE_NEGATIVE (c))
6417 {
6418 /* sqrt(x) < y (also <= and ==) is always false, if y is negative. */
6419 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6420 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6421
6422 /* sqrt(x) > y is always true, if y is negative and we
6423 don't care about NaNs, i.e. negative values of x. */
6424 if (code == NE_EXPR || !HONOR_NANS (mode))
6425 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6426
6427 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6428 return fold_build2_loc (loc, GE_EXPR, type, arg,
6429 build_real (TREE_TYPE (arg), dconst0));
6430 }
6431 else if (code == GT_EXPR || code == GE_EXPR)
6432 {
6433 REAL_VALUE_TYPE c2;
6434
6435 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6436 real_convert (&c2, mode, &c2);
6437
6438 if (REAL_VALUE_ISINF (c2))
6439 {
6440 /* sqrt(x) > y is x == +Inf, when y is very large. */
6441 if (HONOR_INFINITIES (mode))
6442 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6443 build_real (TREE_TYPE (arg), c2));
6444
6445 /* sqrt(x) > y is always false, when y is very large
6446 and we don't care about infinities. */
6447 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6448 }
6449
6450 /* sqrt(x) > c is the same as x > c*c. */
6451 return fold_build2_loc (loc, code, type, arg,
6452 build_real (TREE_TYPE (arg), c2));
6453 }
6454 else if (code == LT_EXPR || code == LE_EXPR)
6455 {
6456 REAL_VALUE_TYPE c2;
6457
6458 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6459 real_convert (&c2, mode, &c2);
6460
6461 if (REAL_VALUE_ISINF (c2))
6462 {
6463 /* sqrt(x) < y is always true, when y is a very large
6464 value and we don't care about NaNs or Infinities. */
6465 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6466 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6467
6468 /* sqrt(x) < y is x != +Inf when y is very large and we
6469 don't care about NaNs. */
6470 if (! HONOR_NANS (mode))
6471 return fold_build2_loc (loc, NE_EXPR, type, arg,
6472 build_real (TREE_TYPE (arg), c2));
6473
6474 /* sqrt(x) < y is x >= 0 when y is very large and we
6475 don't care about Infinities. */
6476 if (! HONOR_INFINITIES (mode))
6477 return fold_build2_loc (loc, GE_EXPR, type, arg,
6478 build_real (TREE_TYPE (arg), dconst0));
6479
6480 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6481 arg = save_expr (arg);
6482 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6483 fold_build2_loc (loc, GE_EXPR, type, arg,
6484 build_real (TREE_TYPE (arg),
6485 dconst0)),
6486 fold_build2_loc (loc, NE_EXPR, type, arg,
6487 build_real (TREE_TYPE (arg),
6488 c2)));
6489 }
6490
6491 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6492 if (! HONOR_NANS (mode))
6493 return fold_build2_loc (loc, code, type, arg,
6494 build_real (TREE_TYPE (arg), c2));
6495
6496 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6497 arg = save_expr (arg);
6498 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6499 fold_build2_loc (loc, GE_EXPR, type, arg,
6500 build_real (TREE_TYPE (arg),
6501 dconst0)),
6502 fold_build2_loc (loc, code, type, arg,
6503 build_real (TREE_TYPE (arg),
6504 c2)));
6505 }
6506 }
6507
6508 return NULL_TREE;
6509 }
6510
6511 /* Subroutine of fold() that optimizes comparisons against Infinities,
6512 either +Inf or -Inf.
6513
6514 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6515 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6516 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6517
6518 The function returns the constant folded tree if a simplification
6519 can be made, and NULL_TREE otherwise. */
6520
6521 static tree
6522 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6523 tree arg0, tree arg1)
6524 {
6525 machine_mode mode;
6526 REAL_VALUE_TYPE max;
6527 tree temp;
6528 bool neg;
6529
6530 mode = TYPE_MODE (TREE_TYPE (arg0));
6531
6532 /* For negative infinity swap the sense of the comparison. */
6533 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6534 if (neg)
6535 code = swap_tree_comparison (code);
6536
6537 switch (code)
6538 {
6539 case GT_EXPR:
6540 /* x > +Inf is always false, if we ignore sNaNs. */
6541 if (HONOR_SNANS (mode))
6542 return NULL_TREE;
6543 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6544
6545 case LE_EXPR:
6546 /* x <= +Inf is always true, if we don't care about NaNs. */
6547 if (! HONOR_NANS (mode))
6548 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6549
6550 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6551 arg0 = save_expr (arg0);
6552 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6553
6554 case EQ_EXPR:
6555 case GE_EXPR:
6556 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6557 real_maxval (&max, neg, mode);
6558 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6559 arg0, build_real (TREE_TYPE (arg0), max));
6560
6561 case LT_EXPR:
6562 /* x < +Inf is always equal to x <= DBL_MAX. */
6563 real_maxval (&max, neg, mode);
6564 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6565 arg0, build_real (TREE_TYPE (arg0), max));
6566
6567 case NE_EXPR:
6568 /* x != +Inf is always equal to !(x > DBL_MAX). */
6569 real_maxval (&max, neg, mode);
6570 if (! HONOR_NANS (mode))
6571 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6572 arg0, build_real (TREE_TYPE (arg0), max));
6573
6574 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6575 arg0, build_real (TREE_TYPE (arg0), max));
6576 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6577
6578 default:
6579 break;
6580 }
6581
6582 return NULL_TREE;
6583 }
6584
6585 /* Subroutine of fold() that optimizes comparisons of a division by
6586 a nonzero integer constant against an integer constant, i.e.
6587 X/C1 op C2.
6588
6589 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6590 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6591 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6592
6593 The function returns the constant folded tree if a simplification
6594 can be made, and NULL_TREE otherwise. */
6595
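/* For example, for unsigned x, "x / 4 == 3" holds exactly when
   12 <= x && x <= 15, so it can fold to a single range check of that
   interval (illustrative).  */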
6596 static tree
6597 fold_div_compare (location_t loc,
6598 enum tree_code code, tree type, tree arg0, tree arg1)
6599 {
6600 tree prod, tmp, hi, lo;
6601 tree arg00 = TREE_OPERAND (arg0, 0);
6602 tree arg01 = TREE_OPERAND (arg0, 1);
6603 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6604 bool neg_overflow = false;
6605 bool overflow;
6606
6607 /* We have to do this the hard way to detect unsigned overflow.
6608 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6609 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6610 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6611 neg_overflow = false;
6612
6613 if (sign == UNSIGNED)
6614 {
6615 tmp = int_const_binop (MINUS_EXPR, arg01,
6616 build_int_cst (TREE_TYPE (arg01), 1));
6617 lo = prod;
6618
6619 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6620 val = wi::add (prod, tmp, sign, &overflow);
6621 hi = force_fit_type (TREE_TYPE (arg00), val,
6622 -1, overflow | TREE_OVERFLOW (prod));
6623 }
6624 else if (tree_int_cst_sgn (arg01) >= 0)
6625 {
6626 tmp = int_const_binop (MINUS_EXPR, arg01,
6627 build_int_cst (TREE_TYPE (arg01), 1));
6628 switch (tree_int_cst_sgn (arg1))
6629 {
6630 case -1:
6631 neg_overflow = true;
6632 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6633 hi = prod;
6634 break;
6635
6636 case 0:
6637 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6638 hi = tmp;
6639 break;
6640
6641 case 1:
6642 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6643 lo = prod;
6644 break;
6645
6646 default:
6647 gcc_unreachable ();
6648 }
6649 }
6650 else
6651 {
6652 /* A negative divisor reverses the relational operators. */
6653 code = swap_tree_comparison (code);
6654
6655 tmp = int_const_binop (PLUS_EXPR, arg01,
6656 build_int_cst (TREE_TYPE (arg01), 1));
6657 switch (tree_int_cst_sgn (arg1))
6658 {
6659 case -1:
6660 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6661 lo = prod;
6662 break;
6663
6664 case 0:
6665 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6666 lo = tmp;
6667 break;
6668
6669 case 1:
6670 neg_overflow = true;
6671 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6672 hi = prod;
6673 break;
6674
6675 default:
6676 gcc_unreachable ();
6677 }
6678 }
6679
6680 switch (code)
6681 {
6682 case EQ_EXPR:
6683 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6684 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6685 if (TREE_OVERFLOW (hi))
6686 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6687 if (TREE_OVERFLOW (lo))
6688 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6689 return build_range_check (loc, type, arg00, 1, lo, hi);
6690
6691 case NE_EXPR:
6692 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6693 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6694 if (TREE_OVERFLOW (hi))
6695 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6696 if (TREE_OVERFLOW (lo))
6697 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6698 return build_range_check (loc, type, arg00, 0, lo, hi);
6699
6700 case LT_EXPR:
6701 if (TREE_OVERFLOW (lo))
6702 {
6703 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6704 return omit_one_operand_loc (loc, type, tmp, arg00);
6705 }
6706 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6707
6708 case LE_EXPR:
6709 if (TREE_OVERFLOW (hi))
6710 {
6711 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6712 return omit_one_operand_loc (loc, type, tmp, arg00);
6713 }
6714 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6715
6716 case GT_EXPR:
6717 if (TREE_OVERFLOW (hi))
6718 {
6719 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6720 return omit_one_operand_loc (loc, type, tmp, arg00);
6721 }
6722 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6723
6724 case GE_EXPR:
6725 if (TREE_OVERFLOW (lo))
6726 {
6727 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6728 return omit_one_operand_loc (loc, type, tmp, arg00);
6729 }
6730 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6731
6732 default:
6733 break;
6734 }
6735
6736 return NULL_TREE;
6737 }
6738
6739
6740 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6741 equality/inequality test, then return a simplified form of the test
6742 using a sign test. Otherwise return NULL. TYPE is the desired
6743 result type. */
6744
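/* For example, for a 32-bit int x, "(x & 0x80000000) != 0" tests the
   sign bit and can fold to "x < 0" (illustrative).  */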
6745 static tree
6746 fold_single_bit_test_into_sign_test (location_t loc,
6747 enum tree_code code, tree arg0, tree arg1,
6748 tree result_type)
6749 {
6750 /* If this is testing a single bit, we can optimize the test. */
6751 if ((code == NE_EXPR || code == EQ_EXPR)
6752 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6753 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6754 {
6755 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6756 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6757 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6758
6759 if (arg00 != NULL_TREE
6760 /* This is only a win if casting to a signed type is cheap,
6761 i.e. when arg00's type is not a partial mode. */
6762 && TYPE_PRECISION (TREE_TYPE (arg00))
6763 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6764 {
6765 tree stype = signed_type_for (TREE_TYPE (arg00));
6766 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6767 result_type,
6768 fold_convert_loc (loc, stype, arg00),
6769 build_int_cst (stype, 0));
6770 }
6771 }
6772
6773 return NULL_TREE;
6774 }
6775
6776 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6777 equality/inequality test, then return a simplified form of
6778 the test using shifts and logical operations. Otherwise return
6779 NULL. TYPE is the desired result type. */
6780
6781 tree
6782 fold_single_bit_test (location_t loc, enum tree_code code,
6783 tree arg0, tree arg1, tree result_type)
6784 {
6785 /* If this is testing a single bit, we can optimize the test. */
6786 if ((code == NE_EXPR || code == EQ_EXPR)
6787 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6788 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6789 {
6790 tree inner = TREE_OPERAND (arg0, 0);
6791 tree type = TREE_TYPE (arg0);
6792 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6793 machine_mode operand_mode = TYPE_MODE (type);
6794 int ops_unsigned;
6795 tree signed_type, unsigned_type, intermediate_type;
6796 tree tem, one;
6797
6798 /* First, see if we can fold the single bit test into a sign-bit
6799 test. */
6800 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6801 result_type);
6802 if (tem)
6803 return tem;
6804
6805 /* Otherwise we have (A & C) != 0 where C is a single bit,
6806 convert that into ((A >> C2) & 1), where C2 = log2(C).
6807 Similarly for (A & C) == 0. */
6808
6809 /* If INNER is a right shift of a constant and it plus BITNUM does
6810 not overflow, adjust BITNUM and INNER. */
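      /* E.g. testing bit 2 of "x >> 3" is the same as testing bit 5
	 of x, provided 3 + 2 stays within x's precision
	 (illustrative).  */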
6811 if (TREE_CODE (inner) == RSHIFT_EXPR
6812 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6813 && bitnum < TYPE_PRECISION (type)
6814 && wi::ltu_p (TREE_OPERAND (inner, 1),
6815 TYPE_PRECISION (type) - bitnum))
6816 {
6817 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6818 inner = TREE_OPERAND (inner, 0);
6819 }
6820
6821 /* If we are going to be able to omit the AND below, we must do our
6822 operations as unsigned. If we must use the AND, we have a choice.
6823 Normally unsigned is faster, but for some machines signed is. */
6824 #ifdef LOAD_EXTEND_OP
6825 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6826 && !flag_syntax_only) ? 0 : 1;
6827 #else
6828 ops_unsigned = 1;
6829 #endif
6830
6831 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6832 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6833 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6834 inner = fold_convert_loc (loc, intermediate_type, inner);
6835
6836 if (bitnum != 0)
6837 inner = build2 (RSHIFT_EXPR, intermediate_type,
6838 inner, size_int (bitnum));
6839
6840 one = build_int_cst (intermediate_type, 1);
6841
6842 if (code == EQ_EXPR)
6843 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6844
6845 /* Put the AND last so it can combine with more things. */
6846 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6847
6848 /* Make sure to return the proper type. */
6849 inner = fold_convert_loc (loc, result_type, inner);
6850
6851 return inner;
6852 }
6853 return NULL_TREE;
6854 }
6855
6856 /* Check whether we are allowed to reorder operands arg0 and arg1,
6857 such that the evaluation of arg1 occurs before arg0. */
6858
6859 static bool
6860 reorder_operands_p (const_tree arg0, const_tree arg1)
6861 {
6862 if (! flag_evaluation_order)
6863 return true;
6864 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6865 return true;
6866 return ! TREE_SIDE_EFFECTS (arg0)
6867 && ! TREE_SIDE_EFFECTS (arg1);
6868 }
6869
6870 /* Test whether it is preferable to swap two operands, ARG0 and
6871 ARG1, for example because ARG0 is an integer constant and ARG1
6872 isn't. If REORDER is true, only recommend swapping if we can
6873 evaluate the operands in reverse order. */
6874
6875 bool
6876 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6877 {
6878 if (CONSTANT_CLASS_P (arg1))
6879 return 0;
6880 if (CONSTANT_CLASS_P (arg0))
6881 return 1;
6882
6883 STRIP_NOPS (arg0);
6884 STRIP_NOPS (arg1);
6885
6886 if (TREE_CONSTANT (arg1))
6887 return 0;
6888 if (TREE_CONSTANT (arg0))
6889 return 1;
6890
6891 if (reorder && flag_evaluation_order
6892 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6893 return 0;
6894
6895 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6896 for commutative and comparison operators. Ensuring a canonical
6897 form allows the optimizers to find additional redundancies without
6898 having to explicitly check for both orderings. */
6899 if (TREE_CODE (arg0) == SSA_NAME
6900 && TREE_CODE (arg1) == SSA_NAME
6901 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6902 return 1;
6903
6904 /* Put SSA_NAMEs last. */
6905 if (TREE_CODE (arg1) == SSA_NAME)
6906 return 0;
6907 if (TREE_CODE (arg0) == SSA_NAME)
6908 return 1;
6909
6910 /* Put variables last. */
6911 if (DECL_P (arg1))
6912 return 0;
6913 if (DECL_P (arg0))
6914 return 1;
6915
6916 return 0;
6917 }
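
/* For illustration: tree_swap_operands_p (5, x, true) is true, so a
   caller canonicalizes 5 + x to x + 5; for two SSA names the operand
   with the larger SSA_NAME_VERSION is moved second, so _7 == _3 becomes
   _3 == _7 and redundancy elimination only has to match one ordering.  */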
6918
6919 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6920 ARG0 is extended to a wider type. */
6921
6922 static tree
6923 fold_widened_comparison (location_t loc, enum tree_code code,
6924 tree type, tree arg0, tree arg1)
6925 {
6926 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6927 tree arg1_unw;
6928 tree shorter_type, outer_type;
6929 tree min, max;
6930 bool above, below;
6931
6932 if (arg0_unw == arg0)
6933 return NULL_TREE;
6934 shorter_type = TREE_TYPE (arg0_unw);
6935
6936 #ifdef HAVE_canonicalize_funcptr_for_compare
6937 /* Disable this optimization if we're casting a function pointer
6938 type on targets that require function pointer canonicalization. */
6939 if (HAVE_canonicalize_funcptr_for_compare
6940 && TREE_CODE (shorter_type) == POINTER_TYPE
6941 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6942 return NULL_TREE;
6943 #endif
6944
6945 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6946 return NULL_TREE;
6947
6948 arg1_unw = get_unwidened (arg1, NULL_TREE);
6949
6950 /* If possible, express the comparison in the shorter mode. */
6951 if ((code == EQ_EXPR || code == NE_EXPR
6952 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6953 && (TREE_TYPE (arg1_unw) == shorter_type
6954 || ((TYPE_PRECISION (shorter_type)
6955 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6956 && (TYPE_UNSIGNED (shorter_type)
6957 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6958 || (TREE_CODE (arg1_unw) == INTEGER_CST
6959 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6960 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6961 && int_fits_type_p (arg1_unw, shorter_type))))
6962 return fold_build2_loc (loc, code, type, arg0_unw,
6963 fold_convert_loc (loc, shorter_type, arg1_unw));
6964
6965 if (TREE_CODE (arg1_unw) != INTEGER_CST
6966 || TREE_CODE (shorter_type) != INTEGER_TYPE
6967 || !int_fits_type_p (arg1_unw, shorter_type))
6968 return NULL_TREE;
6969
6970 /* If we are comparing with an integer that does not fit into the range
6971 of the shorter type, the result is known. */
6972 outer_type = TREE_TYPE (arg1_unw);
6973 min = lower_bound_in_type (outer_type, shorter_type);
6974 max = upper_bound_in_type (outer_type, shorter_type);
6975
6976 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6977 max, arg1_unw));
6978 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6979 arg1_unw, min));
6980
6981 switch (code)
6982 {
6983 case EQ_EXPR:
6984 if (above || below)
6985 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6986 break;
6987
6988 case NE_EXPR:
6989 if (above || below)
6990 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6991 break;
6992
6993 case LT_EXPR:
6994 case LE_EXPR:
6995 if (above)
6996 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6997 else if (below)
6998 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6999
7000 case GT_EXPR:
7001 case GE_EXPR:
7002 if (above)
7003 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7004 else if (below)
7005 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7006
7007 default:
7008 break;
7009 }
7010
7011 return NULL_TREE;
7012 }
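
/* Worked example (16-bit short, 32-bit int): in (int) s == 70000 the
   constant lies above upper_bound_in_type, i.e. above 32767, so the
   equality folds to constant 0; omit_one_operand_loc still evaluates
   the widened operand if it has side effects.  */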
7013
7014 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7015 ARG0 just the signedness is changed. */
7016
7017 static tree
7018 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7019 tree arg0, tree arg1)
7020 {
7021 tree arg0_inner;
7022 tree inner_type, outer_type;
7023
7024 if (!CONVERT_EXPR_P (arg0))
7025 return NULL_TREE;
7026
7027 outer_type = TREE_TYPE (arg0);
7028 arg0_inner = TREE_OPERAND (arg0, 0);
7029 inner_type = TREE_TYPE (arg0_inner);
7030
7031 #ifdef HAVE_canonicalize_funcptr_for_compare
7032 /* Disable this optimization if we're casting a function pointer
7033 type on targets that require function pointer canonicalization. */
7034 if (HAVE_canonicalize_funcptr_for_compare
7035 && TREE_CODE (inner_type) == POINTER_TYPE
7036 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7037 return NULL_TREE;
7038 #endif
7039
7040 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7041 return NULL_TREE;
7042
7043 if (TREE_CODE (arg1) != INTEGER_CST
7044 && !(CONVERT_EXPR_P (arg1)
7045 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7046 return NULL_TREE;
7047
7048 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7049 && code != NE_EXPR
7050 && code != EQ_EXPR)
7051 return NULL_TREE;
7052
7053 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7054 return NULL_TREE;
7055
7056 if (TREE_CODE (arg1) == INTEGER_CST)
7057 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7058 TREE_OVERFLOW (arg1));
7059 else
7060 arg1 = fold_convert_loc (loc, inner_type, arg1);
7061
7062 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7063 }
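
/* For illustration: with unsigned int u, (int) u == 4 only changes the
   signedness of the operand, so it folds to u == 4U; an ordering test
   such as (int) u < 4 is rejected above because its result depends on
   the sign change.  */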
7064
7065
7066 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7067 means A >= Y && A != MAX, but in this case we know that
7068 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7069
7070 static tree
7071 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7072 {
7073 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7074
7075 if (TREE_CODE (bound) == LT_EXPR)
7076 a = TREE_OPERAND (bound, 0);
7077 else if (TREE_CODE (bound) == GT_EXPR)
7078 a = TREE_OPERAND (bound, 1);
7079 else
7080 return NULL_TREE;
7081
7082 typea = TREE_TYPE (a);
7083 if (!INTEGRAL_TYPE_P (typea)
7084 && !POINTER_TYPE_P (typea))
7085 return NULL_TREE;
7086
7087 if (TREE_CODE (ineq) == LT_EXPR)
7088 {
7089 a1 = TREE_OPERAND (ineq, 1);
7090 y = TREE_OPERAND (ineq, 0);
7091 }
7092 else if (TREE_CODE (ineq) == GT_EXPR)
7093 {
7094 a1 = TREE_OPERAND (ineq, 0);
7095 y = TREE_OPERAND (ineq, 1);
7096 }
7097 else
7098 return NULL_TREE;
7099
7100 if (TREE_TYPE (a1) != typea)
7101 return NULL_TREE;
7102
7103 if (POINTER_TYPE_P (typea))
7104 {
7105 /* Convert the pointer types into integers before taking the difference.  */
7106 tree ta = fold_convert_loc (loc, ssizetype, a);
7107 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7108 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7109 }
7110 else
7111 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7112
7113 if (!diff || !integer_onep (diff))
7114 return NULL_TREE;
7115
7116 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7117 }
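
/* Worked example: for BOUND = a < x and INEQ = a + 1 > y, DIFF computes
   (a + 1) - a == 1, so the result is a >= y; the caller conjoins it with
   BOUND, turning a < x && a + 1 > y into a < x && a >= y without the
   increment.  */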
7118
7119 /* Fold a sum or difference of at least one multiplication.
7120 Returns the folded tree or NULL if no simplification could be made. */
7121
7122 static tree
7123 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7124 tree arg0, tree arg1)
7125 {
7126 tree arg00, arg01, arg10, arg11;
7127 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7128
7129 /* (A * C) +- (B * C) -> (A+-B) * C.
7130 (A * C) +- A -> A * (C+-1).
7131 We are most concerned about the case where C is a constant,
7132 but other combinations show up during loop reduction. Since
7133 it is not difficult, try all four possibilities. */
7134
7135 if (TREE_CODE (arg0) == MULT_EXPR)
7136 {
7137 arg00 = TREE_OPERAND (arg0, 0);
7138 arg01 = TREE_OPERAND (arg0, 1);
7139 }
7140 else if (TREE_CODE (arg0) == INTEGER_CST)
7141 {
7142 arg00 = build_one_cst (type);
7143 arg01 = arg0;
7144 }
7145 else
7146 {
7147 /* We cannot generate constant 1 for fract. */
7148 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7149 return NULL_TREE;
7150 arg00 = arg0;
7151 arg01 = build_one_cst (type);
7152 }
7153 if (TREE_CODE (arg1) == MULT_EXPR)
7154 {
7155 arg10 = TREE_OPERAND (arg1, 0);
7156 arg11 = TREE_OPERAND (arg1, 1);
7157 }
7158 else if (TREE_CODE (arg1) == INTEGER_CST)
7159 {
7160 arg10 = build_one_cst (type);
7161 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7162 the purpose of this canonicalization. */
7163 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7164 && negate_expr_p (arg1)
7165 && code == PLUS_EXPR)
7166 {
7167 arg11 = negate_expr (arg1);
7168 code = MINUS_EXPR;
7169 }
7170 else
7171 arg11 = arg1;
7172 }
7173 else
7174 {
7175 /* We cannot generate constant 1 for fract. */
7176 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7177 return NULL_TREE;
7178 arg10 = arg1;
7179 arg11 = build_one_cst (type);
7180 }
7181 same = NULL_TREE;
7182
7183 if (operand_equal_p (arg01, arg11, 0))
7184 same = arg01, alt0 = arg00, alt1 = arg10;
7185 else if (operand_equal_p (arg00, arg10, 0))
7186 same = arg00, alt0 = arg01, alt1 = arg11;
7187 else if (operand_equal_p (arg00, arg11, 0))
7188 same = arg00, alt0 = arg01, alt1 = arg10;
7189 else if (operand_equal_p (arg01, arg10, 0))
7190 same = arg01, alt0 = arg00, alt1 = arg11;
7191
7192 /* No identical multiplicands; see if we can find a common
7193 power-of-two factor in non-power-of-two multiplies. This
7194 can help in multi-dimensional array access. */
7195 else if (tree_fits_shwi_p (arg01)
7196 && tree_fits_shwi_p (arg11))
7197 {
7198 HOST_WIDE_INT int01, int11, tmp;
7199 bool swap = false;
7200 tree maybe_same;
7201 int01 = tree_to_shwi (arg01);
7202 int11 = tree_to_shwi (arg11);
7203
7204 /* Move min of absolute values to int11. */
7205 if (absu_hwi (int01) < absu_hwi (int11))
7206 {
7207 tmp = int01, int01 = int11, int11 = tmp;
7208 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7209 maybe_same = arg01;
7210 swap = true;
7211 }
7212 else
7213 maybe_same = arg11;
7214
7215 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7216 /* The remainder should not be a constant, otherwise we
7217 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7218 increased the number of multiplications necessary. */
7219 && TREE_CODE (arg10) != INTEGER_CST)
7220 {
7221 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7222 build_int_cst (TREE_TYPE (arg00),
7223 int01 / int11));
7224 alt1 = arg10;
7225 same = maybe_same;
7226 if (swap)
7227 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7228 }
7229 }
7230
7231 if (same)
7232 return fold_build2_loc (loc, MULT_EXPR, type,
7233 fold_build2_loc (loc, code, type,
7234 fold_convert_loc (loc, type, alt0),
7235 fold_convert_loc (loc, type, alt1)),
7236 fold_convert_loc (loc, type, same));
7237
7238 return NULL_TREE;
7239 }
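
/* Examples of the cases above: a * c + b * c matches operand_equal_p and
   becomes (a + b) * c; i * 4 + j * 2 takes the power-of-two path
   (int01 == 4, int11 == 2, exact_log2 (2) == 1) and becomes
   (i * 2 + j) * 2; i * 4 + 2 is left alone, since (i * 2 + 1) * 2 would
   add a multiplication.  */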
7240
7241 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7242 specified by EXPR into the buffer PTR of length LEN bytes.
7243 Return the number of bytes placed in the buffer, or zero
7244 upon failure. */
7245
7246 static int
7247 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7248 {
7249 tree type = TREE_TYPE (expr);
7250 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7251 int byte, offset, word, words;
7252 unsigned char value;
7253
7254 if ((off == -1 && total_bytes > len)
7255 || off >= total_bytes)
7256 return 0;
7257 if (off == -1)
7258 off = 0;
7259 words = total_bytes / UNITS_PER_WORD;
7260
7261 for (byte = 0; byte < total_bytes; byte++)
7262 {
7263 int bitpos = byte * BITS_PER_UNIT;
7264 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7265 number of bytes. */
7266 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7267
7268 if (total_bytes > UNITS_PER_WORD)
7269 {
7270 word = byte / UNITS_PER_WORD;
7271 if (WORDS_BIG_ENDIAN)
7272 word = (words - 1) - word;
7273 offset = word * UNITS_PER_WORD;
7274 if (BYTES_BIG_ENDIAN)
7275 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7276 else
7277 offset += byte % UNITS_PER_WORD;
7278 }
7279 else
7280 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7281 if (offset >= off
7282 && offset - off < len)
7283 ptr[offset - off] = value;
7284 }
7285 return MIN (len, total_bytes - off);
7286 }
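
/* For illustration (assuming 8-bit bytes): encoding the 32-bit
   INTEGER_CST 0x01020304 stores { 0x04, 0x03, 0x02, 0x01 } on a
   little-endian target and { 0x01, 0x02, 0x03, 0x04 } on a big-endian
   one; with OFF == 2 and LEN == 2 only the bytes at target offsets 2
   and 3 are copied out.  */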
7287
7288
7289 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7290 specified by EXPR into the buffer PTR of length LEN bytes.
7291 Return the number of bytes placed in the buffer, or zero
7292 upon failure. */
7293
7294 static int
7295 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7296 {
7297 tree type = TREE_TYPE (expr);
7298 machine_mode mode = TYPE_MODE (type);
7299 int total_bytes = GET_MODE_SIZE (mode);
7300 FIXED_VALUE_TYPE value;
7301 tree i_value, i_type;
7302
7303 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7304 return 0;
7305
7306 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7307
7308 if (NULL_TREE == i_type
7309 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7310 return 0;
7311
7312 value = TREE_FIXED_CST (expr);
7313 i_value = double_int_to_tree (i_type, value.data);
7314
7315 return native_encode_int (i_value, ptr, len, off);
7316 }
7317
7318
7319 /* Subroutine of native_encode_expr. Encode the REAL_CST
7320 specified by EXPR into the buffer PTR of length LEN bytes.
7321 Return the number of bytes placed in the buffer, or zero
7322 upon failure. */
7323
7324 static int
7325 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7326 {
7327 tree type = TREE_TYPE (expr);
7328 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7329 int byte, offset, word, words, bitpos;
7330 unsigned char value;
7331
7332 /* There are always 32 bits in each long, no matter the size of
7333 the host's long.  We handle floating point representations with
7334 up to 192 bits. */
7335 long tmp[6];
7336
7337 if ((off == -1 && total_bytes > len)
7338 || off >= total_bytes)
7339 return 0;
7340 if (off == -1)
7341 off = 0;
7342 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7343
7344 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7345
7346 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7347 bitpos += BITS_PER_UNIT)
7348 {
7349 byte = (bitpos / BITS_PER_UNIT) & 3;
7350 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7351
7352 if (UNITS_PER_WORD < 4)
7353 {
7354 word = byte / UNITS_PER_WORD;
7355 if (WORDS_BIG_ENDIAN)
7356 word = (words - 1) - word;
7357 offset = word * UNITS_PER_WORD;
7358 if (BYTES_BIG_ENDIAN)
7359 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7360 else
7361 offset += byte % UNITS_PER_WORD;
7362 }
7363 else
7364 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7365 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7366 if (offset >= off
7367 && offset - off < len)
7368 ptr[offset - off] = value;
7369 }
7370 return MIN (len, total_bytes - off);
7371 }
7372
7373 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7374 specified by EXPR into the buffer PTR of length LEN bytes.
7375 Return the number of bytes placed in the buffer, or zero
7376 upon failure. */
7377
7378 static int
7379 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7380 {
7381 int rsize, isize;
7382 tree part;
7383
7384 part = TREE_REALPART (expr);
7385 rsize = native_encode_expr (part, ptr, len, off);
7386 if (off == -1
7387 && rsize == 0)
7388 return 0;
7389 part = TREE_IMAGPART (expr);
7390 if (off != -1)
7391 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7392 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7393 if (off == -1
7394 && isize != rsize)
7395 return 0;
7396 return rsize + isize;
7397 }
7398
7399
7400 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7401 specified by EXPR into the buffer PTR of length LEN bytes.
7402 Return the number of bytes placed in the buffer, or zero
7403 upon failure. */
7404
7405 static int
7406 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7407 {
7408 unsigned i, count;
7409 int size, offset;
7410 tree itype, elem;
7411
7412 offset = 0;
7413 count = VECTOR_CST_NELTS (expr);
7414 itype = TREE_TYPE (TREE_TYPE (expr));
7415 size = GET_MODE_SIZE (TYPE_MODE (itype));
7416 for (i = 0; i < count; i++)
7417 {
7418 if (off >= size)
7419 {
7420 off -= size;
7421 continue;
7422 }
7423 elem = VECTOR_CST_ELT (expr, i);
7424 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7425 if ((off == -1 && res != size)
7426 || res == 0)
7427 return 0;
7428 offset += res;
7429 if (offset >= len)
7430 return offset;
7431 if (off != -1)
7432 off = 0;
7433 }
7434 return offset;
7435 }
7436
7437
7438 /* Subroutine of native_encode_expr. Encode the STRING_CST
7439 specified by EXPR into the buffer PTR of length LEN bytes.
7440 Return the number of bytes placed in the buffer, or zero
7441 upon failure. */
7442
7443 static int
7444 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7445 {
7446 tree type = TREE_TYPE (expr);
7447 HOST_WIDE_INT total_bytes;
7448
7449 if (TREE_CODE (type) != ARRAY_TYPE
7450 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7451 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7452 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7453 return 0;
7454 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7455 if ((off == -1 && total_bytes > len)
7456 || off >= total_bytes)
7457 return 0;
7458 if (off == -1)
7459 off = 0;
7460 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7461 {
7462 int written = 0;
7463 if (off < TREE_STRING_LENGTH (expr))
7464 {
7465 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7466 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7467 }
7468 memset (ptr + written, 0,
7469 MIN (total_bytes - written, len - written));
7470 }
7471 else
7472 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7473 return MIN (total_bytes - off, len);
7474 }
7475
7476
7477 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7478 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST
7479 specified by EXPR into the buffer PTR of length LEN bytes.  If OFF
7480 is not -1 then start the encoding at byte offset OFF and encode at most LEN bytes.
7481 Return the number of bytes placed in the buffer, or zero upon failure. */
7482
7483 int
7484 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7485 {
7486 switch (TREE_CODE (expr))
7487 {
7488 case INTEGER_CST:
7489 return native_encode_int (expr, ptr, len, off);
7490
7491 case REAL_CST:
7492 return native_encode_real (expr, ptr, len, off);
7493
7494 case FIXED_CST:
7495 return native_encode_fixed (expr, ptr, len, off);
7496
7497 case COMPLEX_CST:
7498 return native_encode_complex (expr, ptr, len, off);
7499
7500 case VECTOR_CST:
7501 return native_encode_vector (expr, ptr, len, off);
7502
7503 case STRING_CST:
7504 return native_encode_string (expr, ptr, len, off);
7505
7506 default:
7507 return 0;
7508 }
7509 }
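
/* Usage sketch (illustrative, not called anywhere in this file): the
   encoder pairs with native_interpret_expr below to round-trip a
   constant through its target memory image.  Here int_type is a
   placeholder for any integer type of the desired width:

     unsigned char buf[64];
     int n = native_encode_expr (expr, buf, sizeof (buf));
     tree t = n ? native_interpret_expr (int_type, buf, n) : NULL_TREE;

   A return of zero must be treated as "cannot encode", not as an empty
   encoding.  */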
7510
7511
7512 /* Subroutine of native_interpret_expr. Interpret the contents of
7513 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7514 If the buffer cannot be interpreted, return NULL_TREE. */
7515
7516 static tree
7517 native_interpret_int (tree type, const unsigned char *ptr, int len)
7518 {
7519 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7520
7521 if (total_bytes > len
7522 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7523 return NULL_TREE;
7524
7525 wide_int result = wi::from_buffer (ptr, total_bytes);
7526
7527 return wide_int_to_tree (type, result);
7528 }
7529
7530
7531 /* Subroutine of native_interpret_expr. Interpret the contents of
7532 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7533 If the buffer cannot be interpreted, return NULL_TREE. */
7534
7535 static tree
7536 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7537 {
7538 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7539 double_int result;
7540 FIXED_VALUE_TYPE fixed_value;
7541
7542 if (total_bytes > len
7543 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7544 return NULL_TREE;
7545
7546 result = double_int::from_buffer (ptr, total_bytes);
7547 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7548
7549 return build_fixed (type, fixed_value);
7550 }
7551
7552
7553 /* Subroutine of native_interpret_expr. Interpret the contents of
7554 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7555 If the buffer cannot be interpreted, return NULL_TREE. */
7556
7557 static tree
7558 native_interpret_real (tree type, const unsigned char *ptr, int len)
7559 {
7560 machine_mode mode = TYPE_MODE (type);
7561 int total_bytes = GET_MODE_SIZE (mode);
7562 int byte, offset, word, words, bitpos;
7563 unsigned char value;
7564 /* There are always 32 bits in each long, no matter the size of
7565 the host's long.  We handle floating point representations with
7566 up to 192 bits. */
7567 REAL_VALUE_TYPE r;
7568 long tmp[6];
7569
7571 if (total_bytes > len || total_bytes > 24)
7572 return NULL_TREE;
7573 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7574
7575 memset (tmp, 0, sizeof (tmp));
7576 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7577 bitpos += BITS_PER_UNIT)
7578 {
7579 byte = (bitpos / BITS_PER_UNIT) & 3;
7580 if (UNITS_PER_WORD < 4)
7581 {
7582 word = byte / UNITS_PER_WORD;
7583 if (WORDS_BIG_ENDIAN)
7584 word = (words - 1) - word;
7585 offset = word * UNITS_PER_WORD;
7586 if (BYTES_BIG_ENDIAN)
7587 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7588 else
7589 offset += byte % UNITS_PER_WORD;
7590 }
7591 else
7592 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7593 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7594
7595 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7596 }
7597
7598 real_from_target (&r, tmp, mode);
7599 return build_real (type, r);
7600 }
7601
7602
7603 /* Subroutine of native_interpret_expr. Interpret the contents of
7604 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7605 If the buffer cannot be interpreted, return NULL_TREE. */
7606
7607 static tree
7608 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7609 {
7610 tree etype, rpart, ipart;
7611 int size;
7612
7613 etype = TREE_TYPE (type);
7614 size = GET_MODE_SIZE (TYPE_MODE (etype));
7615 if (size * 2 > len)
7616 return NULL_TREE;
7617 rpart = native_interpret_expr (etype, ptr, size);
7618 if (!rpart)
7619 return NULL_TREE;
7620 ipart = native_interpret_expr (etype, ptr+size, size);
7621 if (!ipart)
7622 return NULL_TREE;
7623 return build_complex (type, rpart, ipart);
7624 }
7625
7626
7627 /* Subroutine of native_interpret_expr. Interpret the contents of
7628 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7629 If the buffer cannot be interpreted, return NULL_TREE. */
7630
7631 static tree
7632 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7633 {
7634 tree etype, elem;
7635 int i, size, count;
7636 tree *elements;
7637
7638 etype = TREE_TYPE (type);
7639 size = GET_MODE_SIZE (TYPE_MODE (etype));
7640 count = TYPE_VECTOR_SUBPARTS (type);
7641 if (size * count > len)
7642 return NULL_TREE;
7643
7644 elements = XALLOCAVEC (tree, count);
7645 for (i = count - 1; i >= 0; i--)
7646 {
7647 elem = native_interpret_expr (etype, ptr+(i*size), size);
7648 if (!elem)
7649 return NULL_TREE;
7650 elements[i] = elem;
7651 }
7652 return build_vector (type, elements);
7653 }
7654
7655
7656 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7657 the buffer PTR of length LEN as a constant of type TYPE. For
7658 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7659 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7660 return NULL_TREE. */
7661
7662 tree
7663 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7664 {
7665 switch (TREE_CODE (type))
7666 {
7667 case INTEGER_TYPE:
7668 case ENUMERAL_TYPE:
7669 case BOOLEAN_TYPE:
7670 case POINTER_TYPE:
7671 case REFERENCE_TYPE:
7672 return native_interpret_int (type, ptr, len);
7673
7674 case REAL_TYPE:
7675 return native_interpret_real (type, ptr, len);
7676
7677 case FIXED_POINT_TYPE:
7678 return native_interpret_fixed (type, ptr, len);
7679
7680 case COMPLEX_TYPE:
7681 return native_interpret_complex (type, ptr, len);
7682
7683 case VECTOR_TYPE:
7684 return native_interpret_vector (type, ptr, len);
7685
7686 default:
7687 return NULL_TREE;
7688 }
7689 }
7690
7691 /* Returns true if we can interpret the contents of a native encoding
7692 as TYPE. */
7693
7694 static bool
7695 can_native_interpret_type_p (tree type)
7696 {
7697 switch (TREE_CODE (type))
7698 {
7699 case INTEGER_TYPE:
7700 case ENUMERAL_TYPE:
7701 case BOOLEAN_TYPE:
7702 case POINTER_TYPE:
7703 case REFERENCE_TYPE:
7704 case FIXED_POINT_TYPE:
7705 case REAL_TYPE:
7706 case COMPLEX_TYPE:
7707 case VECTOR_TYPE:
7708 return true;
7709 default:
7710 return false;
7711 }
7712 }
7713
7714 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7715 TYPE at compile-time. If we're unable to perform the conversion
7716 return NULL_TREE. */
7717
7718 static tree
7719 fold_view_convert_expr (tree type, tree expr)
7720 {
7721 /* We support up to 512-bit values (for V8DFmode). */
7722 unsigned char buffer[64];
7723 int len;
7724
7725 /* Check that the host and target are sane. */
7726 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7727 return NULL_TREE;
7728
7729 len = native_encode_expr (expr, buffer, sizeof (buffer));
7730 if (len == 0)
7731 return NULL_TREE;
7732
7733 return native_interpret_expr (type, buffer, len);
7734 }
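
/* For illustration, on a little-endian target with IEEE single floats:
   folding VIEW_CONVERT_EXPR<float>(0x3f800000) encodes the integer
   constant as { 0x00, 0x00, 0x80, 0x3f } and reinterprets those bytes
   as the REAL_CST 1.0f.  */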
7735
7736 /* Build an expression for the address of T. Folds away INDIRECT_REF
7737 to avoid confusing the gimplify process. */
7738
7739 tree
7740 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7741 {
7742 /* The size of the object is not relevant when talking about its address. */
7743 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7744 t = TREE_OPERAND (t, 0);
7745
7746 if (TREE_CODE (t) == INDIRECT_REF)
7747 {
7748 t = TREE_OPERAND (t, 0);
7749
7750 if (TREE_TYPE (t) != ptrtype)
7751 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7752 }
7753 else if (TREE_CODE (t) == MEM_REF
7754 && integer_zerop (TREE_OPERAND (t, 1)))
7755 return TREE_OPERAND (t, 0);
7756 else if (TREE_CODE (t) == MEM_REF
7757 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7758 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7759 TREE_OPERAND (t, 0),
7760 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7761 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7762 {
7763 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7764
7765 if (TREE_TYPE (t) != ptrtype)
7766 t = fold_convert_loc (loc, ptrtype, t);
7767 }
7768 else
7769 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7770
7771 return t;
7772 }
7773
7774 /* Build an expression for the address of T. */
7775
7776 tree
7777 build_fold_addr_expr_loc (location_t loc, tree t)
7778 {
7779 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7780
7781 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7782 }
7783
7784 /* Fold a unary expression of code CODE and type TYPE with operand
7785 OP0. Return the folded expression if folding is successful.
7786 Otherwise, return NULL_TREE. */
7787
7788 tree
7789 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7790 {
7791 tree tem;
7792 tree arg0;
7793 enum tree_code_class kind = TREE_CODE_CLASS (code);
7794
7795 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7796 && TREE_CODE_LENGTH (code) == 1);
7797
7798 arg0 = op0;
7799 if (arg0)
7800 {
7801 if (CONVERT_EXPR_CODE_P (code)
7802 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7803 {
7804 /* Don't use STRIP_NOPS, because signedness of argument type
7805 matters. */
7806 STRIP_SIGN_NOPS (arg0);
7807 }
7808 else
7809 {
7810 /* Strip any conversions that don't change the mode. This
7811 is safe for every expression, except for a comparison
7812 expression because its signedness is derived from its
7813 operands.
7814
7815 Note that this is done as an internal manipulation within
7816 the constant folder, in order to find the simplest
7817 representation of the arguments so that their form can be
7818 studied.  In any case, the appropriate type conversions
7819 should be put back in the tree that will get out of the
7820 constant folder. */
7821 STRIP_NOPS (arg0);
7822 }
7823
7824 if (CONSTANT_CLASS_P (arg0))
7825 {
7826 tree tem = const_unop (code, type, arg0);
7827 if (tem)
7828 {
7829 if (TREE_TYPE (tem) != type)
7830 tem = fold_convert_loc (loc, type, tem);
7831 return tem;
7832 }
7833 }
7834 }
7835
7836 tem = generic_simplify (loc, code, type, op0);
7837 if (tem)
7838 return tem;
7839
7840 if (TREE_CODE_CLASS (code) == tcc_unary)
7841 {
7842 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7843 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7844 fold_build1_loc (loc, code, type,
7845 fold_convert_loc (loc, TREE_TYPE (op0),
7846 TREE_OPERAND (arg0, 1))));
7847 else if (TREE_CODE (arg0) == COND_EXPR)
7848 {
7849 tree arg01 = TREE_OPERAND (arg0, 1);
7850 tree arg02 = TREE_OPERAND (arg0, 2);
7851 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7852 arg01 = fold_build1_loc (loc, code, type,
7853 fold_convert_loc (loc,
7854 TREE_TYPE (op0), arg01));
7855 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7856 arg02 = fold_build1_loc (loc, code, type,
7857 fold_convert_loc (loc,
7858 TREE_TYPE (op0), arg02));
7859 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7860 arg01, arg02);
7861
7862 /* If this was a conversion, and all we did was to move it
7863 inside the COND_EXPR, bring it back out.  But leave it if
7864 it is a conversion from integer to integer and the
7865 result precision is no wider than a word since such a
7866 conversion is cheap and may be optimized away by combine,
7867 while it couldn't if it were outside the COND_EXPR. Then return
7868 so we don't get into an infinite recursion loop taking the
7869 conversion out and then back in. */
7870
7871 if ((CONVERT_EXPR_CODE_P (code)
7872 || code == NON_LVALUE_EXPR)
7873 && TREE_CODE (tem) == COND_EXPR
7874 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7875 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7876 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7877 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7878 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7879 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7880 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7881 && (INTEGRAL_TYPE_P
7882 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7883 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7884 || flag_syntax_only))
7885 tem = build1_loc (loc, code, type,
7886 build3 (COND_EXPR,
7887 TREE_TYPE (TREE_OPERAND
7888 (TREE_OPERAND (tem, 1), 0)),
7889 TREE_OPERAND (tem, 0),
7890 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7891 TREE_OPERAND (TREE_OPERAND (tem, 2),
7892 0)));
7893 return tem;
7894 }
7895 }
7896
7897 switch (code)
7898 {
7899 case NON_LVALUE_EXPR:
7900 if (!maybe_lvalue_p (op0))
7901 return fold_convert_loc (loc, type, op0);
7902 return NULL_TREE;
7903
7904 CASE_CONVERT:
7905 case FLOAT_EXPR:
7906 case FIX_TRUNC_EXPR:
7907 if (COMPARISON_CLASS_P (op0))
7908 {
7909 /* If we have (type) (a CMP b) and type is an integral type, return
7910 a new expression involving the new type.  Canonicalize
7911 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7912 non-integral type.
7913 Do not fold the result as that would not simplify further; also,
7914 folding again results in recursion.  */
7915 if (TREE_CODE (type) == BOOLEAN_TYPE)
7916 return build2_loc (loc, TREE_CODE (op0), type,
7917 TREE_OPERAND (op0, 0),
7918 TREE_OPERAND (op0, 1));
7919 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7920 && TREE_CODE (type) != VECTOR_TYPE)
7921 return build3_loc (loc, COND_EXPR, type, op0,
7922 constant_boolean_node (true, type),
7923 constant_boolean_node (false, type));
7924 }
7925
7926 /* Handle (T *)&A.B.C for A being of type T and B and C
7927 living at offset zero. This occurs frequently in
7928 C++ upcasting and then accessing the base. */
7929 if (TREE_CODE (op0) == ADDR_EXPR
7930 && POINTER_TYPE_P (type)
7931 && handled_component_p (TREE_OPERAND (op0, 0)))
7932 {
7933 HOST_WIDE_INT bitsize, bitpos;
7934 tree offset;
7935 machine_mode mode;
7936 int unsignedp, volatilep;
7937 tree base = TREE_OPERAND (op0, 0);
7938 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7939 &mode, &unsignedp, &volatilep, false);
7940 /* If the reference was to a (constant) zero offset, we can use
7941 the address of the base if it has the same base type
7942 as the result type and the pointer type is unqualified. */
7943 if (! offset && bitpos == 0
7944 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7945 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7946 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7947 return fold_convert_loc (loc, type,
7948 build_fold_addr_expr_loc (loc, base));
7949 }
7950
7951 if (TREE_CODE (op0) == MODIFY_EXPR
7952 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7953 /* Detect assigning a bitfield. */
7954 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7955 && DECL_BIT_FIELD
7956 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7957 {
7958 /* Don't leave an assignment inside a conversion
7959 unless assigning a bitfield. */
7960 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7961 /* First do the assignment, then return converted constant. */
7962 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7963 TREE_NO_WARNING (tem) = 1;
7964 TREE_USED (tem) = 1;
7965 return tem;
7966 }
7967
7968 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7969 constants (if x has signed type, the sign bit cannot be set
7970 in c). This folds extension into the BIT_AND_EXPR.
7971 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7972 very likely don't have maximal range for their precision and this
7973 transformation effectively doesn't preserve non-maximal ranges. */
7974 if (TREE_CODE (type) == INTEGER_TYPE
7975 && TREE_CODE (op0) == BIT_AND_EXPR
7976 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7977 {
7978 tree and_expr = op0;
7979 tree and0 = TREE_OPERAND (and_expr, 0);
7980 tree and1 = TREE_OPERAND (and_expr, 1);
7981 int change = 0;
7982
7983 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7984 || (TYPE_PRECISION (type)
7985 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7986 change = 1;
7987 else if (TYPE_PRECISION (TREE_TYPE (and1))
7988 <= HOST_BITS_PER_WIDE_INT
7989 && tree_fits_uhwi_p (and1))
7990 {
7991 unsigned HOST_WIDE_INT cst;
7992
7993 cst = tree_to_uhwi (and1);
7994 cst &= HOST_WIDE_INT_M1U
7995 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7996 change = (cst == 0);
7997 #ifdef LOAD_EXTEND_OP
7998 if (change
7999 && !flag_syntax_only
8000 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8001 == ZERO_EXTEND))
8002 {
8003 tree uns = unsigned_type_for (TREE_TYPE (and0));
8004 and0 = fold_convert_loc (loc, uns, and0);
8005 and1 = fold_convert_loc (loc, uns, and1);
8006 }
8007 #endif
8008 }
8009 if (change)
8010 {
8011 tem = force_fit_type (type, wi::to_widest (and1), 0,
8012 TREE_OVERFLOW (and1));
8013 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8014 fold_convert_loc (loc, type, and0), tem);
8015 }
8016 }
8017
8018 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8019 when one of the new casts will fold away. Conservatively we assume
8020 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8021 if (POINTER_TYPE_P (type)
8022 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8023 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8024 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8025 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8026 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8027 {
8028 tree arg00 = TREE_OPERAND (arg0, 0);
8029 tree arg01 = TREE_OPERAND (arg0, 1);
8030
8031 return fold_build_pointer_plus_loc
8032 (loc, fold_convert_loc (loc, type, arg00), arg01);
8033 }
8034
8035 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8036 of the same precision, and X is an integer type not narrower than
8037 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8038 if (INTEGRAL_TYPE_P (type)
8039 && TREE_CODE (op0) == BIT_NOT_EXPR
8040 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8041 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8042 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8043 {
8044 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8045 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8046 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8047 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8048 fold_convert_loc (loc, type, tem));
8049 }
8050
8051 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8052 type of X and Y (integer types only). */
8053 if (INTEGRAL_TYPE_P (type)
8054 && TREE_CODE (op0) == MULT_EXPR
8055 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8056 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8057 {
8058 /* Be careful not to introduce new overflows. */
8059 tree mult_type;
8060 if (TYPE_OVERFLOW_WRAPS (type))
8061 mult_type = type;
8062 else
8063 mult_type = unsigned_type_for (type);
8064
8065 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8066 {
8067 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8068 fold_convert_loc (loc, mult_type,
8069 TREE_OPERAND (op0, 0)),
8070 fold_convert_loc (loc, mult_type,
8071 TREE_OPERAND (op0, 1)));
8072 return fold_convert_loc (loc, type, tem);
8073 }
8074 }
8075
8076 return NULL_TREE;
8077
8078 case VIEW_CONVERT_EXPR:
8079 if (TREE_CODE (op0) == MEM_REF)
8080 return fold_build2_loc (loc, MEM_REF, type,
8081 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8082
8083 return NULL_TREE;
8084
8085 case NEGATE_EXPR:
8086 tem = fold_negate_expr (loc, arg0);
8087 if (tem)
8088 return fold_convert_loc (loc, type, tem);
8089 return NULL_TREE;
8090
8091 case ABS_EXPR:
8092 /* Convert fabs((double)float) into (double)fabsf(float). */
8093 if (TREE_CODE (arg0) == NOP_EXPR
8094 && TREE_CODE (type) == REAL_TYPE)
8095 {
8096 tree targ0 = strip_float_extensions (arg0);
8097 if (targ0 != arg0)
8098 return fold_convert_loc (loc, type,
8099 fold_build1_loc (loc, ABS_EXPR,
8100 TREE_TYPE (targ0),
8101 targ0));
8102 }
8103 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8104 else if (TREE_CODE (arg0) == ABS_EXPR)
8105 return arg0;
8106
8107 /* Strip sign ops from argument. */
8108 if (TREE_CODE (type) == REAL_TYPE)
8109 {
8110 tem = fold_strip_sign_ops (arg0);
8111 if (tem)
8112 return fold_build1_loc (loc, ABS_EXPR, type,
8113 fold_convert_loc (loc, type, tem));
8114 }
8115 return NULL_TREE;
8116
8117 case CONJ_EXPR:
8118 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8119 return fold_convert_loc (loc, type, arg0);
8120 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8121 {
8122 tree itype = TREE_TYPE (type);
8123 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8124 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8125 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8126 negate_expr (ipart));
8127 }
8128 if (TREE_CODE (arg0) == CONJ_EXPR)
8129 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8130 return NULL_TREE;
8131
8132 case BIT_NOT_EXPR:
8133 /* Convert ~ (-A) to A - 1. */
8134 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8135 return fold_build2_loc (loc, MINUS_EXPR, type,
8136 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8137 build_int_cst (type, 1));
8138 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8139 else if (INTEGRAL_TYPE_P (type)
8140 && ((TREE_CODE (arg0) == MINUS_EXPR
8141 && integer_onep (TREE_OPERAND (arg0, 1)))
8142 || (TREE_CODE (arg0) == PLUS_EXPR
8143 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8144 return fold_build1_loc (loc, NEGATE_EXPR, type,
8145 fold_convert_loc (loc, type,
8146 TREE_OPERAND (arg0, 0)));
8147 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8148 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8149 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8150 fold_convert_loc (loc, type,
8151 TREE_OPERAND (arg0, 0)))))
8152 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8153 fold_convert_loc (loc, type,
8154 TREE_OPERAND (arg0, 1)));
8155 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8156 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8157 fold_convert_loc (loc, type,
8158 TREE_OPERAND (arg0, 1)))))
8159 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8160 fold_convert_loc (loc, type,
8161 TREE_OPERAND (arg0, 0)), tem);
8162
8163 return NULL_TREE;
8164
8165 case TRUTH_NOT_EXPR:
8166 /* Note that the operand of this must be an int
8167 and its values must be 0 or 1.
8168 ("true" is a fixed value perhaps depending on the language,
8169 but we don't handle values other than 1 correctly yet.) */
8170 tem = fold_truth_not_expr (loc, arg0);
8171 if (!tem)
8172 return NULL_TREE;
8173 return fold_convert_loc (loc, type, tem);
8174
8175 case REALPART_EXPR:
8176 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8177 return fold_convert_loc (loc, type, arg0);
8178 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8179 {
8180 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8181 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8182 fold_build1_loc (loc, REALPART_EXPR, itype,
8183 TREE_OPERAND (arg0, 0)),
8184 fold_build1_loc (loc, REALPART_EXPR, itype,
8185 TREE_OPERAND (arg0, 1)));
8186 return fold_convert_loc (loc, type, tem);
8187 }
8188 if (TREE_CODE (arg0) == CONJ_EXPR)
8189 {
8190 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8191 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8192 TREE_OPERAND (arg0, 0));
8193 return fold_convert_loc (loc, type, tem);
8194 }
8195 if (TREE_CODE (arg0) == CALL_EXPR)
8196 {
8197 tree fn = get_callee_fndecl (arg0);
8198 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8199 switch (DECL_FUNCTION_CODE (fn))
8200 {
8201 CASE_FLT_FN (BUILT_IN_CEXPI):
8202 fn = mathfn_built_in (type, BUILT_IN_COS);
8203 if (fn)
8204 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8205 break;
8206
8207 default:
8208 break;
8209 }
8210 }
8211 return NULL_TREE;
8212
8213 case IMAGPART_EXPR:
8214 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8215 return build_zero_cst (type);
8216 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8217 {
8218 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8219 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8220 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8221 TREE_OPERAND (arg0, 0)),
8222 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8223 TREE_OPERAND (arg0, 1)));
8224 return fold_convert_loc (loc, type, tem);
8225 }
8226 if (TREE_CODE (arg0) == CONJ_EXPR)
8227 {
8228 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8229 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8230 return fold_convert_loc (loc, type, negate_expr (tem));
8231 }
8232 if (TREE_CODE (arg0) == CALL_EXPR)
8233 {
8234 tree fn = get_callee_fndecl (arg0);
8235 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8236 switch (DECL_FUNCTION_CODE (fn))
8237 {
8238 CASE_FLT_FN (BUILT_IN_CEXPI):
8239 fn = mathfn_built_in (type, BUILT_IN_SIN);
8240 if (fn)
8241 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8242 break;
8243
8244 default:
8245 break;
8246 }
8247 }
8248 return NULL_TREE;
8249
8250 case INDIRECT_REF:
8251 /* Fold *&X to X if X is an lvalue. */
8252 if (TREE_CODE (op0) == ADDR_EXPR)
8253 {
8254 tree op00 = TREE_OPERAND (op0, 0);
8255 if ((TREE_CODE (op00) == VAR_DECL
8256 || TREE_CODE (op00) == PARM_DECL
8257 || TREE_CODE (op00) == RESULT_DECL)
8258 && !TREE_READONLY (op00))
8259 return op00;
8260 }
8261 return NULL_TREE;
8262
8263 default:
8264 return NULL_TREE;
8265 } /* switch (code) */
8266 }
8267
8268
8269 /* If the operation was a conversion do _not_ mark a resulting constant
8270 with TREE_OVERFLOW if the original constant was not. These conversions
8271 have implementation defined behavior and retaining the TREE_OVERFLOW
8272 flag here would confuse later passes such as VRP. */
8273 tree
8274 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8275 tree type, tree op0)
8276 {
8277 tree res = fold_unary_loc (loc, code, type, op0);
8278 if (res
8279 && TREE_CODE (res) == INTEGER_CST
8280 && TREE_CODE (op0) == INTEGER_CST
8281 && CONVERT_EXPR_CODE_P (code))
8282 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8283
8284 return res;
8285 }
8286
8287 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8288 operands OP0 and OP1. LOC is the location of the resulting expression.
8289 ARG0 and ARG1 are OP0 and OP1 with conversions stripped (STRIP_NOPS).
8290 Return the folded expression if folding is successful. Otherwise,
8291 return NULL_TREE. */
8292 static tree
8293 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8294 tree arg0, tree arg1, tree op0, tree op1)
8295 {
8296 tree tem;
8297
8298 /* We only do these simplifications if we are optimizing. */
8299 if (!optimize)
8300 return NULL_TREE;
8301
8302 /* Check for things like (A || B) && (A || C). We can convert this
8303 to A || (B && C). Note that either operator can be any of the four
8304 truth and/or operations and the transformation will still be
8305 valid. Also note that we only care about order for the
8306 ANDIF and ORIF operators. If B contains side effects, this
8307 might change the truth-value of A. */
8308 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8309 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8310 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8311 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8312 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8313 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8314 {
8315 tree a00 = TREE_OPERAND (arg0, 0);
8316 tree a01 = TREE_OPERAND (arg0, 1);
8317 tree a10 = TREE_OPERAND (arg1, 0);
8318 tree a11 = TREE_OPERAND (arg1, 1);
8319 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8320 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8321 && (code == TRUTH_AND_EXPR
8322 || code == TRUTH_OR_EXPR));
8323
8324 if (operand_equal_p (a00, a10, 0))
8325 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8326 fold_build2_loc (loc, code, type, a01, a11));
8327 else if (commutative && operand_equal_p (a00, a11, 0))
8328 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8329 fold_build2_loc (loc, code, type, a01, a10));
8330 else if (commutative && operand_equal_p (a01, a10, 0))
8331 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8332 fold_build2_loc (loc, code, type, a00, a11));
8333
8334 /* This case is tricky because we must either have commutative
8335 operators or else A10 must not have side-effects. */
8336
8337 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8338 && operand_equal_p (a01, a11, 0))
8339 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8340 fold_build2_loc (loc, code, type, a00, a10),
8341 a01);
8342 }
8343
8344 /* See if we can build a range comparison. */
8345 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8346 return tem;
8347
8348 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8349 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8350 {
8351 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8352 if (tem)
8353 return fold_build2_loc (loc, code, type, tem, arg1);
8354 }
8355
8356 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8357 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8358 {
8359 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8360 if (tem)
8361 return fold_build2_loc (loc, code, type, arg0, tem);
8362 }
8363
8364 /* Check for the possibility of merging component references. If our
8365 lhs is another similar operation, try to merge its rhs with our
8366 rhs. Then try to merge our lhs and rhs. */
8367 if (TREE_CODE (arg0) == code
8368 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8369 TREE_OPERAND (arg0, 1), arg1)))
8370 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8371
8372 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8373 return tem;
8374
8375 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8376 && (code == TRUTH_AND_EXPR
8377 || code == TRUTH_ANDIF_EXPR
8378 || code == TRUTH_OR_EXPR
8379 || code == TRUTH_ORIF_EXPR))
8380 {
8381 enum tree_code ncode, icode;
8382
8383 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8384 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8385 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8386
8387 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8388 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8389 We don't want to pack more than two leaves into a non-IF AND/OR
8390 expression.
8391 If the tree code of the left-hand operand isn't an AND/OR-IF code
8392 and isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8393 If the inner right-hand side of the left-hand operand has
8394 side-effects, or isn't simple, then we can't add to it, as
8395 otherwise we might destroy the if-sequence.  */
8396 if (TREE_CODE (arg0) == icode
8397 && simple_operand_p_2 (arg1)
8398 /* Needed for sequence points to handle trappings, and
8399 side-effects. */
8400 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8401 {
8402 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8403 arg1);
8404 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8405 tem);
8406 }
8407 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8408 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8409 else if (TREE_CODE (arg1) == icode
8410 && simple_operand_p_2 (arg0)
8411 /* Needed for sequence points to handle trappings, and
8412 side-effects. */
8413 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8414 {
8415 tem = fold_build2_loc (loc, ncode, type,
8416 arg0, TREE_OPERAND (arg1, 0));
8417 return fold_build2_loc (loc, icode, type, tem,
8418 TREE_OPERAND (arg1, 1));
8419 }
8420 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8421 into (A OR B).
8422 For sequence point consistency, we need to check for trapping,
8423 and side-effects. */
8424 else if (code == icode && simple_operand_p_2 (arg0)
8425 && simple_operand_p_2 (arg1))
8426 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8427 }
8428
8429 return NULL_TREE;
8430 }
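
/* Examples of the rewrites above: (a || b) && (a || c) becomes
   a || (b && c); and when LOGICAL_OP_NON_SHORT_CIRCUIT holds,
   a != 0 && b != 0 with simple, non-trapping operands becomes the
   branch-free a != 0 & b != 0 (TRUTH_AND_EXPR), packing at most two
   leaves per non-IF operation.  */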
8431
8432 /* Fold a binary expression of code CODE and type TYPE with operands
8433 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8434 Return the folded expression if folding is successful. Otherwise,
8435 return NULL_TREE. */
8436
8437 static tree
8438 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8439 {
8440 enum tree_code compl_code;
8441
8442 if (code == MIN_EXPR)
8443 compl_code = MAX_EXPR;
8444 else if (code == MAX_EXPR)
8445 compl_code = MIN_EXPR;
8446 else
8447 gcc_unreachable ();
8448
8449 /* MIN (MAX (a, b), b) == b. */
8450 if (TREE_CODE (op0) == compl_code
8451 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8452 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8453
8454 /* MIN (MAX (b, a), b) == b. */
8455 if (TREE_CODE (op0) == compl_code
8456 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8457 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8458 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8459
8460 /* MIN (a, MAX (a, b)) == a. */
8461 if (TREE_CODE (op1) == compl_code
8462 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8463 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8464 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8465
8466 /* MIN (a, MAX (b, a)) == a. */
8467 if (TREE_CODE (op1) == compl_code
8468 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8469 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8470 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8471
8472 return NULL_TREE;
8473 }
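
/* The four cases above in source form:
     MIN (MAX (a, b), b) -> b        MIN (MAX (b, a), b) -> b
     MIN (a, MAX (a, b)) -> a        MIN (a, MAX (b, a)) -> a
   and dually with MIN and MAX exchanged; omit_one_operand_loc preserves
   any side effects of the discarded operand.  */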
8474
8475 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8476 by changing CODE to reduce the magnitude of constants involved in
8477 ARG0 of the comparison.
8478 Returns a canonicalized comparison tree if a simplification was
8479 possible, otherwise returns NULL_TREE.
8480 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8481 valid if signed overflow is undefined. */
8482
8483 static tree
8484 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8485 tree arg0, tree arg1,
8486 bool *strict_overflow_p)
8487 {
8488 enum tree_code code0 = TREE_CODE (arg0);
8489 tree t, cst0 = NULL_TREE;
8490 int sgn0;
8491 bool swap = false;
8492
8493 /* Match A +- CST code arg1 and CST code arg1. We can change the
8494 first form only if overflow is undefined. */
8495 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8496 /* In principle pointers also have undefined overflow behavior,
8497 but that causes problems elsewhere. */
8498 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8499 && (code0 == MINUS_EXPR
8500 || code0 == PLUS_EXPR)
8501 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8502 || code0 == INTEGER_CST))
8503 return NULL_TREE;
8504
8505 /* Identify the constant in arg0 and its sign. */
8506 if (code0 == INTEGER_CST)
8507 cst0 = arg0;
8508 else
8509 cst0 = TREE_OPERAND (arg0, 1);
8510 sgn0 = tree_int_cst_sgn (cst0);
8511
8512 /* Overflowed constants and zero will cause problems. */
8513 if (integer_zerop (cst0)
8514 || TREE_OVERFLOW (cst0))
8515 return NULL_TREE;
8516
8517 /* See if we can reduce the magnitude of the constant in
8518 arg0 by changing the comparison code. */
8519 if (code0 == INTEGER_CST)
8520 {
8521 /* CST <= arg1 -> CST-1 < arg1. */
8522 if (code == LE_EXPR && sgn0 == 1)
8523 code = LT_EXPR;
8524 /* -CST < arg1 -> -CST-1 <= arg1. */
8525 else if (code == LT_EXPR && sgn0 == -1)
8526 code = LE_EXPR;
8527 /* CST > arg1 -> CST-1 >= arg1. */
8528 else if (code == GT_EXPR && sgn0 == 1)
8529 code = GE_EXPR;
8530 /* -CST >= arg1 -> -CST-1 > arg1. */
8531 else if (code == GE_EXPR && sgn0 == -1)
8532 code = GT_EXPR;
8533 else
8534 return NULL_TREE;
8535 /* arg1 code' CST' might be more canonical. */
8536 swap = true;
8537 }
8538 else
8539 {
8540 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8541 if (code == LT_EXPR
8542 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8543 code = LE_EXPR;
8544 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8545 else if (code == GT_EXPR
8546 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8547 code = GE_EXPR;
8548 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8549 else if (code == LE_EXPR
8550 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8551 code = LT_EXPR;
8552 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8553 else if (code == GE_EXPR
8554 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8555 code = GT_EXPR;
8556 else
8557 return NULL_TREE;
8558 *strict_overflow_p = true;
8559 }
8560
8561 /* Now build the constant reduced in magnitude. But not if that
8562 would produce one outside of its type's range.  */
8563 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8564 && ((sgn0 == 1
8565 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8566 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8567 || (sgn0 == -1
8568 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8569 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8570 /* We cannot swap the comparison here as that would cause us to
8571 endlessly recurse. */
8572 return NULL_TREE;
8573
8574 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8575 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8576 if (code0 != INTEGER_CST)
8577 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8578 t = fold_convert (TREE_TYPE (arg1), t);
8579
8580 /* If swapping might yield a more canonical form, do so.  */
8581 if (swap)
8582 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8583 else
8584 return fold_build2_loc (loc, code, type, t, arg1);
8585 }
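
/* Worked examples: 5 <= x becomes 4 < x and is then swapped to x > 4
   (the code0 == INTEGER_CST path, valid unconditionally), while
   a - 5 < x becomes a - 4 <= x only under the assumption that signed
   overflow is undefined, since it is wrong if a - 5 wrapped; that is
   what *STRICT_OVERFLOW_P reports.  */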
8586
8587 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8588 overflow further. Try to decrease the magnitude of constants involved
8589 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8590 and put sole constants at the second argument position.
8591 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8592
8593 static tree
8594 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8595 tree arg0, tree arg1)
8596 {
8597 tree t;
8598 bool strict_overflow_p;
8599 const char * const warnmsg = G_("assuming signed overflow does not occur "
8600 "when reducing constant in comparison");
8601
8602 /* Try canonicalization by simplifying arg0. */
8603 strict_overflow_p = false;
8604 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8605 &strict_overflow_p);
8606 if (t)
8607 {
8608 if (strict_overflow_p)
8609 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8610 return t;
8611 }
8612
8613 /* Try canonicalization by simplifying arg1 using the swapped
8614 comparison. */
8615 code = swap_tree_comparison (code);
8616 strict_overflow_p = false;
8617 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8618 &strict_overflow_p);
8619 if (t && strict_overflow_p)
8620 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8621 return t;
8622 }
8623
8624 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8625 space. This is used to avoid issuing overflow warnings for
8626      expressions like &p->x which cannot wrap.  */
8627
8628 static bool
8629 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8630 {
8631 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8632 return true;
8633
8634 if (bitpos < 0)
8635 return true;
8636
8637 wide_int wi_offset;
8638 int precision = TYPE_PRECISION (TREE_TYPE (base));
8639 if (offset == NULL_TREE)
8640 wi_offset = wi::zero (precision);
8641 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8642 return true;
8643 else
8644 wi_offset = offset;
8645
8646 bool overflow;
8647 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8648 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8649 if (overflow)
8650 return true;
8651
8652 if (!wi::fits_uhwi_p (total))
8653 return true;
8654
8655 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8656 if (size <= 0)
8657 return true;
8658
8659 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8660 array. */
8661 if (TREE_CODE (base) == ADDR_EXPR)
8662 {
8663 HOST_WIDE_INT base_size;
8664
8665 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8666 if (base_size > 0 && size < base_size)
8667 size = base_size;
8668 }
8669
8670 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8671 }
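
/* For example, given "struct S { int a; int b; } *p" on a target with
   4-byte int, the decomposed address "&p->b" has a bit position of 32
   and no variable offset; 32 / BITS_PER_UNIT = 4 bytes lies within the
   8-byte size of S, so this returns false: the address cannot wrap.  */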
8672
8673 /* Return the HOST_WIDE_INT least significant bits of T, an
8674    INTEGER_CST of sizetype kind.  This makes sure to properly
8675    sign-extend the constant.  */
8676
8677 static HOST_WIDE_INT
8678 size_low_cst (const_tree t)
8679 {
8680 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8681 int prec = TYPE_PRECISION (TREE_TYPE (t));
8682 if (prec < HOST_BITS_PER_WIDE_INT)
8683 return sext_hwi (w, prec);
8684 return w;
8685 }
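
/* For example, a constant of precision 8 whose low element is 0xff is
   returned as -1, not 255, because of the sign-extension above.  */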
8686
8687 /* Subroutine of fold_binary. This routine performs all of the
8688 transformations that are common to the equality/inequality
8689 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8690    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8691    fold_binary should call fold_binary instead of this function.  Fold a comparison with
8692 tree code CODE and type TYPE with operands OP0 and OP1. Return
8693 the folded comparison or NULL_TREE. */
8694
8695 static tree
8696 fold_comparison (location_t loc, enum tree_code code, tree type,
8697 tree op0, tree op1)
8698 {
8699 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8700 tree arg0, arg1, tem;
8701
8702 arg0 = op0;
8703 arg1 = op1;
8704
8705 STRIP_SIGN_NOPS (arg0);
8706 STRIP_SIGN_NOPS (arg1);
8707
8708 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8709 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8710 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8711 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8712 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8713 && TREE_CODE (arg1) == INTEGER_CST
8714 && !TREE_OVERFLOW (arg1))
8715 {
8716 const enum tree_code
8717 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8718 tree const1 = TREE_OPERAND (arg0, 1);
8719 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8720 tree variable = TREE_OPERAND (arg0, 0);
8721 tree new_const = int_const_binop (reverse_op, const2, const1);
8722
8723 	  /* If the constant operation overflowed, this can be
8724 simplified as a comparison against INT_MAX/INT_MIN. */
8725 if (TREE_OVERFLOW (new_const)
8726 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8727 {
8728 int const1_sgn = tree_int_cst_sgn (const1);
8729 enum tree_code code2 = code;
8730
8731 /* Get the sign of the constant on the lhs if the
8732 operation were VARIABLE + CONST1. */
8733 if (TREE_CODE (arg0) == MINUS_EXPR)
8734 const1_sgn = -const1_sgn;
8735
8736 /* The sign of the constant determines if we overflowed
8737 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8738 Canonicalize to the INT_MIN overflow by swapping the comparison
8739 if necessary. */
8740 if (const1_sgn == -1)
8741 code2 = swap_tree_comparison (code);
8742
8743 /* We now can look at the canonicalized case
8744 VARIABLE + 1 CODE2 INT_MIN
8745 and decide on the result. */
8746 switch (code2)
8747 {
8748 case EQ_EXPR:
8749 case LT_EXPR:
8750 case LE_EXPR:
8751 return
8752 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8753
8754 case NE_EXPR:
8755 case GE_EXPR:
8756 case GT_EXPR:
8757 return
8758 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8759
8760 default:
8761 gcc_unreachable ();
8762 }
8763 }
8764 else
8765 {
8766 if (!equality_code)
8767 fold_overflow_warning ("assuming signed overflow does not occur "
8768 "when changing X +- C1 cmp C2 to "
8769 "X cmp C2 -+ C1",
8770 WARN_STRICT_OVERFLOW_COMPARISON);
8771 return fold_build2_loc (loc, code, type, variable, new_const);
8772 }
8773 }
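  /* For instance, with undefined signed overflow, "x + 5 < 10" is
     rewritten to "x < 5" here.  If the new constant overflows, as for
     "x - 1 > INT_MAX" where INT_MAX + 1 is not representable, the
     whole comparison folds to a constant instead (always false in
     that example).  */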
8774
8775 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8776 if (TREE_CODE (arg0) == MINUS_EXPR
8777 && equality_code
8778 && integer_zerop (arg1))
8779 {
8780 /* ??? The transformation is valid for the other operators if overflow
8781 is undefined for the type, but performing it here badly interacts
8782 with the transformation in fold_cond_expr_with_comparison which
8783 	 attempts to synthesize ABS_EXPR.  */
8784 if (!equality_code)
8785 fold_overflow_warning ("assuming signed overflow does not occur "
8786 "when changing X - Y cmp 0 to X cmp Y",
8787 WARN_STRICT_OVERFLOW_COMPARISON);
8788 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8789 TREE_OPERAND (arg0, 1));
8790 }
8791
8792 /* For comparisons of pointers we can decompose it to a compile time
8793 comparison of the base objects and the offsets into the object.
8794 This requires at least one operand being an ADDR_EXPR or a
8795 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8796 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8797 && (TREE_CODE (arg0) == ADDR_EXPR
8798 || TREE_CODE (arg1) == ADDR_EXPR
8799 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8800 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8801 {
8802 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8803 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8804 machine_mode mode;
8805 int volatilep, unsignedp;
8806 bool indirect_base0 = false, indirect_base1 = false;
8807
8808 /* Get base and offset for the access. Strip ADDR_EXPR for
8809 get_inner_reference, but put it back by stripping INDIRECT_REF
8810 off the base object if possible. indirect_baseN will be true
8811 if baseN is not an address but refers to the object itself. */
8812 base0 = arg0;
8813 if (TREE_CODE (arg0) == ADDR_EXPR)
8814 {
8815 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8816 &bitsize, &bitpos0, &offset0, &mode,
8817 &unsignedp, &volatilep, false);
8818 if (TREE_CODE (base0) == INDIRECT_REF)
8819 base0 = TREE_OPERAND (base0, 0);
8820 else
8821 indirect_base0 = true;
8822 }
8823 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8824 {
8825 base0 = TREE_OPERAND (arg0, 0);
8826 STRIP_SIGN_NOPS (base0);
8827 if (TREE_CODE (base0) == ADDR_EXPR)
8828 {
8829 base0 = TREE_OPERAND (base0, 0);
8830 indirect_base0 = true;
8831 }
8832 offset0 = TREE_OPERAND (arg0, 1);
8833 if (tree_fits_shwi_p (offset0))
8834 {
8835 HOST_WIDE_INT off = size_low_cst (offset0);
8836 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8837 * BITS_PER_UNIT)
8838 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8839 {
8840 bitpos0 = off * BITS_PER_UNIT;
8841 offset0 = NULL_TREE;
8842 }
8843 }
8844 }
8845
8846 base1 = arg1;
8847 if (TREE_CODE (arg1) == ADDR_EXPR)
8848 {
8849 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8850 &bitsize, &bitpos1, &offset1, &mode,
8851 &unsignedp, &volatilep, false);
8852 if (TREE_CODE (base1) == INDIRECT_REF)
8853 base1 = TREE_OPERAND (base1, 0);
8854 else
8855 indirect_base1 = true;
8856 }
8857 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8858 {
8859 base1 = TREE_OPERAND (arg1, 0);
8860 STRIP_SIGN_NOPS (base1);
8861 if (TREE_CODE (base1) == ADDR_EXPR)
8862 {
8863 base1 = TREE_OPERAND (base1, 0);
8864 indirect_base1 = true;
8865 }
8866 offset1 = TREE_OPERAND (arg1, 1);
8867 if (tree_fits_shwi_p (offset1))
8868 {
8869 HOST_WIDE_INT off = size_low_cst (offset1);
8870 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8871 * BITS_PER_UNIT)
8872 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8873 {
8874 bitpos1 = off * BITS_PER_UNIT;
8875 offset1 = NULL_TREE;
8876 }
8877 }
8878 }
8879
8880 /* A local variable can never be pointed to by
8881 the default SSA name of an incoming parameter. */
8882 if ((TREE_CODE (arg0) == ADDR_EXPR
8883 && indirect_base0
8884 && TREE_CODE (base0) == VAR_DECL
8885 && auto_var_in_fn_p (base0, current_function_decl)
8886 && !indirect_base1
8887 && TREE_CODE (base1) == SSA_NAME
8888 && SSA_NAME_IS_DEFAULT_DEF (base1)
8889 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8890 || (TREE_CODE (arg1) == ADDR_EXPR
8891 && indirect_base1
8892 && TREE_CODE (base1) == VAR_DECL
8893 && auto_var_in_fn_p (base1, current_function_decl)
8894 && !indirect_base0
8895 && TREE_CODE (base0) == SSA_NAME
8896 && SSA_NAME_IS_DEFAULT_DEF (base0)
8897 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8898 {
8899 if (code == NE_EXPR)
8900 return constant_boolean_node (1, type);
8901 else if (code == EQ_EXPR)
8902 return constant_boolean_node (0, type);
8903 }
8904 /* If we have equivalent bases we might be able to simplify. */
8905 else if (indirect_base0 == indirect_base1
8906 && operand_equal_p (base0, base1, 0))
8907 {
8908 /* We can fold this expression to a constant if the non-constant
8909 offset parts are equal. */
8910 if ((offset0 == offset1
8911 || (offset0 && offset1
8912 && operand_equal_p (offset0, offset1, 0)))
8913 && (code == EQ_EXPR
8914 || code == NE_EXPR
8915 || (indirect_base0 && DECL_P (base0))
8916 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8917
8918 {
8919 if (!equality_code
8920 && bitpos0 != bitpos1
8921 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8922 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8923 fold_overflow_warning (("assuming pointer wraparound does not "
8924 "occur when comparing P +- C1 with "
8925 "P +- C2"),
8926 WARN_STRICT_OVERFLOW_CONDITIONAL);
8927
8928 switch (code)
8929 {
8930 case EQ_EXPR:
8931 return constant_boolean_node (bitpos0 == bitpos1, type);
8932 case NE_EXPR:
8933 return constant_boolean_node (bitpos0 != bitpos1, type);
8934 case LT_EXPR:
8935 return constant_boolean_node (bitpos0 < bitpos1, type);
8936 case LE_EXPR:
8937 return constant_boolean_node (bitpos0 <= bitpos1, type);
8938 case GE_EXPR:
8939 return constant_boolean_node (bitpos0 >= bitpos1, type);
8940 case GT_EXPR:
8941 return constant_boolean_node (bitpos0 > bitpos1, type);
8942 default:;
8943 }
8944 }
8945 /* We can simplify the comparison to a comparison of the variable
8946 offset parts if the constant offset parts are equal.
8947 Be careful to use signed sizetype here because otherwise we
8948 mess with array offsets in the wrong way. This is possible
8949 	     because pointer arithmetic is restricted to remain within an
8950 object and overflow on pointer differences is undefined as of
8951 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8952 else if (bitpos0 == bitpos1
8953 && (equality_code
8954 || (indirect_base0 && DECL_P (base0))
8955 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8956 {
8957 	      /* By converting to signed sizetype we cover middle-end pointer
8958 		 arithmetic, which operates on unsigned pointer types of
8959 		 sizetype size, and ARRAY_REF offsets, which are properly
8960 		 sign- or zero-extended from their type in case it is
8961 		 narrower than sizetype.  */
8962 if (offset0 == NULL_TREE)
8963 offset0 = build_int_cst (ssizetype, 0);
8964 else
8965 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8966 if (offset1 == NULL_TREE)
8967 offset1 = build_int_cst (ssizetype, 0);
8968 else
8969 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8970
8971 if (!equality_code
8972 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8973 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8974 fold_overflow_warning (("assuming pointer wraparound does not "
8975 "occur when comparing P +- C1 with "
8976 "P +- C2"),
8977 WARN_STRICT_OVERFLOW_COMPARISON);
8978
8979 return fold_build2_loc (loc, code, type, offset0, offset1);
8980 }
8981 }
8982 /* For non-equal bases we can simplify if they are addresses
8983 of local binding decls or constants. */
8984 else if (indirect_base0 && indirect_base1
8985 /* We know that !operand_equal_p (base0, base1, 0)
8986 because the if condition was false. But make
8987 sure two decls are not the same. */
8988 && base0 != base1
8989 && TREE_CODE (arg0) == ADDR_EXPR
8990 && TREE_CODE (arg1) == ADDR_EXPR
8991 && (((TREE_CODE (base0) == VAR_DECL
8992 || TREE_CODE (base0) == PARM_DECL)
8993 && (targetm.binds_local_p (base0)
8994 || CONSTANT_CLASS_P (base1)))
8995 || CONSTANT_CLASS_P (base0))
8996 && (((TREE_CODE (base1) == VAR_DECL
8997 || TREE_CODE (base1) == PARM_DECL)
8998 && (targetm.binds_local_p (base1)
8999 || CONSTANT_CLASS_P (base0)))
9000 || CONSTANT_CLASS_P (base1)))
9001 {
9002 if (code == EQ_EXPR)
9003 return omit_two_operands_loc (loc, type, boolean_false_node,
9004 arg0, arg1);
9005 else if (code == NE_EXPR)
9006 return omit_two_operands_loc (loc, type, boolean_true_node,
9007 arg0, arg1);
9008 }
9009 /* For equal offsets we can simplify to a comparison of the
9010 base addresses. */
9011 else if (bitpos0 == bitpos1
9012 && (indirect_base0
9013 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9014 && (indirect_base1
9015 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9016 && ((offset0 == offset1)
9017 || (offset0 && offset1
9018 && operand_equal_p (offset0, offset1, 0))))
9019 {
9020 if (indirect_base0)
9021 base0 = build_fold_addr_expr_loc (loc, base0);
9022 if (indirect_base1)
9023 base1 = build_fold_addr_expr_loc (loc, base1);
9024 return fold_build2_loc (loc, code, type, base0, base1);
9025 }
9026 }
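  /* As a concrete case: given "int a[10]" and 4-byte int, the
     comparison "&a[2] < &a[5]" decomposes to the common base "a" with
     bit positions 64 and 160, and so folds to a constant true via the
     bitpos0 < bitpos1 path above.  */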
9027
9028 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9029 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9030 the resulting offset is smaller in absolute value than the
9031 original one and has the same sign. */
9032 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9033 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9034 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9035 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9036 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9037 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9038 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9039 {
9040 tree const1 = TREE_OPERAND (arg0, 1);
9041 tree const2 = TREE_OPERAND (arg1, 1);
9042 tree variable1 = TREE_OPERAND (arg0, 0);
9043 tree variable2 = TREE_OPERAND (arg1, 0);
9044 tree cst;
9045 const char * const warnmsg = G_("assuming signed overflow does not "
9046 "occur when combining constants around "
9047 "a comparison");
9048
9049 /* Put the constant on the side where it doesn't overflow and is
9050 	 of lower absolute value than, and of the same sign as, before.  */
9051 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9052 ? MINUS_EXPR : PLUS_EXPR,
9053 const2, const1);
9054 if (!TREE_OVERFLOW (cst)
9055 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9056 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9057 {
9058 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9059 return fold_build2_loc (loc, code, type,
9060 variable1,
9061 fold_build2_loc (loc, TREE_CODE (arg1),
9062 TREE_TYPE (arg1),
9063 variable2, cst));
9064 }
9065
9066 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9067 ? MINUS_EXPR : PLUS_EXPR,
9068 const1, const2);
9069 if (!TREE_OVERFLOW (cst)
9070 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9071 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9072 {
9073 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9074 return fold_build2_loc (loc, code, type,
9075 fold_build2_loc (loc, TREE_CODE (arg0),
9076 TREE_TYPE (arg0),
9077 variable1, cst),
9078 variable2);
9079 }
9080 }
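  /* For example, "x + 4 < y + 2" becomes "x + 2 < y": the combined
     constant 4 - 2 = 2 is smaller in magnitude than 4 and keeps its
     sign, so the second attempt above succeeds.  */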
9081
9082 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9083 signed arithmetic case. That form is created by the compiler
9084 often enough for folding it to be of value. One example is in
9085 computing loop trip counts after Operator Strength Reduction. */
9086 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9087 && TREE_CODE (arg0) == MULT_EXPR
9088 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9089 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9090 && integer_zerop (arg1))
9091 {
9092 tree const1 = TREE_OPERAND (arg0, 1);
9093 tree const2 = arg1; /* zero */
9094 tree variable1 = TREE_OPERAND (arg0, 0);
9095 enum tree_code cmp_code = code;
9096
9097 /* Handle unfolded multiplication by zero. */
9098 if (integer_zerop (const1))
9099 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9100
9101 fold_overflow_warning (("assuming signed overflow does not occur when "
9102 "eliminating multiplication in comparison "
9103 "with zero"),
9104 WARN_STRICT_OVERFLOW_COMPARISON);
9105
9106 /* If const1 is negative we swap the sense of the comparison. */
9107 if (tree_int_cst_sgn (const1) < 0)
9108 cmp_code = swap_tree_comparison (cmp_code);
9109
9110 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9111 }
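  /* For instance, with signed overflow undefined, "x * 4 > 0" becomes
     "x > 0", and "x * -4 > 0" becomes "x < 0" because the negative
     multiplier swaps the sense of the comparison.  */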
9112
9113 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9114 if (tem)
9115 return tem;
9116
9117 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9118 {
9119 tree targ0 = strip_float_extensions (arg0);
9120 tree targ1 = strip_float_extensions (arg1);
9121 tree newtype = TREE_TYPE (targ0);
9122
9123 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9124 newtype = TREE_TYPE (targ1);
9125
9126 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9127 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9128 return fold_build2_loc (loc, code, type,
9129 fold_convert_loc (loc, newtype, targ0),
9130 fold_convert_loc (loc, newtype, targ1));
9131
9132 /* (-a) CMP (-b) -> b CMP a */
9133 if (TREE_CODE (arg0) == NEGATE_EXPR
9134 && TREE_CODE (arg1) == NEGATE_EXPR)
9135 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9136 TREE_OPERAND (arg0, 0));
9137
9138 if (TREE_CODE (arg1) == REAL_CST)
9139 {
9140 REAL_VALUE_TYPE cst;
9141 cst = TREE_REAL_CST (arg1);
9142
9143 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9144 if (TREE_CODE (arg0) == NEGATE_EXPR)
9145 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9146 TREE_OPERAND (arg0, 0),
9147 build_real (TREE_TYPE (arg1),
9148 real_value_negate (&cst)));
9149
9150 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9151 /* a CMP (-0) -> a CMP 0 */
9152 if (REAL_VALUE_MINUS_ZERO (cst))
9153 return fold_build2_loc (loc, code, type, arg0,
9154 build_real (TREE_TYPE (arg1), dconst0));
9155
9156 /* x != NaN is always true, other ops are always false. */
9157 if (REAL_VALUE_ISNAN (cst)
9158 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9159 {
9160 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9161 return omit_one_operand_loc (loc, type, tem, arg0);
9162 }
9163
9164 /* Fold comparisons against infinity. */
9165 if (REAL_VALUE_ISINF (cst)
9166 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9167 {
9168 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9169 if (tem != NULL_TREE)
9170 return tem;
9171 }
9172 }
9173
9174 /* If this is a comparison of a real constant with a PLUS_EXPR
9175 or a MINUS_EXPR of a real constant, we can convert it into a
9176 	 comparison with a revised real constant, provided unsafe math
9177 	 optimizations are enabled and no overflow occurs.  */
9178 if (flag_unsafe_math_optimizations
9179 && TREE_CODE (arg1) == REAL_CST
9180 && (TREE_CODE (arg0) == PLUS_EXPR
9181 || TREE_CODE (arg0) == MINUS_EXPR)
9182 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9183 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9184 ? MINUS_EXPR : PLUS_EXPR,
9185 arg1, TREE_OPERAND (arg0, 1)))
9186 && !TREE_OVERFLOW (tem))
9187 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9188
9189 /* Likewise, we can simplify a comparison of a real constant with
9190 a MINUS_EXPR whose first operand is also a real constant, i.e.
9191 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9192 floating-point types only if -fassociative-math is set. */
9193 if (flag_associative_math
9194 && TREE_CODE (arg1) == REAL_CST
9195 && TREE_CODE (arg0) == MINUS_EXPR
9196 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9197 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9198 arg1))
9199 && !TREE_OVERFLOW (tem))
9200 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9201 TREE_OPERAND (arg0, 1), tem);
9202
9203 /* Fold comparisons against built-in math functions. */
9204 if (TREE_CODE (arg1) == REAL_CST
9205 && flag_unsafe_math_optimizations
9206 && ! flag_errno_math)
9207 {
9208 enum built_in_function fcode = builtin_mathfn_code (arg0);
9209
9210 if (fcode != END_BUILTINS)
9211 {
9212 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9213 if (tem != NULL_TREE)
9214 return tem;
9215 }
9216 }
9217 }
9218
9219 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9220 && CONVERT_EXPR_P (arg0))
9221 {
9222 /* If we are widening one operand of an integer comparison,
9223 see if the other operand is similarly being widened. Perhaps we
9224 can do the comparison in the narrower type. */
9225 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9226 if (tem)
9227 return tem;
9228
9229 /* Or if we are changing signedness. */
9230 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9231 if (tem)
9232 return tem;
9233 }
9234
9235 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9236 constant, we can simplify it. */
9237 if (TREE_CODE (arg1) == INTEGER_CST
9238 && (TREE_CODE (arg0) == MIN_EXPR
9239 || TREE_CODE (arg0) == MAX_EXPR)
9240 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9241 {
9242 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9243 if (tem)
9244 return tem;
9245 }
9246
9247 /* Simplify comparison of something with itself. (For IEEE
9248 floating-point, we can only do some of these simplifications.) */
9249 if (operand_equal_p (arg0, arg1, 0))
9250 {
9251 switch (code)
9252 {
9253 case EQ_EXPR:
9254 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9255 || ! HONOR_NANS (element_mode (arg0)))
9256 return constant_boolean_node (1, type);
9257 break;
9258
9259 case GE_EXPR:
9260 case LE_EXPR:
9261 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9262 || ! HONOR_NANS (element_mode (arg0)))
9263 return constant_boolean_node (1, type);
9264 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9265
9266 case NE_EXPR:
9267 /* For NE, we can only do this simplification if integer
9268 or we don't honor IEEE floating point NaNs. */
9269 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9270 && HONOR_NANS (element_mode (arg0)))
9271 break;
9272 /* ... fall through ... */
9273 case GT_EXPR:
9274 case LT_EXPR:
9275 return constant_boolean_node (0, type);
9276 default:
9277 gcc_unreachable ();
9278 }
9279 }
9280
9281 /* If we are comparing an expression that just has comparisons
9282 of two integer values, arithmetic expressions of those comparisons,
9283 and constants, we can simplify it. There are only three cases
9284 to check: the two values can either be equal, the first can be
9285 greater, or the second can be greater. Fold the expression for
9286 those three values. Since each value must be 0 or 1, we have
9287 eight possibilities, each of which corresponds to the constant 0
9288 or 1 or one of the six possible comparisons.
9289
9290 This handles common cases like (a > b) == 0 but also handles
9291 expressions like ((x > y) - (y > x)) > 0, which supposedly
9292 occur in macroized code. */
9293
9294 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9295 {
9296 tree cval1 = 0, cval2 = 0;
9297 int save_p = 0;
9298
9299 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9300 /* Don't handle degenerate cases here; they should already
9301 have been handled anyway. */
9302 && cval1 != 0 && cval2 != 0
9303 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9304 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9305 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9306 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9307 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9308 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9309 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9310 {
9311 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9312 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9313
9314 /* We can't just pass T to eval_subst in case cval1 or cval2
9315 was the same as ARG1. */
9316
9317 tree high_result
9318 = fold_build2_loc (loc, code, type,
9319 eval_subst (loc, arg0, cval1, maxval,
9320 cval2, minval),
9321 arg1);
9322 tree equal_result
9323 = fold_build2_loc (loc, code, type,
9324 eval_subst (loc, arg0, cval1, maxval,
9325 cval2, maxval),
9326 arg1);
9327 tree low_result
9328 = fold_build2_loc (loc, code, type,
9329 eval_subst (loc, arg0, cval1, minval,
9330 cval2, maxval),
9331 arg1);
9332
9333 /* All three of these results should be 0 or 1. Confirm they are.
9334 Then use those values to select the proper code to use. */
9335
9336 if (TREE_CODE (high_result) == INTEGER_CST
9337 && TREE_CODE (equal_result) == INTEGER_CST
9338 && TREE_CODE (low_result) == INTEGER_CST)
9339 {
9340 /* Make a 3-bit mask with the high-order bit being the
9341 		     value for `>', the next for `=', and the low for `<'.  */
9342 switch ((integer_onep (high_result) * 4)
9343 + (integer_onep (equal_result) * 2)
9344 + integer_onep (low_result))
9345 {
9346 case 0:
9347 /* Always false. */
9348 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9349 case 1:
9350 code = LT_EXPR;
9351 break;
9352 case 2:
9353 code = EQ_EXPR;
9354 break;
9355 case 3:
9356 code = LE_EXPR;
9357 break;
9358 case 4:
9359 code = GT_EXPR;
9360 break;
9361 case 5:
9362 code = NE_EXPR;
9363 break;
9364 case 6:
9365 code = GE_EXPR;
9366 break;
9367 case 7:
9368 /* Always true. */
9369 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9370 }
9371
9372 if (save_p)
9373 {
9374 tem = save_expr (build2 (code, type, cval1, cval2));
9375 SET_EXPR_LOCATION (tem, loc);
9376 return tem;
9377 }
9378 return fold_build2_loc (loc, code, type, cval1, cval2);
9379 }
9380 }
9381 }
9382
9383 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9384 into a single range test. */
9385 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9386 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9387 && TREE_CODE (arg1) == INTEGER_CST
9388 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9389 && !integer_zerop (TREE_OPERAND (arg0, 1))
9390 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9391 && !TREE_OVERFLOW (arg1))
9392 {
9393 tem = fold_div_compare (loc, code, type, arg0, arg1);
9394 if (tem != NULL_TREE)
9395 return tem;
9396 }
9397
9398 /* Fold ~X op ~Y as Y op X. */
9399 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9400 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9401 {
9402 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9403 return fold_build2_loc (loc, code, type,
9404 fold_convert_loc (loc, cmp_type,
9405 TREE_OPERAND (arg1, 0)),
9406 TREE_OPERAND (arg0, 0));
9407 }
9408
9409 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9410 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9411 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9412 {
9413 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9414 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9415 TREE_OPERAND (arg0, 0),
9416 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9417 fold_convert_loc (loc, cmp_type, arg1)));
9418 }
9419
9420 return NULL_TREE;
9421 }
9422
9423
9424 /* Subroutine of fold_binary. Optimize complex multiplications of the
9425 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9426 argument EXPR represents the expression "z" of type TYPE. */
9427
9428 static tree
9429 fold_mult_zconjz (location_t loc, tree type, tree expr)
9430 {
9431 tree itype = TREE_TYPE (type);
9432 tree rpart, ipart, tem;
9433
9434 if (TREE_CODE (expr) == COMPLEX_EXPR)
9435 {
9436 rpart = TREE_OPERAND (expr, 0);
9437 ipart = TREE_OPERAND (expr, 1);
9438 }
9439 else if (TREE_CODE (expr) == COMPLEX_CST)
9440 {
9441 rpart = TREE_REALPART (expr);
9442 ipart = TREE_IMAGPART (expr);
9443 }
9444 else
9445 {
9446 expr = save_expr (expr);
9447 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9448 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9449 }
9450
9451 rpart = save_expr (rpart);
9452 ipart = save_expr (ipart);
9453 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9454 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9455 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9456 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9457 build_zero_cst (itype));
9458 }
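
/* For example, for z = 3 + 4i this computes 3*3 + 4*4 = 25 for the
   real part and a zero imaginary part, i.e. the value 25 + 0i.  */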
9459
9460
9461 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9462 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9463 guarantees that P and N have the same least significant log2(M) bits.
9464 N is not otherwise constrained. In particular, N is not normalized to
9465 0 <= N < M as is common. In general, the precise value of P is unknown.
9466 M is chosen as large as possible such that constant N can be determined.
9467
9468 Returns M and sets *RESIDUE to N.
9469
9470 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9471 account. This is not always possible due to PR 35705.
9472 */
9473
9474 static unsigned HOST_WIDE_INT
9475 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9476 bool allow_func_align)
9477 {
9478 enum tree_code code;
9479
9480 *residue = 0;
9481
9482 code = TREE_CODE (expr);
9483 if (code == ADDR_EXPR)
9484 {
9485 unsigned int bitalign;
9486 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9487 *residue /= BITS_PER_UNIT;
9488 return bitalign / BITS_PER_UNIT;
9489 }
9490 else if (code == POINTER_PLUS_EXPR)
9491 {
9492 tree op0, op1;
9493 unsigned HOST_WIDE_INT modulus;
9494 enum tree_code inner_code;
9495
9496 op0 = TREE_OPERAND (expr, 0);
9497 STRIP_NOPS (op0);
9498 modulus = get_pointer_modulus_and_residue (op0, residue,
9499 allow_func_align);
9500
9501 op1 = TREE_OPERAND (expr, 1);
9502 STRIP_NOPS (op1);
9503 inner_code = TREE_CODE (op1);
9504 if (inner_code == INTEGER_CST)
9505 {
9506 *residue += TREE_INT_CST_LOW (op1);
9507 return modulus;
9508 }
9509 else if (inner_code == MULT_EXPR)
9510 {
9511 op1 = TREE_OPERAND (op1, 1);
9512 if (TREE_CODE (op1) == INTEGER_CST)
9513 {
9514 unsigned HOST_WIDE_INT align;
9515
9516 /* Compute the greatest power-of-2 divisor of op1. */
9517 align = TREE_INT_CST_LOW (op1);
9518 align &= -align;
9519
9520 		  /* If align is non-zero and less than modulus, replace
9521 		     modulus with align.  If align is 0, then either op1 is 0
9522 or the greatest power-of-2 divisor of op1 doesn't fit in an
9523 unsigned HOST_WIDE_INT. In either case, no additional
9524 constraint is imposed. */
9525 if (align)
9526 modulus = MIN (modulus, align);
9527
9528 return modulus;
9529 }
9530 }
9531 }
9532
9533 /* If we get here, we were unable to determine anything useful about the
9534 expression. */
9535 return 1;
9536 }
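
/* As an illustration: for the address "&buf[3]" of a hypothetical
   16-byte-aligned "char buf[N]", this returns a modulus of 16 with
   *RESIDUE set to 3, i.e. the pointer value equals 3 modulo 16.  */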
9537
9538 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9539 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9540
9541 static bool
9542 vec_cst_ctor_to_array (tree arg, tree *elts)
9543 {
9544 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9545
9546 if (TREE_CODE (arg) == VECTOR_CST)
9547 {
9548 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9549 elts[i] = VECTOR_CST_ELT (arg, i);
9550 }
9551 else if (TREE_CODE (arg) == CONSTRUCTOR)
9552 {
9553 constructor_elt *elt;
9554
9555 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9556 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9557 return false;
9558 else
9559 elts[i] = elt->value;
9560 }
9561 else
9562 return false;
9563 for (; i < nelts; i++)
9564 elts[i]
9565 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9566 return true;
9567 }
9568
9569 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9570 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9571 NULL_TREE otherwise. */
9572
9573 static tree
9574 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9575 {
9576 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9577 tree *elts;
9578 bool need_ctor = false;
9579
9580 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9581 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9582 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9583 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9584 return NULL_TREE;
9585
9586 elts = XALLOCAVEC (tree, nelts * 3);
9587 if (!vec_cst_ctor_to_array (arg0, elts)
9588 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9589 return NULL_TREE;
9590
9591 for (i = 0; i < nelts; i++)
9592 {
9593 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9594 need_ctor = true;
9595 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9596 }
9597
9598 if (need_ctor)
9599 {
9600 vec<constructor_elt, va_gc> *v;
9601 vec_alloc (v, nelts);
9602 for (i = 0; i < nelts; i++)
9603 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9604 return build_constructor (type, v);
9605 }
9606 else
9607 return build_vector (type, &elts[2 * nelts]);
9608 }
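
/* For instance, with two two-element vectors ARG0 = {a, b} and
   ARG1 = {c, d}, the combined element array is {a, b, c, d}, so the
   selector {1, 2} yields the vector {b, c}.  */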
9609
9610 /* Try to fold a pointer difference of type TYPE between two address expressions of
9611 array references AREF0 and AREF1 using location LOC. Return a
9612 simplified expression for the difference or NULL_TREE. */
9613
9614 static tree
9615 fold_addr_of_array_ref_difference (location_t loc, tree type,
9616 tree aref0, tree aref1)
9617 {
9618 tree base0 = TREE_OPERAND (aref0, 0);
9619 tree base1 = TREE_OPERAND (aref1, 0);
9620 tree base_offset = build_int_cst (type, 0);
9621
9622 /* If the bases are array references as well, recurse. If the bases
9623 are pointer indirections compute the difference of the pointers.
9624 If the bases are equal, we are set. */
9625 if ((TREE_CODE (base0) == ARRAY_REF
9626 && TREE_CODE (base1) == ARRAY_REF
9627 && (base_offset
9628 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9629 || (INDIRECT_REF_P (base0)
9630 && INDIRECT_REF_P (base1)
9631 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9632 TREE_OPERAND (base0, 0),
9633 TREE_OPERAND (base1, 0))))
9634 || operand_equal_p (base0, base1, 0))
9635 {
9636 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9637 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9638 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9639 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9640 return fold_build2_loc (loc, PLUS_EXPR, type,
9641 base_offset,
9642 fold_build2_loc (loc, MULT_EXPR, type,
9643 diff, esz));
9644 }
9645 return NULL_TREE;
9646 }
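
/* For example, "&a[i] - &a[j]" has equal bases, so the result is
   0 + (i - j) * sizeof (a[0]), computed in the difference type.  */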
9647
9648 /* If the real or vector real constant CST of type TYPE has an exact
9649 inverse, return it, else return NULL. */
9650
9651 tree
9652 exact_inverse (tree type, tree cst)
9653 {
9654 REAL_VALUE_TYPE r;
9655 tree unit_type, *elts;
9656 machine_mode mode;
9657 unsigned vec_nelts, i;
9658
9659 switch (TREE_CODE (cst))
9660 {
9661 case REAL_CST:
9662 r = TREE_REAL_CST (cst);
9663
9664 if (exact_real_inverse (TYPE_MODE (type), &r))
9665 return build_real (type, r);
9666
9667 return NULL_TREE;
9668
9669 case VECTOR_CST:
9670 vec_nelts = VECTOR_CST_NELTS (cst);
9671 elts = XALLOCAVEC (tree, vec_nelts);
9672 unit_type = TREE_TYPE (type);
9673 mode = TYPE_MODE (unit_type);
9674
9675 for (i = 0; i < vec_nelts; i++)
9676 {
9677 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9678 if (!exact_real_inverse (mode, &r))
9679 return NULL_TREE;
9680 elts[i] = build_real (unit_type, r);
9681 }
9682
9683 return build_vector (type, elts);
9684
9685 default:
9686 return NULL_TREE;
9687 }
9688 }
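
/* For example, the constant 2.0 yields 0.5, while 3.0 yields NULL_TREE
   because 1/3 has no exact binary floating-point representation.  */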
9689
9690 /* Mask out the tz least significant bits of X of type TYPE where
9691 tz is the number of trailing zeroes in Y. */
9692 static wide_int
9693 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9694 {
9695 int tz = wi::ctz (y);
9696 if (tz > 0)
9697 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9698 return x;
9699 }
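
/* For instance, if Y is 24 (binary 11000, three trailing zeros), the
   result is X with its low three bits cleared, i.e. X & ~7.  */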
9700
9701 /* Return true when T is an address and is known to be nonzero.
9702 For floating point we further ensure that T is not denormal.
9703 Similar logic is present in nonzero_address in rtlanal.h.
9704
9705 If the return value is based on the assumption that signed overflow
9706 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9707 change *STRICT_OVERFLOW_P. */
9708
9709 static bool
9710 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9711 {
9712 tree type = TREE_TYPE (t);
9713 enum tree_code code;
9714
9715 /* Doing something useful for floating point would need more work. */
9716 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9717 return false;
9718
9719 code = TREE_CODE (t);
9720 switch (TREE_CODE_CLASS (code))
9721 {
9722 case tcc_unary:
9723 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9724 strict_overflow_p);
9725 case tcc_binary:
9726 case tcc_comparison:
9727 return tree_binary_nonzero_warnv_p (code, type,
9728 TREE_OPERAND (t, 0),
9729 TREE_OPERAND (t, 1),
9730 strict_overflow_p);
9731 case tcc_constant:
9732 case tcc_declaration:
9733 case tcc_reference:
9734 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9735
9736 default:
9737 break;
9738 }
9739
9740 switch (code)
9741 {
9742 case TRUTH_NOT_EXPR:
9743 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9744 strict_overflow_p);
9745
9746 case TRUTH_AND_EXPR:
9747 case TRUTH_OR_EXPR:
9748 case TRUTH_XOR_EXPR:
9749 return tree_binary_nonzero_warnv_p (code, type,
9750 TREE_OPERAND (t, 0),
9751 TREE_OPERAND (t, 1),
9752 strict_overflow_p);
9753
9754 case COND_EXPR:
9755 case CONSTRUCTOR:
9756 case OBJ_TYPE_REF:
9757 case ASSERT_EXPR:
9758 case ADDR_EXPR:
9759 case WITH_SIZE_EXPR:
9760 case SSA_NAME:
9761 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9762
9763 case COMPOUND_EXPR:
9764 case MODIFY_EXPR:
9765 case BIND_EXPR:
9766 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9767 strict_overflow_p);
9768
9769 case SAVE_EXPR:
9770 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9771 strict_overflow_p);
9772
9773 case CALL_EXPR:
9774 {
9775 tree fndecl = get_callee_fndecl (t);
9776 if (!fndecl) return false;
9777 if (flag_delete_null_pointer_checks && !flag_check_new
9778 && DECL_IS_OPERATOR_NEW (fndecl)
9779 && !TREE_NOTHROW (fndecl))
9780 return true;
9781 if (flag_delete_null_pointer_checks
9782 && lookup_attribute ("returns_nonnull",
9783 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9784 return true;
9785 return alloca_call_p (t);
9786 }
9787
9788 default:
9789 break;
9790 }
9791 return false;
9792 }
9793
9794 /* Return true when T is an address and is known to be nonzero.
9795 Handle warnings about undefined signed overflow. */
9796
9797 static bool
9798 tree_expr_nonzero_p (tree t)
9799 {
9800 bool ret, strict_overflow_p;
9801
9802 strict_overflow_p = false;
9803 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9804 if (strict_overflow_p)
9805 fold_overflow_warning (("assuming signed overflow does not occur when "
9806 "determining that expression is always "
9807 "non-zero"),
9808 WARN_STRICT_OVERFLOW_MISC);
9809 return ret;
9810 }
9811
9812 /* Fold a binary expression of code CODE and type TYPE with operands
9813 OP0 and OP1. LOC is the location of the resulting expression.
9814 Return the folded expression if folding is successful. Otherwise,
9815 return NULL_TREE. */
9816
9817 tree
9818 fold_binary_loc (location_t loc,
9819 enum tree_code code, tree type, tree op0, tree op1)
9820 {
9821 enum tree_code_class kind = TREE_CODE_CLASS (code);
9822 tree arg0, arg1, tem;
9823 tree t1 = NULL_TREE;
9824 bool strict_overflow_p;
9825 unsigned int prec;
9826
9827 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9828 && TREE_CODE_LENGTH (code) == 2
9829 && op0 != NULL_TREE
9830 && op1 != NULL_TREE);
9831
9832 arg0 = op0;
9833 arg1 = op1;
9834
9835 /* Strip any conversions that don't change the mode. This is
9836 safe for every expression, except for a comparison expression
9837 because its signedness is derived from its operands. So, in
9838 the latter case, only strip conversions that don't change the
9839 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9840 preserved.
9841
9842 Note that this is done as an internal manipulation within the
9843 constant folder, in order to find the simplest representation
9844    of the arguments so that their form can be studied.  In any
9845    case, the appropriate type conversions should be put back in
9846 the tree that will get out of the constant folder. */
9847
9848 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9849 {
9850 STRIP_SIGN_NOPS (arg0);
9851 STRIP_SIGN_NOPS (arg1);
9852 }
9853 else
9854 {
9855 STRIP_NOPS (arg0);
9856 STRIP_NOPS (arg1);
9857 }
9858
9859 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9860 constant but we can't do arithmetic on them. */
9861 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9862 {
9863 tem = const_binop (code, type, arg0, arg1);
9864 if (tem != NULL_TREE)
9865 {
9866 if (TREE_TYPE (tem) != type)
9867 tem = fold_convert_loc (loc, type, tem);
9868 return tem;
9869 }
9870 }
9871
9872 /* If this is a commutative operation, and ARG0 is a constant, move it
9873 to ARG1 to reduce the number of tests below. */
9874 if (commutative_tree_code (code)
9875 && tree_swap_operands_p (arg0, arg1, true))
9876 return fold_build2_loc (loc, code, type, op1, op0);
9877
9878 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9879 to ARG1 to reduce the number of tests below. */
9880 if (kind == tcc_comparison
9881 && tree_swap_operands_p (arg0, arg1, true))
9882 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9883
9884 tem = generic_simplify (loc, code, type, op0, op1);
9885 if (tem)
9886 return tem;
9887
9888 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9889
9890 First check for cases where an arithmetic operation is applied to a
9891 compound, conditional, or comparison operation. Push the arithmetic
9892 operation inside the compound or conditional to see if any folding
9893 can then be done. Convert comparison to conditional for this purpose.
9894    This also optimizes non-constant cases that used to be done in
9895 expand_expr.
9896
9897    Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9898    where one of the operands is a comparison and the other is a comparison, a
9899 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9900 code below would make the expression more complex. Change it to a
9901 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9902 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9903
9904 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9905 || code == EQ_EXPR || code == NE_EXPR)
9906 && TREE_CODE (type) != VECTOR_TYPE
9907 && ((truth_value_p (TREE_CODE (arg0))
9908 && (truth_value_p (TREE_CODE (arg1))
9909 || (TREE_CODE (arg1) == BIT_AND_EXPR
9910 && integer_onep (TREE_OPERAND (arg1, 1)))))
9911 || (truth_value_p (TREE_CODE (arg1))
9912 && (truth_value_p (TREE_CODE (arg0))
9913 || (TREE_CODE (arg0) == BIT_AND_EXPR
9914 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9915 {
9916 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9917 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9918 : TRUTH_XOR_EXPR,
9919 boolean_type_node,
9920 fold_convert_loc (loc, boolean_type_node, arg0),
9921 fold_convert_loc (loc, boolean_type_node, arg1));
9922
9923 if (code == EQ_EXPR)
9924 tem = invert_truthvalue_loc (loc, tem);
9925
9926 return fold_convert_loc (loc, type, tem);
9927 }
9928
9929 if (TREE_CODE_CLASS (code) == tcc_binary
9930 || TREE_CODE_CLASS (code) == tcc_comparison)
9931 {
9932 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9933 {
9934 tem = fold_build2_loc (loc, code, type,
9935 fold_convert_loc (loc, TREE_TYPE (op0),
9936 TREE_OPERAND (arg0, 1)), op1);
9937 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9938 tem);
9939 }
9940 if (TREE_CODE (arg1) == COMPOUND_EXPR
9941 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9942 {
9943 tem = fold_build2_loc (loc, code, type, op0,
9944 fold_convert_loc (loc, TREE_TYPE (op1),
9945 TREE_OPERAND (arg1, 1)));
9946 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9947 tem);
9948 }
9949
9950 if (TREE_CODE (arg0) == COND_EXPR
9951 || TREE_CODE (arg0) == VEC_COND_EXPR
9952 || COMPARISON_CLASS_P (arg0))
9953 {
9954 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9955 arg0, arg1,
9956 /*cond_first_p=*/1);
9957 if (tem != NULL_TREE)
9958 return tem;
9959 }
9960
9961 if (TREE_CODE (arg1) == COND_EXPR
9962 || TREE_CODE (arg1) == VEC_COND_EXPR
9963 || COMPARISON_CLASS_P (arg1))
9964 {
9965 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9966 arg1, arg0,
9967 /*cond_first_p=*/0);
9968 if (tem != NULL_TREE)
9969 return tem;
9970 }
9971 }
9972
9973 switch (code)
9974 {
9975 case MEM_REF:
9976 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9977 if (TREE_CODE (arg0) == ADDR_EXPR
9978 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9979 {
9980 tree iref = TREE_OPERAND (arg0, 0);
9981 return fold_build2 (MEM_REF, type,
9982 TREE_OPERAND (iref, 0),
9983 int_const_binop (PLUS_EXPR, arg1,
9984 TREE_OPERAND (iref, 1)));
9985 }
9986
9987 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9988 if (TREE_CODE (arg0) == ADDR_EXPR
9989 && handled_component_p (TREE_OPERAND (arg0, 0)))
9990 {
9991 tree base;
9992 HOST_WIDE_INT coffset;
9993 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9994 &coffset);
9995 if (!base)
9996 return NULL_TREE;
9997 return fold_build2 (MEM_REF, type,
9998 build_fold_addr_expr (base),
9999 int_const_binop (PLUS_EXPR, arg1,
10000 size_int (coffset)));
10001 }
10002
10003 return NULL_TREE;
10004
10005 case POINTER_PLUS_EXPR:
10006 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10007 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10008 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10009 return fold_convert_loc (loc, type,
10010 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10011 fold_convert_loc (loc, sizetype,
10012 arg1),
10013 fold_convert_loc (loc, sizetype,
10014 arg0)));
10015
10016 return NULL_TREE;
10017
10018 case PLUS_EXPR:
10019 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10020 {
10021 /* X + (X / CST) * -CST is X % CST. */
10022 if (TREE_CODE (arg1) == MULT_EXPR
10023 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10024 && operand_equal_p (arg0,
10025 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10026 {
10027 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10028 tree cst1 = TREE_OPERAND (arg1, 1);
10029 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10030 cst1, cst0);
10031 if (sum && integer_zerop (sum))
10032 return fold_convert_loc (loc, type,
10033 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10034 TREE_TYPE (arg0), arg0,
10035 cst0));
10036 }
10037 }
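      /* For example, "x + (x / 8) * -8" folds to "x % 8" here, matching
	 the C identity x % n == x - (x / n) * n for truncating
	 division.  */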
10038
10039       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same
10040 	 or one of them being 1.  Make sure the type is not saturating and has the signedness of
10041 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10042 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10043 if ((TREE_CODE (arg0) == MULT_EXPR
10044 || TREE_CODE (arg1) == MULT_EXPR)
10045 && !TYPE_SATURATING (type)
10046 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10047 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10048 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10049 {
10050 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10051 if (tem)
10052 return tem;
10053 }
10054
10055 if (! FLOAT_TYPE_P (type))
10056 {
10057 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10058 with a constant, and the two constants have no bits in common,
10059 we should treat this as a BIT_IOR_EXPR since this may produce more
10060 simplifications. */
10061 if (TREE_CODE (arg0) == BIT_AND_EXPR
10062 && TREE_CODE (arg1) == BIT_AND_EXPR
10063 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10064 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10065 && wi::bit_and (TREE_OPERAND (arg0, 1),
10066 TREE_OPERAND (arg1, 1)) == 0)
10067 {
10068 code = BIT_IOR_EXPR;
10069 goto bit_ior;
10070 }
10071
10072 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10073 (plus (plus (mult) (mult)) (foo)) so that we can
10074 take advantage of the factoring cases below. */
10075 if (TYPE_OVERFLOW_WRAPS (type)
10076 && (((TREE_CODE (arg0) == PLUS_EXPR
10077 || TREE_CODE (arg0) == MINUS_EXPR)
10078 && TREE_CODE (arg1) == MULT_EXPR)
10079 || ((TREE_CODE (arg1) == PLUS_EXPR
10080 || TREE_CODE (arg1) == MINUS_EXPR)
10081 && TREE_CODE (arg0) == MULT_EXPR)))
10082 {
10083 tree parg0, parg1, parg, marg;
10084 enum tree_code pcode;
10085
10086 if (TREE_CODE (arg1) == MULT_EXPR)
10087 parg = arg0, marg = arg1;
10088 else
10089 parg = arg1, marg = arg0;
10090 pcode = TREE_CODE (parg);
10091 parg0 = TREE_OPERAND (parg, 0);
10092 parg1 = TREE_OPERAND (parg, 1);
10093 STRIP_NOPS (parg0);
10094 STRIP_NOPS (parg1);
10095
10096 if (TREE_CODE (parg0) == MULT_EXPR
10097 && TREE_CODE (parg1) != MULT_EXPR)
10098 return fold_build2_loc (loc, pcode, type,
10099 fold_build2_loc (loc, PLUS_EXPR, type,
10100 fold_convert_loc (loc, type,
10101 parg0),
10102 fold_convert_loc (loc, type,
10103 marg)),
10104 fold_convert_loc (loc, type, parg1));
10105 if (TREE_CODE (parg0) != MULT_EXPR
10106 && TREE_CODE (parg1) == MULT_EXPR)
10107 return
10108 fold_build2_loc (loc, PLUS_EXPR, type,
10109 fold_convert_loc (loc, type, parg0),
10110 fold_build2_loc (loc, pcode, type,
10111 fold_convert_loc (loc, type, marg),
10112 fold_convert_loc (loc, type,
10113 parg1)));
10114 }
10115 }
10116 else
10117 {
10118 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10119 to __complex__ ( x, y ). This is not the same for SNaNs or
10120 if signed zeros are involved. */
10121 if (!HONOR_SNANS (element_mode (arg0))
10122 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10123 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10124 {
10125 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10126 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10127 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10128 bool arg0rz = false, arg0iz = false;
10129 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10130 || (arg0i && (arg0iz = real_zerop (arg0i))))
10131 {
10132 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10133 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10134 if (arg0rz && arg1i && real_zerop (arg1i))
10135 {
10136 tree rp = arg1r ? arg1r
10137 : build1 (REALPART_EXPR, rtype, arg1);
10138 tree ip = arg0i ? arg0i
10139 : build1 (IMAGPART_EXPR, rtype, arg0);
10140 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10141 }
10142 else if (arg0iz && arg1r && real_zerop (arg1r))
10143 {
10144 tree rp = arg0r ? arg0r
10145 : build1 (REALPART_EXPR, rtype, arg0);
10146 tree ip = arg1i ? arg1i
10147 : build1 (IMAGPART_EXPR, rtype, arg1);
10148 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10149 }
10150 }
10151 }
10152
10153 if (flag_unsafe_math_optimizations
10154 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10155 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10156 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10157 return tem;
10158
10159 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10160 We associate floats only if the user has specified
10161 -fassociative-math. */
10162 if (flag_associative_math
10163 && TREE_CODE (arg1) == PLUS_EXPR
10164 && TREE_CODE (arg0) != MULT_EXPR)
10165 {
10166 tree tree10 = TREE_OPERAND (arg1, 0);
10167 tree tree11 = TREE_OPERAND (arg1, 1);
10168 if (TREE_CODE (tree11) == MULT_EXPR
10169 && TREE_CODE (tree10) == MULT_EXPR)
10170 {
10171 tree tree0;
10172 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10173 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10174 }
10175 }
10176       /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10177 We associate floats only if the user has specified
10178 -fassociative-math. */
10179 if (flag_associative_math
10180 && TREE_CODE (arg0) == PLUS_EXPR
10181 && TREE_CODE (arg1) != MULT_EXPR)
10182 {
10183 tree tree00 = TREE_OPERAND (arg0, 0);
10184 tree tree01 = TREE_OPERAND (arg0, 1);
10185 if (TREE_CODE (tree01) == MULT_EXPR
10186 && TREE_CODE (tree00) == MULT_EXPR)
10187 {
10188 tree tree0;
10189 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10190 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10191 }
10192 }
10193 }
10194
10195 bit_rotate:
10196 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10197 is a rotate of A by C1 bits. */
10198 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10199 is a rotate of A by B bits. */
10200 {
10201 enum tree_code code0, code1;
10202 tree rtype;
10203 code0 = TREE_CODE (arg0);
10204 code1 = TREE_CODE (arg1);
10205 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10206 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10207 && operand_equal_p (TREE_OPERAND (arg0, 0),
10208 TREE_OPERAND (arg1, 0), 0)
10209 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10210 TYPE_UNSIGNED (rtype))
10211 /* Only create rotates in complete modes. Other cases are not
10212 expanded properly. */
10213 && (element_precision (rtype)
10214 == element_precision (TYPE_MODE (rtype))))
10215 {
10216 tree tree01, tree11;
10217 enum tree_code code01, code11;
10218
10219 tree01 = TREE_OPERAND (arg0, 1);
10220 tree11 = TREE_OPERAND (arg1, 1);
10221 STRIP_NOPS (tree01);
10222 STRIP_NOPS (tree11);
10223 code01 = TREE_CODE (tree01);
10224 code11 = TREE_CODE (tree11);
10225 if (code01 == INTEGER_CST
10226 && code11 == INTEGER_CST
10227 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10228 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10229 {
10230 tem = build2_loc (loc, LROTATE_EXPR,
10231 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10232 TREE_OPERAND (arg0, 0),
10233 code0 == LSHIFT_EXPR ? tree01 : tree11);
10234 return fold_convert_loc (loc, type, tem);
10235 }
10236 else if (code11 == MINUS_EXPR)
10237 {
10238 tree tree110, tree111;
10239 tree110 = TREE_OPERAND (tree11, 0);
10240 tree111 = TREE_OPERAND (tree11, 1);
10241 STRIP_NOPS (tree110);
10242 STRIP_NOPS (tree111);
10243 if (TREE_CODE (tree110) == INTEGER_CST
10244 && 0 == compare_tree_int (tree110,
10245 element_precision
10246 (TREE_TYPE (TREE_OPERAND
10247 (arg0, 0))))
10248 && operand_equal_p (tree01, tree111, 0))
10249 return
10250 fold_convert_loc (loc, type,
10251 build2 ((code0 == LSHIFT_EXPR
10252 ? LROTATE_EXPR
10253 : RROTATE_EXPR),
10254 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10255 TREE_OPERAND (arg0, 0), tree01));
10256 }
10257 else if (code01 == MINUS_EXPR)
10258 {
10259 tree tree010, tree011;
10260 tree010 = TREE_OPERAND (tree01, 0);
10261 tree011 = TREE_OPERAND (tree01, 1);
10262 STRIP_NOPS (tree010);
10263 STRIP_NOPS (tree011);
10264 if (TREE_CODE (tree010) == INTEGER_CST
10265 && 0 == compare_tree_int (tree010,
10266 element_precision
10267 (TREE_TYPE (TREE_OPERAND
10268 (arg0, 0))))
10269 && operand_equal_p (tree11, tree011, 0))
10270 return fold_convert_loc
10271 (loc, type,
10272 build2 ((code0 != LSHIFT_EXPR
10273 ? LROTATE_EXPR
10274 : RROTATE_EXPR),
10275 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10276 TREE_OPERAND (arg0, 0), tree11));
10277 }
10278 }
10279 }
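  /* For example, for a 32-bit unsigned A, "(A << 3) + (A >> 29)"
     matches the first pattern above (3 + 29 == 32) and becomes a left
     rotate of A by 3 bits.  */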
10280
10281 associate:
10282   /* In most languages, we can't associate operations on floats through
10283      parentheses.  Rather than remember where the parentheses were, we
10284      don't associate floats at all, unless the user has specified
10285      -fassociative-math.
10286      And we need to make sure the type is not saturating.  */
10287
10288 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10289 && !TYPE_SATURATING (type))
10290 {
10291 tree var0, con0, lit0, minus_lit0;
10292 tree var1, con1, lit1, minus_lit1;
10293 tree atype = type;
10294 bool ok = true;
10295
10296 /* Split both trees into variables, constants, and literals. Then
10297 associate each group together, the constants with literals,
10298 then the result with variables. This increases the chances of
10299 literals being recombined later and of generating relocatable
10300 expressions for the sum of a constant and literal. */
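	  /* For example, with unsigned x and y, (x + 1) + (y + 2) splits
	     into var0 = x, lit0 = 1, var1 = y, lit1 = 2 and is rebuilt
	     below as (x + y) + 3. (For signed types with undefined
	     overflow, the two-variable case is rejected further down
	     unless both variables are the same modulo negation and
	     bit-pattern preserving conversions.) */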
10301 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10302 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10303 code == MINUS_EXPR);
10304
10305 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10306 if (code == MINUS_EXPR)
10307 code = PLUS_EXPR;
10308
10309 /* With undefined overflow prefer doing association in a type
10310 which wraps on overflow, if that is one of the operand types. */
10311 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10312 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10313 {
10314 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10315 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10316 atype = TREE_TYPE (arg0);
10317 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10318 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10319 atype = TREE_TYPE (arg1);
10320 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10321 }
10322
10323 /* With undefined overflow we can only associate constants with one
10324 variable, and constants whose association doesn't overflow. */
10325 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10326 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10327 {
10328 if (var0 && var1)
10329 {
10330 tree tmp0 = var0;
10331 tree tmp1 = var1;
10332
10333 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10334 tmp0 = TREE_OPERAND (tmp0, 0);
10335 if (CONVERT_EXPR_P (tmp0)
10336 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10337 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10338 <= TYPE_PRECISION (atype)))
10339 tmp0 = TREE_OPERAND (tmp0, 0);
10340 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10341 tmp1 = TREE_OPERAND (tmp1, 0);
10342 if (CONVERT_EXPR_P (tmp1)
10343 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10344 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10345 <= TYPE_PRECISION (atype)))
10346 tmp1 = TREE_OPERAND (tmp1, 0);
10347 /* The only case we can still associate with two variables
10348 is if they are the same, modulo negation and bit-pattern
10349 preserving conversions. */
10350 if (!operand_equal_p (tmp0, tmp1, 0))
10351 ok = false;
10352 }
10353 }
10354
10355 /* Only do something if we found more than two objects. Otherwise,
10356 nothing has changed and we risk infinite recursion. */
10357 if (ok
10358 && (2 < ((var0 != 0) + (var1 != 0)
10359 + (con0 != 0) + (con1 != 0)
10360 + (lit0 != 0) + (lit1 != 0)
10361 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10362 {
10363 bool any_overflows = false;
10364 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10365 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10366 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10367 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10368 var0 = associate_trees (loc, var0, var1, code, atype);
10369 con0 = associate_trees (loc, con0, con1, code, atype);
10370 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10371 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10372 code, atype);
10373
10374 /* Preserve the MINUS_EXPR if the negative part of the literal is
10375 greater than the positive part. Otherwise, the multiplicative
10376 	     folding code (i.e. extract_muldiv) may be fooled in case
10377 unsigned constants are subtracted, like in the following
10378 example: ((X*2 + 4) - 8U)/2. */
10379 if (minus_lit0 && lit0)
10380 {
10381 if (TREE_CODE (lit0) == INTEGER_CST
10382 && TREE_CODE (minus_lit0) == INTEGER_CST
10383 && tree_int_cst_lt (lit0, minus_lit0))
10384 {
10385 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10386 MINUS_EXPR, atype);
10387 lit0 = 0;
10388 }
10389 else
10390 {
10391 lit0 = associate_trees (loc, lit0, minus_lit0,
10392 MINUS_EXPR, atype);
10393 minus_lit0 = 0;
10394 }
10395 }
10396
10397 /* Don't introduce overflows through reassociation. */
10398 if (!any_overflows
10399 && ((lit0 && TREE_OVERFLOW (lit0))
10400 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10401 return NULL_TREE;
10402
10403 if (minus_lit0)
10404 {
10405 if (con0 == 0)
10406 return
10407 fold_convert_loc (loc, type,
10408 associate_trees (loc, var0, minus_lit0,
10409 MINUS_EXPR, atype));
10410 else
10411 {
10412 con0 = associate_trees (loc, con0, minus_lit0,
10413 MINUS_EXPR, atype);
10414 return
10415 fold_convert_loc (loc, type,
10416 associate_trees (loc, var0, con0,
10417 PLUS_EXPR, atype));
10418 }
10419 }
10420
10421 con0 = associate_trees (loc, con0, lit0, code, atype);
10422 return
10423 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10424 code, atype));
10425 }
10426 }
10427
10428 return NULL_TREE;
10429
10430 case MINUS_EXPR:
10431 /* Pointer simplifications for subtraction, simple reassociations. */
10432 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10433 {
10434 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
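	  /* For example, with char *p and char *q, (p + 4) - (q + 12)
	     becomes (p - q) + (4 - 12), which further folding reduces
	     to (p - q) - 8. */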
10435 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10436 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10437 {
10438 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10439 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10440 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10441 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10442 return fold_build2_loc (loc, PLUS_EXPR, type,
10443 fold_build2_loc (loc, MINUS_EXPR, type,
10444 arg00, arg10),
10445 fold_build2_loc (loc, MINUS_EXPR, type,
10446 arg01, arg11));
10447 }
10448 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10449 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10450 {
10451 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10452 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10453 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10454 fold_convert_loc (loc, type, arg1));
10455 if (tmp)
10456 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10457 }
10458 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10459 simplifies. */
10460 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10461 {
10462 tree arg10 = fold_convert_loc (loc, type,
10463 TREE_OPERAND (arg1, 0));
10464 tree arg11 = fold_convert_loc (loc, type,
10465 TREE_OPERAND (arg1, 1));
10466 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10467 fold_convert_loc (loc, type, arg0),
10468 arg10);
10469 if (tmp)
10470 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10471 }
10472 }
10473 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10474 if (TREE_CODE (arg0) == NEGATE_EXPR
10475 && negate_expr_p (arg1)
10476 && reorder_operands_p (arg0, arg1))
10477 return fold_build2_loc (loc, MINUS_EXPR, type,
10478 fold_convert_loc (loc, type,
10479 negate_expr (arg1)),
10480 fold_convert_loc (loc, type,
10481 TREE_OPERAND (arg0, 0)));
10482
10483 /* X - (X / Y) * Y is X % Y. */
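      /* E.g. for int a, a - (a / 8) * 8 folds to a % 8, with X == a
	 and Y == 8. */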
10484 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10485 && TREE_CODE (arg1) == MULT_EXPR
10486 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10487 && operand_equal_p (arg0,
10488 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10489 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10490 TREE_OPERAND (arg1, 1), 0))
10491 return
10492 fold_convert_loc (loc, type,
10493 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10494 arg0, TREE_OPERAND (arg1, 1)));
10495
10496 if (! FLOAT_TYPE_P (type))
10497 {
10498 /* Fold A - (A & B) into ~B & A. */
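	  /* E.g. x - (x & 0xff) folds to ~0xff & x, i.e. x with its low
	     byte cleared, provided x has no side effects. */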
10499 if (!TREE_SIDE_EFFECTS (arg0)
10500 && TREE_CODE (arg1) == BIT_AND_EXPR)
10501 {
10502 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10503 {
10504 tree arg10 = fold_convert_loc (loc, type,
10505 TREE_OPERAND (arg1, 0));
10506 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10507 fold_build1_loc (loc, BIT_NOT_EXPR,
10508 type, arg10),
10509 fold_convert_loc (loc, type, arg0));
10510 }
10511 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10512 {
10513 tree arg11 = fold_convert_loc (loc,
10514 type, TREE_OPERAND (arg1, 1));
10515 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10516 fold_build1_loc (loc, BIT_NOT_EXPR,
10517 type, arg11),
10518 fold_convert_loc (loc, type, arg0));
10519 }
10520 }
10521
10522 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10523 any power of 2 minus 1. */
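	  /* Illustrative example with B == 7 (a power of 2 minus 1):
	     (x & ~7) - (x & 7) folds to (x ^ 7) - 7. */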
10524 if (TREE_CODE (arg0) == BIT_AND_EXPR
10525 && TREE_CODE (arg1) == BIT_AND_EXPR
10526 && operand_equal_p (TREE_OPERAND (arg0, 0),
10527 TREE_OPERAND (arg1, 0), 0))
10528 {
10529 tree mask0 = TREE_OPERAND (arg0, 1);
10530 tree mask1 = TREE_OPERAND (arg1, 1);
10531 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10532
10533 if (operand_equal_p (tem, mask1, 0))
10534 {
10535 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10536 TREE_OPERAND (arg0, 0), mask1);
10537 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10538 }
10539 }
10540 }
10541
10542 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10543 __complex__ ( x, -y ). This is not the same for SNaNs or if
10544 signed zeros are involved. */
10545 if (!HONOR_SNANS (element_mode (arg0))
10546 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10547 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10548 {
10549 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10550 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10551 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10552 bool arg0rz = false, arg0iz = false;
10553 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10554 || (arg0i && (arg0iz = real_zerop (arg0i))))
10555 {
10556 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10557 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10558 if (arg0rz && arg1i && real_zerop (arg1i))
10559 {
10560 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10561 arg1r ? arg1r
10562 : build1 (REALPART_EXPR, rtype, arg1));
10563 tree ip = arg0i ? arg0i
10564 : build1 (IMAGPART_EXPR, rtype, arg0);
10565 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10566 }
10567 else if (arg0iz && arg1r && real_zerop (arg1r))
10568 {
10569 tree rp = arg0r ? arg0r
10570 : build1 (REALPART_EXPR, rtype, arg0);
10571 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10572 arg1i ? arg1i
10573 : build1 (IMAGPART_EXPR, rtype, arg1));
10574 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10575 }
10576 }
10577 }
10578
10579 /* A - B -> A + (-B) if B is easily negatable. */
10580 if (negate_expr_p (arg1)
10581 && !TYPE_OVERFLOW_SANITIZED (type)
10582 && ((FLOAT_TYPE_P (type)
10583 /* Avoid this transformation if B is a positive REAL_CST. */
10584 && (TREE_CODE (arg1) != REAL_CST
10585 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10586 || INTEGRAL_TYPE_P (type)))
10587 return fold_build2_loc (loc, PLUS_EXPR, type,
10588 fold_convert_loc (loc, type, arg0),
10589 fold_convert_loc (loc, type,
10590 negate_expr (arg1)));
10591
10592 /* Try folding difference of addresses. */
10593 {
10594 HOST_WIDE_INT diff;
10595
10596 if ((TREE_CODE (arg0) == ADDR_EXPR
10597 || TREE_CODE (arg1) == ADDR_EXPR)
10598 && ptr_difference_const (arg0, arg1, &diff))
10599 return build_int_cst_type (type, diff);
10600 }
10601
10602 /* Fold &a[i] - &a[j] to i-j. */
10603 if (TREE_CODE (arg0) == ADDR_EXPR
10604 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10605 && TREE_CODE (arg1) == ADDR_EXPR
10606 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10607 {
10608 tree tem = fold_addr_of_array_ref_difference (loc, type,
10609 TREE_OPERAND (arg0, 0),
10610 TREE_OPERAND (arg1, 0));
10611 if (tem)
10612 return tem;
10613 }
10614
10615 if (FLOAT_TYPE_P (type)
10616 && flag_unsafe_math_optimizations
10617 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10618 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10619 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10620 return tem;
10621
10622 	 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same
10623 	    or 1. Make sure the type is not saturating and has the signedness of
10624 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10625 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10626 if ((TREE_CODE (arg0) == MULT_EXPR
10627 || TREE_CODE (arg1) == MULT_EXPR)
10628 && !TYPE_SATURATING (type)
10629 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10630 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10631 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10632 {
10633 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10634 if (tem)
10635 return tem;
10636 }
10637
10638 goto associate;
10639
10640 case MULT_EXPR:
10641 /* (-A) * (-B) -> A * B */
10642 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10643 return fold_build2_loc (loc, MULT_EXPR, type,
10644 fold_convert_loc (loc, type,
10645 TREE_OPERAND (arg0, 0)),
10646 fold_convert_loc (loc, type,
10647 negate_expr (arg1)));
10648 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10649 return fold_build2_loc (loc, MULT_EXPR, type,
10650 fold_convert_loc (loc, type,
10651 negate_expr (arg0)),
10652 fold_convert_loc (loc, type,
10653 TREE_OPERAND (arg1, 0)));
10654
10655 if (! FLOAT_TYPE_P (type))
10656 {
10657 /* Transform x * -C into -x * C if x is easily negatable. */
10658 if (TREE_CODE (arg1) == INTEGER_CST
10659 && tree_int_cst_sgn (arg1) == -1
10660 && negate_expr_p (arg0)
10661 && (tem = negate_expr (arg1)) != arg1
10662 && !TREE_OVERFLOW (tem))
10663 return fold_build2_loc (loc, MULT_EXPR, type,
10664 fold_convert_loc (loc, type,
10665 negate_expr (arg0)),
10666 tem);
10667
10668 /* (a * (1 << b)) is (a << b) */
10669 if (TREE_CODE (arg1) == LSHIFT_EXPR
10670 && integer_onep (TREE_OPERAND (arg1, 0)))
10671 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10672 TREE_OPERAND (arg1, 1));
10673 if (TREE_CODE (arg0) == LSHIFT_EXPR
10674 && integer_onep (TREE_OPERAND (arg0, 0)))
10675 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10676 TREE_OPERAND (arg0, 1));
10677
10678 /* (A + A) * C -> A * 2 * C */
10679 if (TREE_CODE (arg0) == PLUS_EXPR
10680 && TREE_CODE (arg1) == INTEGER_CST
10681 && operand_equal_p (TREE_OPERAND (arg0, 0),
10682 TREE_OPERAND (arg0, 1), 0))
10683 return fold_build2_loc (loc, MULT_EXPR, type,
10684 omit_one_operand_loc (loc, type,
10685 TREE_OPERAND (arg0, 0),
10686 TREE_OPERAND (arg0, 1)),
10687 fold_build2_loc (loc, MULT_EXPR, type,
10688 build_int_cst (type, 2) , arg1));
10689
10690 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10691 sign-changing only. */
10692 if (TREE_CODE (arg1) == INTEGER_CST
10693 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10694 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10695 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10696
10697 strict_overflow_p = false;
10698 if (TREE_CODE (arg1) == INTEGER_CST
10699 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10700 &strict_overflow_p)))
10701 {
10702 if (strict_overflow_p)
10703 fold_overflow_warning (("assuming signed overflow does not "
10704 "occur when simplifying "
10705 "multiplication"),
10706 WARN_STRICT_OVERFLOW_MISC);
10707 return fold_convert_loc (loc, type, tem);
10708 }
10709
10710 /* Optimize z * conj(z) for integer complex numbers. */
10711 if (TREE_CODE (arg0) == CONJ_EXPR
10712 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10713 return fold_mult_zconjz (loc, type, arg1);
10714 if (TREE_CODE (arg1) == CONJ_EXPR
10715 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10716 return fold_mult_zconjz (loc, type, arg0);
10717 }
10718 else
10719 {
10720 	  /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10721 	     the result for floating point types due to rounding, so it is applied
10722 	     only if -fassociative-math was specified. */
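	  /* E.g. (2.0 / x) * 3.0 is rewritten as 6.0 / x. */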
10723 if (flag_associative_math
10724 && TREE_CODE (arg0) == RDIV_EXPR
10725 && TREE_CODE (arg1) == REAL_CST
10726 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10727 {
10728 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10729 arg1);
10730 if (tem)
10731 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10732 TREE_OPERAND (arg0, 1));
10733 }
10734
10735 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10736 if (operand_equal_p (arg0, arg1, 0))
10737 {
10738 tree tem = fold_strip_sign_ops (arg0);
10739 if (tem != NULL_TREE)
10740 {
10741 tem = fold_convert_loc (loc, type, tem);
10742 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10743 }
10744 }
10745
10746 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10747 This is not the same for NaNs or if signed zeros are
10748 involved. */
10749 if (!HONOR_NANS (element_mode (arg0))
10750 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10751 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10752 && TREE_CODE (arg1) == COMPLEX_CST
10753 && real_zerop (TREE_REALPART (arg1)))
10754 {
10755 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10756 if (real_onep (TREE_IMAGPART (arg1)))
10757 return
10758 fold_build2_loc (loc, COMPLEX_EXPR, type,
10759 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10760 rtype, arg0)),
10761 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10762 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10763 return
10764 fold_build2_loc (loc, COMPLEX_EXPR, type,
10765 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10766 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10767 rtype, arg0)));
10768 }
10769
10770 /* Optimize z * conj(z) for floating point complex numbers.
10771 Guarded by flag_unsafe_math_optimizations as non-finite
10772 imaginary components don't produce scalar results. */
10773 if (flag_unsafe_math_optimizations
10774 && TREE_CODE (arg0) == CONJ_EXPR
10775 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10776 return fold_mult_zconjz (loc, type, arg1);
10777 if (flag_unsafe_math_optimizations
10778 && TREE_CODE (arg1) == CONJ_EXPR
10779 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10780 return fold_mult_zconjz (loc, type, arg0);
10781
10782 if (flag_unsafe_math_optimizations)
10783 {
10784 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10785 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10786
10787 /* Optimizations of root(...)*root(...). */
10788 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10789 {
10790 tree rootfn, arg;
10791 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10792 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10793
10794 /* Optimize sqrt(x)*sqrt(x) as x. */
10795 if (BUILTIN_SQRT_P (fcode0)
10796 && operand_equal_p (arg00, arg10, 0)
10797 && ! HONOR_SNANS (element_mode (type)))
10798 return arg00;
10799
10800 /* Optimize root(x)*root(y) as root(x*y). */
10801 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10802 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10803 return build_call_expr_loc (loc, rootfn, 1, arg);
10804 }
10805
10806 /* Optimize expN(x)*expN(y) as expN(x+y). */
10807 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10808 {
10809 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10810 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10811 CALL_EXPR_ARG (arg0, 0),
10812 CALL_EXPR_ARG (arg1, 0));
10813 return build_call_expr_loc (loc, expfn, 1, arg);
10814 }
10815
10816 /* Optimizations of pow(...)*pow(...). */
10817 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10818 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10819 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10820 {
10821 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10822 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10823 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10824 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10825
10826 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10827 if (operand_equal_p (arg01, arg11, 0))
10828 {
10829 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10830 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10831 arg00, arg10);
10832 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10833 }
10834
10835 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10836 if (operand_equal_p (arg00, arg10, 0))
10837 {
10838 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10839 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10840 arg01, arg11);
10841 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10842 }
10843 }
10844
10845 /* Optimize tan(x)*cos(x) as sin(x). */
10846 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10847 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10848 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10849 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10850 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10851 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10852 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10853 CALL_EXPR_ARG (arg1, 0), 0))
10854 {
10855 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10856
10857 if (sinfn != NULL_TREE)
10858 return build_call_expr_loc (loc, sinfn, 1,
10859 CALL_EXPR_ARG (arg0, 0));
10860 }
10861
10862 /* Optimize x*pow(x,c) as pow(x,c+1). */
10863 if (fcode1 == BUILT_IN_POW
10864 || fcode1 == BUILT_IN_POWF
10865 || fcode1 == BUILT_IN_POWL)
10866 {
10867 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10868 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10869 if (TREE_CODE (arg11) == REAL_CST
10870 && !TREE_OVERFLOW (arg11)
10871 && operand_equal_p (arg0, arg10, 0))
10872 {
10873 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10874 REAL_VALUE_TYPE c;
10875 tree arg;
10876
10877 c = TREE_REAL_CST (arg11);
10878 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10879 arg = build_real (type, c);
10880 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10881 }
10882 }
10883
10884 /* Optimize pow(x,c)*x as pow(x,c+1). */
10885 if (fcode0 == BUILT_IN_POW
10886 || fcode0 == BUILT_IN_POWF
10887 || fcode0 == BUILT_IN_POWL)
10888 {
10889 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10890 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10891 if (TREE_CODE (arg01) == REAL_CST
10892 && !TREE_OVERFLOW (arg01)
10893 && operand_equal_p (arg1, arg00, 0))
10894 {
10895 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10896 REAL_VALUE_TYPE c;
10897 tree arg;
10898
10899 c = TREE_REAL_CST (arg01);
10900 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10901 arg = build_real (type, c);
10902 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10903 }
10904 }
10905
10906 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10907 if (!in_gimple_form
10908 && optimize
10909 && operand_equal_p (arg0, arg1, 0))
10910 {
10911 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10912
10913 if (powfn)
10914 {
10915 tree arg = build_real (type, dconst2);
10916 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10917 }
10918 }
10919 }
10920 }
10921 goto associate;
10922
10923 case BIT_IOR_EXPR:
10924 bit_ior:
10925 /* ~X | X is -1. */
10926 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10927 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10928 {
10929 t1 = build_zero_cst (type);
10930 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10931 return omit_one_operand_loc (loc, type, t1, arg1);
10932 }
10933
10934 /* X | ~X is -1. */
10935 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10936 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10937 {
10938 t1 = build_zero_cst (type);
10939 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10940 return omit_one_operand_loc (loc, type, t1, arg0);
10941 }
10942
10943 /* Canonicalize (X & C1) | C2. */
10944 if (TREE_CODE (arg0) == BIT_AND_EXPR
10945 && TREE_CODE (arg1) == INTEGER_CST
10946 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10947 {
10948 int width = TYPE_PRECISION (type), w;
10949 wide_int c1 = TREE_OPERAND (arg0, 1);
10950 wide_int c2 = arg1;
10951
10952 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10953 if ((c1 & c2) == c1)
10954 return omit_one_operand_loc (loc, type, arg1,
10955 TREE_OPERAND (arg0, 0));
10956
10957 wide_int msk = wi::mask (width, false,
10958 TYPE_PRECISION (TREE_TYPE (arg1)));
10959
10960 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10961 if (msk.and_not (c1 | c2) == 0)
10962 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10963 TREE_OPERAND (arg0, 0), arg1);
10964
10965 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10966 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10967 mode which allows further optimizations. */
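	  /* Illustrative examples: (x & 0x0f) | 0xff folds to 0xff via the
	     (C1&C2) == C1 case above, while (x & 0x3f) | 0x0f becomes
	     (x & 0x30) | 0x0f here. (x & 0xff) | 0x0f is deliberately
	     left alone, since 0xff is already the mask of QImode. */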
10968 c1 &= msk;
10969 c2 &= msk;
10970 wide_int c3 = c1.and_not (c2);
10971 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10972 {
10973 wide_int mask = wi::mask (w, false,
10974 TYPE_PRECISION (type));
10975 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10976 {
10977 c3 = mask;
10978 break;
10979 }
10980 }
10981
10982 if (c3 != c1)
10983 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10984 fold_build2_loc (loc, BIT_AND_EXPR, type,
10985 TREE_OPERAND (arg0, 0),
10986 wide_int_to_tree (type,
10987 c3)),
10988 arg1);
10989 }
10990
10991 /* (X & ~Y) | (~X & Y) is X ^ Y */
10992 if (TREE_CODE (arg0) == BIT_AND_EXPR
10993 && TREE_CODE (arg1) == BIT_AND_EXPR)
10994 {
10995 tree a0, a1, l0, l1, n0, n1;
10996
10997 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10998 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10999
11000 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11001 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11002
11003 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11004 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11005
11006 if ((operand_equal_p (n0, a0, 0)
11007 && operand_equal_p (n1, a1, 0))
11008 || (operand_equal_p (n0, a1, 0)
11009 && operand_equal_p (n1, a0, 0)))
11010 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11011 }
11012
11013 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11014 if (t1 != NULL_TREE)
11015 return t1;
11016
11017 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11018
11019 This results in more efficient code for machines without a NAND
11020 instruction. Combine will canonicalize to the first form
11021 which will allow use of NAND instructions provided by the
11022 backend if they exist. */
11023 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11024 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11025 {
11026 return
11027 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11028 build2 (BIT_AND_EXPR, type,
11029 fold_convert_loc (loc, type,
11030 TREE_OPERAND (arg0, 0)),
11031 fold_convert_loc (loc, type,
11032 TREE_OPERAND (arg1, 0))));
11033 }
11034
11035 /* See if this can be simplified into a rotate first. If that
11036 	 is unsuccessful, continue in the association code. */
11037 goto bit_rotate;
11038
11039 case BIT_XOR_EXPR:
11040 /* ~X ^ X is -1. */
11041 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11042 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11043 {
11044 t1 = build_zero_cst (type);
11045 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11046 return omit_one_operand_loc (loc, type, t1, arg1);
11047 }
11048
11049 /* X ^ ~X is -1. */
11050 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11051 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11052 {
11053 t1 = build_zero_cst (type);
11054 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11055 return omit_one_operand_loc (loc, type, t1, arg0);
11056 }
11057
11058 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11059 with a constant, and the two constants have no bits in common,
11060 we should treat this as a BIT_IOR_EXPR since this may produce more
11061 simplifications. */
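	  /* E.g. (x & 0x0f) ^ (y & 0xf0): the two constants share no bits,
	     so this is handled as (x & 0x0f) | (y & 0xf0), which may then
	     simplify further. */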
11062 if (TREE_CODE (arg0) == BIT_AND_EXPR
11063 && TREE_CODE (arg1) == BIT_AND_EXPR
11064 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11065 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11066 && wi::bit_and (TREE_OPERAND (arg0, 1),
11067 TREE_OPERAND (arg1, 1)) == 0)
11068 {
11069 code = BIT_IOR_EXPR;
11070 goto bit_ior;
11071 }
11072
11073       /* (X | Y) ^ X -> Y & ~X. */
11074 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11075 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11076 {
11077 tree t2 = TREE_OPERAND (arg0, 1);
11078 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11079 arg1);
11080 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11081 fold_convert_loc (loc, type, t2),
11082 fold_convert_loc (loc, type, t1));
11083 return t1;
11084 }
11085
11086       /* (Y | X) ^ X -> Y & ~X. */
11087 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11088 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11089 {
11090 tree t2 = TREE_OPERAND (arg0, 0);
11091 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11092 arg1);
11093 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11094 fold_convert_loc (loc, type, t2),
11095 fold_convert_loc (loc, type, t1));
11096 return t1;
11097 }
11098
11099       /* X ^ (X | Y) -> Y & ~X. */
11100 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11101 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11102 {
11103 tree t2 = TREE_OPERAND (arg1, 1);
11104 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11105 arg0);
11106 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11107 fold_convert_loc (loc, type, t2),
11108 fold_convert_loc (loc, type, t1));
11109 return t1;
11110 }
11111
11112       /* X ^ (Y | X) -> Y & ~X. */
11113 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11114 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11115 {
11116 tree t2 = TREE_OPERAND (arg1, 0);
11117 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11118 arg0);
11119 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11120 fold_convert_loc (loc, type, t2),
11121 fold_convert_loc (loc, type, t1));
11122 return t1;
11123 }
11124
11125 /* Convert ~X ^ ~Y to X ^ Y. */
11126 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11127 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11128 return fold_build2_loc (loc, code, type,
11129 fold_convert_loc (loc, type,
11130 TREE_OPERAND (arg0, 0)),
11131 fold_convert_loc (loc, type,
11132 TREE_OPERAND (arg1, 0)));
11133
11134 /* Convert ~X ^ C to X ^ ~C. */
11135 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11136 && TREE_CODE (arg1) == INTEGER_CST)
11137 return fold_build2_loc (loc, code, type,
11138 fold_convert_loc (loc, type,
11139 TREE_OPERAND (arg0, 0)),
11140 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11141
11142 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11143 if (TREE_CODE (arg0) == BIT_AND_EXPR
11144 && INTEGRAL_TYPE_P (type)
11145 && integer_onep (TREE_OPERAND (arg0, 1))
11146 && integer_onep (arg1))
11147 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11148 build_zero_cst (TREE_TYPE (arg0)));
11149
11150 /* Fold (X & Y) ^ Y as ~X & Y. */
11151 if (TREE_CODE (arg0) == BIT_AND_EXPR
11152 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11153 {
11154 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11155 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11156 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11157 fold_convert_loc (loc, type, arg1));
11158 }
11159 /* Fold (X & Y) ^ X as ~Y & X. */
11160 if (TREE_CODE (arg0) == BIT_AND_EXPR
11161 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11162 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11163 {
11164 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11165 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11166 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11167 fold_convert_loc (loc, type, arg1));
11168 }
11169 /* Fold X ^ (X & Y) as X & ~Y. */
11170 if (TREE_CODE (arg1) == BIT_AND_EXPR
11171 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11172 {
11173 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11174 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11175 fold_convert_loc (loc, type, arg0),
11176 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11177 }
11178 /* Fold X ^ (Y & X) as ~Y & X. */
11179 if (TREE_CODE (arg1) == BIT_AND_EXPR
11180 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11181 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11182 {
11183 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11184 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11185 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11186 fold_convert_loc (loc, type, arg0));
11187 }
11188
11189 /* See if this can be simplified into a rotate first. If that
11190 	 is unsuccessful, continue in the association code. */
11191 goto bit_rotate;
11192
11193 case BIT_AND_EXPR:
11194 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11195 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11196 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11197 || (TREE_CODE (arg0) == EQ_EXPR
11198 && integer_zerop (TREE_OPERAND (arg0, 1))))
11199 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11200 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11201
11202       /* X & ~X, X & (X == 0), and X & !X are always zero. */
11203 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11204 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11205 || (TREE_CODE (arg1) == EQ_EXPR
11206 && integer_zerop (TREE_OPERAND (arg1, 1))))
11207 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11208 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11209
11210 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11211 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11212 && INTEGRAL_TYPE_P (type)
11213 && integer_onep (TREE_OPERAND (arg0, 1))
11214 && integer_onep (arg1))
11215 {
11216 tree tem2;
11217 tem = TREE_OPERAND (arg0, 0);
11218 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11219 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11220 tem, tem2);
11221 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11222 build_zero_cst (TREE_TYPE (tem)));
11223 }
11224 /* Fold ~X & 1 as (X & 1) == 0. */
11225 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11226 && INTEGRAL_TYPE_P (type)
11227 && integer_onep (arg1))
11228 {
11229 tree tem2;
11230 tem = TREE_OPERAND (arg0, 0);
11231 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11232 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11233 tem, tem2);
11234 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11235 build_zero_cst (TREE_TYPE (tem)));
11236 }
11237 /* Fold !X & 1 as X == 0. */
11238 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11239 && integer_onep (arg1))
11240 {
11241 tem = TREE_OPERAND (arg0, 0);
11242 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11243 build_zero_cst (TREE_TYPE (tem)));
11244 }
11245
11246 /* Fold (X ^ Y) & Y as ~X & Y. */
11247 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11248 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11249 {
11250 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11251 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11252 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11253 fold_convert_loc (loc, type, arg1));
11254 }
11255 /* Fold (X ^ Y) & X as ~Y & X. */
11256 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11257 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11258 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11259 {
11260 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11261 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11262 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11263 fold_convert_loc (loc, type, arg1));
11264 }
11265 /* Fold X & (X ^ Y) as X & ~Y. */
11266 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11267 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11268 {
11269 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11270 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11271 fold_convert_loc (loc, type, arg0),
11272 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11273 }
11274 /* Fold X & (Y ^ X) as ~Y & X. */
11275 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11276 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11277 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11278 {
11279 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11280 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11281 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11282 fold_convert_loc (loc, type, arg0));
11283 }
11284
11285 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11286 multiple of 1 << CST. */
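	  /* E.g. (x * 8) & -8 folds to x * 8: -8 is -(1 << 3) and x * 8 is
	     always a multiple of 8, so the mask changes nothing. */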
11287 if (TREE_CODE (arg1) == INTEGER_CST)
11288 {
11289 wide_int cst1 = arg1;
11290 wide_int ncst1 = -cst1;
11291 if ((cst1 & ncst1) == ncst1
11292 && multiple_of_p (type, arg0,
11293 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11294 return fold_convert_loc (loc, type, arg0);
11295 }
11296
11297 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11298 bits from CST2. */
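	  /* E.g. (x * 4) & 3 folds to 0, and (x * 4) & 0x3f becomes
	     (x * 4) & 0x3c, dropping the two low bits that the multiply
	     already guarantees are zero. ((x * 4) & 0xff is left alone,
	     since 0xff is a mode mask.) */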
11299 if (TREE_CODE (arg1) == INTEGER_CST
11300 && TREE_CODE (arg0) == MULT_EXPR
11301 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11302 {
11303 wide_int warg1 = arg1;
11304 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11305
11306 if (masked == 0)
11307 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11308 arg0, arg1);
11309 else if (masked != warg1)
11310 {
11311 /* Avoid the transform if arg1 is a mask of some
11312 mode which allows further optimizations. */
11313 int pop = wi::popcount (warg1);
11314 if (!(pop >= BITS_PER_UNIT
11315 && exact_log2 (pop) != -1
11316 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11317 return fold_build2_loc (loc, code, type, op0,
11318 wide_int_to_tree (type, masked));
11319 }
11320 }
11321
11322 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11323 ((A & N) + B) & M -> (A + B) & M
11324 Similarly if (N & M) == 0,
11325 ((A | N) + B) & M -> (A + B) & M
11326 and for - instead of + (or unary - instead of +)
11327 and/or ^ instead of |.
11328 If B is constant and (B & M) == 0, fold into A & M. */
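	  /* Illustrative examples with M == 0xff: ((a & 0xffff) + b) & 0xff
	     folds to (a + b) & 0xff because (0xffff & 0xff) == 0xff, and
	     ((a | 0x100) + b) & 0xff folds the same way because
	     (0x100 & 0xff) == 0. */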
11329 if (TREE_CODE (arg1) == INTEGER_CST)
11330 {
11331 wide_int cst1 = arg1;
11332 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11333 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11334 && (TREE_CODE (arg0) == PLUS_EXPR
11335 || TREE_CODE (arg0) == MINUS_EXPR
11336 || TREE_CODE (arg0) == NEGATE_EXPR)
11337 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11338 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11339 {
11340 tree pmop[2];
11341 int which = 0;
11342 wide_int cst0;
11343
11344 /* Now we know that arg0 is (C + D) or (C - D) or
11345 -C and arg1 (M) is == (1LL << cst) - 1.
11346 Store C into PMOP[0] and D into PMOP[1]. */
11347 pmop[0] = TREE_OPERAND (arg0, 0);
11348 pmop[1] = NULL;
11349 if (TREE_CODE (arg0) != NEGATE_EXPR)
11350 {
11351 pmop[1] = TREE_OPERAND (arg0, 1);
11352 which = 1;
11353 }
11354
11355 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11356 which = -1;
11357
11358 for (; which >= 0; which--)
11359 switch (TREE_CODE (pmop[which]))
11360 {
11361 case BIT_AND_EXPR:
11362 case BIT_IOR_EXPR:
11363 case BIT_XOR_EXPR:
11364 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11365 != INTEGER_CST)
11366 break;
11367 cst0 = TREE_OPERAND (pmop[which], 1);
11368 cst0 &= cst1;
11369 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11370 {
11371 if (cst0 != cst1)
11372 break;
11373 }
11374 else if (cst0 != 0)
11375 break;
11376 /* If C or D is of the form (A & N) where
11377 (N & M) == M, or of the form (A | N) or
11378 (A ^ N) where (N & M) == 0, replace it with A. */
11379 pmop[which] = TREE_OPERAND (pmop[which], 0);
11380 break;
11381 case INTEGER_CST:
11382 		  /* If C or D is a constant N where (N & M) == 0, it can be
11383 omitted (assumed 0). */
11384 if ((TREE_CODE (arg0) == PLUS_EXPR
11385 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11386 && (cst1 & pmop[which]) == 0)
11387 pmop[which] = NULL;
11388 break;
11389 default:
11390 break;
11391 }
11392
11393 /* Only build anything new if we optimized one or both arguments
11394 above. */
11395 if (pmop[0] != TREE_OPERAND (arg0, 0)
11396 || (TREE_CODE (arg0) != NEGATE_EXPR
11397 && pmop[1] != TREE_OPERAND (arg0, 1)))
11398 {
11399 tree utype = TREE_TYPE (arg0);
11400 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11401 {
11402 /* Perform the operations in a type that has defined
11403 overflow behavior. */
11404 utype = unsigned_type_for (TREE_TYPE (arg0));
11405 if (pmop[0] != NULL)
11406 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11407 if (pmop[1] != NULL)
11408 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11409 }
11410
11411 if (TREE_CODE (arg0) == NEGATE_EXPR)
11412 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11413 else if (TREE_CODE (arg0) == PLUS_EXPR)
11414 {
11415 if (pmop[0] != NULL && pmop[1] != NULL)
11416 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11417 pmop[0], pmop[1]);
11418 else if (pmop[0] != NULL)
11419 tem = pmop[0];
11420 else if (pmop[1] != NULL)
11421 tem = pmop[1];
11422 else
11423 return build_int_cst (type, 0);
11424 }
11425 else if (pmop[0] == NULL)
11426 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11427 else
11428 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11429 pmop[0], pmop[1]);
11430 /* TEM is now the new binary +, - or unary - replacement. */
11431 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11432 fold_convert_loc (loc, utype, arg1));
11433 return fold_convert_loc (loc, type, tem);
11434 }
11435 }
11436 }
11437
11438 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11439 if (t1 != NULL_TREE)
11440 return t1;
11441 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11442 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11443 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11444 {
11445 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11446
11447 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11448 if (mask == -1)
11449 return
11450 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11451 }
11452
11453 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11454
11455 This results in more efficient code for machines without a NOR
11456 instruction. Combine will canonicalize to the first form
11457 which will allow use of NOR instructions provided by the
11458 backend if they exist. */
11459 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11460 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11461 {
11462 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11463 build2 (BIT_IOR_EXPR, type,
11464 fold_convert_loc (loc, type,
11465 TREE_OPERAND (arg0, 0)),
11466 fold_convert_loc (loc, type,
11467 TREE_OPERAND (arg1, 0))));
11468 }
11469
11470 /* If arg0 is derived from the address of an object or function, we may
11471 be able to fold this expression using the object or function's
11472 alignment. */
11473 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11474 {
11475 unsigned HOST_WIDE_INT modulus, residue;
11476 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11477
11478 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11479 integer_onep (arg1));
11480
11481 /* This works because modulus is a power of 2. If this weren't the
11482 case, we'd have to replace it by its greatest power-of-2
11483 divisor: modulus & -modulus. */
11484 if (low < modulus)
11485 return build_int_cst (type, residue & low);
11486 }
11487
11488 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11489 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11490 if the new mask might be further optimized. */
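	  /* Illustrative example: for unsigned int x, (x << 2) & 0xfc has
	     zerobits == 3, so the mask is widened to 0xfc | 3 == 0xff, the
	     QImode mask, and the result becomes (x << 2) & 0xff. */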
11491 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11492 || TREE_CODE (arg0) == RSHIFT_EXPR)
11493 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11494 && TREE_CODE (arg1) == INTEGER_CST
11495 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11496 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11497 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11498 < TYPE_PRECISION (TREE_TYPE (arg0))))
11499 {
11500 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11501 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11502 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11503 tree shift_type = TREE_TYPE (arg0);
11504
11505 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11506 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11507 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11508 && TYPE_PRECISION (TREE_TYPE (arg0))
11509 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11510 {
11511 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11512 tree arg00 = TREE_OPERAND (arg0, 0);
11513 /* See if more bits can be proven as zero because of
11514 zero extension. */
11515 if (TREE_CODE (arg00) == NOP_EXPR
11516 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11517 {
11518 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11519 if (TYPE_PRECISION (inner_type)
11520 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11521 && TYPE_PRECISION (inner_type) < prec)
11522 {
11523 prec = TYPE_PRECISION (inner_type);
11524 /* See if we can shorten the right shift. */
11525 if (shiftc < prec)
11526 shift_type = inner_type;
11527 /* Otherwise X >> C1 is all zeros, so we'll optimize
11528 it into (X, 0) later on by making sure zerobits
11529 is all ones. */
11530 }
11531 }
11532 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11533 if (shiftc < prec)
11534 {
11535 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11536 zerobits <<= prec - shiftc;
11537 }
11538 	    /* For an arithmetic shift, if the sign bit could be set, zerobits
11539 	       can actually contain sign bits, so no transformation is
11540 	       possible, unless MASK masks them all away. In that
11541 	       case the shift needs to be converted into a logical shift. */
11542 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11543 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11544 {
11545 if ((mask & zerobits) == 0)
11546 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11547 else
11548 zerobits = 0;
11549 }
11550 }
11551
11552 /* ((X << 16) & 0xff00) is (X, 0). */
11553 if ((mask & zerobits) == mask)
11554 return omit_one_operand_loc (loc, type,
11555 build_int_cst (type, 0), arg0);
11556
11557 newmask = mask | zerobits;
11558 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11559 {
11560 /* Only do the transformation if NEWMASK is some integer
11561 mode's mask. */
11562 for (prec = BITS_PER_UNIT;
11563 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11564 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11565 break;
11566 if (prec < HOST_BITS_PER_WIDE_INT
11567 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11568 {
11569 tree newmaskt;
11570
11571 if (shift_type != TREE_TYPE (arg0))
11572 {
11573 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11574 fold_convert_loc (loc, shift_type,
11575 TREE_OPERAND (arg0, 0)),
11576 TREE_OPERAND (arg0, 1));
11577 tem = fold_convert_loc (loc, type, tem);
11578 }
11579 else
11580 tem = op0;
11581 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11582 if (!tree_int_cst_equal (newmaskt, arg1))
11583 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11584 }
11585 }
11586 }
11587
11588 goto associate;
11589
11590 case RDIV_EXPR:
11591 /* Don't touch a floating-point divide by zero unless the mode
11592 of the constant can represent infinity. */
11593 if (TREE_CODE (arg1) == REAL_CST
11594 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11595 && real_zerop (arg1))
11596 return NULL_TREE;
11597
11598 /* (-A) / (-B) -> A / B */
11599 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11600 return fold_build2_loc (loc, RDIV_EXPR, type,
11601 TREE_OPERAND (arg0, 0),
11602 negate_expr (arg1));
11603 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11604 return fold_build2_loc (loc, RDIV_EXPR, type,
11605 negate_expr (arg0),
11606 TREE_OPERAND (arg1, 0));
11607
11608 /* Convert A/B/C to A/(B*C). */
11609 if (flag_reciprocal_math
11610 && TREE_CODE (arg0) == RDIV_EXPR)
11611 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11612 fold_build2_loc (loc, MULT_EXPR, type,
11613 TREE_OPERAND (arg0, 1), arg1));
11614
11615 /* Convert A/(B/C) to (A/B)*C. */
11616 if (flag_reciprocal_math
11617 && TREE_CODE (arg1) == RDIV_EXPR)
11618 return fold_build2_loc (loc, MULT_EXPR, type,
11619 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11620 TREE_OPERAND (arg1, 0)),
11621 TREE_OPERAND (arg1, 1));
11622
11623 /* Convert C1/(X*C2) into (C1/C2)/X. */
11624 if (flag_reciprocal_math
11625 && TREE_CODE (arg1) == MULT_EXPR
11626 && TREE_CODE (arg0) == REAL_CST
11627 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11628 {
11629 tree tem = const_binop (RDIV_EXPR, arg0,
11630 TREE_OPERAND (arg1, 1));
11631 if (tem)
11632 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11633 TREE_OPERAND (arg1, 0));
11634 }
11635
11636 if (flag_unsafe_math_optimizations)
11637 {
11638 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11639 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11640
11641 /* Optimize sin(x)/cos(x) as tan(x). */
11642 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11643 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11644 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11645 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11646 CALL_EXPR_ARG (arg1, 0), 0))
11647 {
11648 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11649
11650 if (tanfn != NULL_TREE)
11651 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11652 }
11653
11654 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11655 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11656 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11657 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11658 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11659 CALL_EXPR_ARG (arg1, 0), 0))
11660 {
11661 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11662
11663 if (tanfn != NULL_TREE)
11664 {
11665 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11666 CALL_EXPR_ARG (arg0, 0));
11667 return fold_build2_loc (loc, RDIV_EXPR, type,
11668 build_real (type, dconst1), tmp);
11669 }
11670 }
11671
11672 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11673 NaNs or Infinities. */
11674 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11675 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11676 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11677 {
11678 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11679 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11680
11681 if (! HONOR_NANS (element_mode (arg00))
11682 && ! HONOR_INFINITIES (element_mode (arg00))
11683 && operand_equal_p (arg00, arg01, 0))
11684 {
11685 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11686
11687 if (cosfn != NULL_TREE)
11688 return build_call_expr_loc (loc, cosfn, 1, arg00);
11689 }
11690 }
11691
11692 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11693 NaNs or Infinities. */
11694 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11695 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11696 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11697 {
11698 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11699 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11700
11701 if (! HONOR_NANS (element_mode (arg00))
11702 && ! HONOR_INFINITIES (element_mode (arg00))
11703 && operand_equal_p (arg00, arg01, 0))
11704 {
11705 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11706
11707 if (cosfn != NULL_TREE)
11708 {
11709 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11710 return fold_build2_loc (loc, RDIV_EXPR, type,
11711 build_real (type, dconst1),
11712 tmp);
11713 }
11714 }
11715 }
11716
11717 /* Optimize pow(x,c)/x as pow(x,c-1). */
11718 if (fcode0 == BUILT_IN_POW
11719 || fcode0 == BUILT_IN_POWF
11720 || fcode0 == BUILT_IN_POWL)
11721 {
11722 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11723 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11724 if (TREE_CODE (arg01) == REAL_CST
11725 && !TREE_OVERFLOW (arg01)
11726 && operand_equal_p (arg1, arg00, 0))
11727 {
11728 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11729 REAL_VALUE_TYPE c;
11730 tree arg;
11731
11732 c = TREE_REAL_CST (arg01);
11733 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11734 arg = build_real (type, c);
11735 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11736 }
11737 }
11738
11739 /* Optimize a/root(b/c) into a*root(c/b). */
11740 if (BUILTIN_ROOT_P (fcode1))
11741 {
11742 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11743
11744 if (TREE_CODE (rootarg) == RDIV_EXPR)
11745 {
11746 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11747 tree b = TREE_OPERAND (rootarg, 0);
11748 tree c = TREE_OPERAND (rootarg, 1);
11749
11750 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11751
11752 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11753 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11754 }
11755 }
11756
11757 /* Optimize x/expN(y) into x*expN(-y). */
11758 if (BUILTIN_EXPONENT_P (fcode1))
11759 {
11760 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11761 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11762 arg1 = build_call_expr_loc (loc,
11763 expfn, 1,
11764 fold_convert_loc (loc, type, arg));
11765 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11766 }
11767
11768 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11769 if (fcode1 == BUILT_IN_POW
11770 || fcode1 == BUILT_IN_POWF
11771 || fcode1 == BUILT_IN_POWL)
11772 {
11773 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11774 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11775 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11776 tree neg11 = fold_convert_loc (loc, type,
11777 negate_expr (arg11));
11778 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11779 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11780 }
11781 }
11782 return NULL_TREE;
11783
11784 case TRUNC_DIV_EXPR:
11785 /* Optimize (X & (-A)) / A where A is a power of 2,
11786 	 to X >> log2(A). */
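      /* E.g. for signed int x, (x & -8) / 8 folds to x >> 3: x & -8 is
	 always a multiple of 8, so the arithmetic shift agrees with
	 truncating division even for negative x. */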
11787 if (TREE_CODE (arg0) == BIT_AND_EXPR
11788 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11789 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11790 {
11791 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11792 arg1, TREE_OPERAND (arg0, 1));
11793 	  if (sum && integer_zerop (sum))
11794 	    {
11795 	      tree pow2 = build_int_cst (integer_type_node, wi::exact_log2 (arg1));
11796 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
11797 				      TREE_OPERAND (arg0, 0), pow2);
11798 	    }
11799 }
11800
11801 /* Fall through */
11802
11803 case FLOOR_DIV_EXPR:
11804 /* Simplify A / (B << N) where A and B are positive and B is
11805 a power of 2, to A >> (N + log2(B)). */
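	 /* E.g. for unsigned a, a / (4 << n) folds to a >> (n + 2),
	    combining log2(4) == 2 with the variable shift count n. */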
11806 strict_overflow_p = false;
11807 if (TREE_CODE (arg1) == LSHIFT_EXPR
11808 && (TYPE_UNSIGNED (type)
11809 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11810 {
11811 tree sval = TREE_OPERAND (arg1, 0);
11812 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11813 {
11814 tree sh_cnt = TREE_OPERAND (arg1, 1);
11815 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11816 wi::exact_log2 (sval));
11817
11818 if (strict_overflow_p)
11819 fold_overflow_warning (("assuming signed overflow does not "
11820 "occur when simplifying A / (B << N)"),
11821 WARN_STRICT_OVERFLOW_MISC);
11822
11823 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11824 sh_cnt, pow2);
11825 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11826 fold_convert_loc (loc, type, arg0), sh_cnt);
11827 }
11828 }
11829
11830 /* Fall through */
11831
11832 case ROUND_DIV_EXPR:
11833 case CEIL_DIV_EXPR:
11834 case EXACT_DIV_EXPR:
11835 if (integer_zerop (arg1))
11836 return NULL_TREE;
11837
11838 /* Convert -A / -B to A / B when the type is signed and overflow is
11839 undefined. */
11840 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11841 && TREE_CODE (arg0) == NEGATE_EXPR
11842 && negate_expr_p (arg1))
11843 {
11844 if (INTEGRAL_TYPE_P (type))
11845 fold_overflow_warning (("assuming signed overflow does not occur "
11846 "when distributing negation across "
11847 "division"),
11848 WARN_STRICT_OVERFLOW_MISC);
11849 return fold_build2_loc (loc, code, type,
11850 fold_convert_loc (loc, type,
11851 TREE_OPERAND (arg0, 0)),
11852 fold_convert_loc (loc, type,
11853 negate_expr (arg1)));
11854 }
11855 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11856 && TREE_CODE (arg1) == NEGATE_EXPR
11857 && negate_expr_p (arg0))
11858 {
11859 if (INTEGRAL_TYPE_P (type))
11860 fold_overflow_warning (("assuming signed overflow does not occur "
11861 "when distributing negation across "
11862 "division"),
11863 WARN_STRICT_OVERFLOW_MISC);
11864 return fold_build2_loc (loc, code, type,
11865 fold_convert_loc (loc, type,
11866 negate_expr (arg0)),
11867 fold_convert_loc (loc, type,
11868 TREE_OPERAND (arg1, 0)));
11869 }
11870
11871 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11872 operation, EXACT_DIV_EXPR.
11873
11874 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11875 	 At one time others generated faster code; it's not clear whether they do
11876 	 after the last round of changes to the DIV code in expmed.c. */
11877 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11878 && multiple_of_p (type, arg0, arg1))
11879 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11880
11881 strict_overflow_p = false;
11882 if (TREE_CODE (arg1) == INTEGER_CST
11883 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11884 &strict_overflow_p)))
11885 {
11886 if (strict_overflow_p)
11887 fold_overflow_warning (("assuming signed overflow does not occur "
11888 "when simplifying division"),
11889 WARN_STRICT_OVERFLOW_MISC);
11890 return fold_convert_loc (loc, type, tem);
11891 }
11892
11893 return NULL_TREE;
11894
11895 case CEIL_MOD_EXPR:
11896 case FLOOR_MOD_EXPR:
11897 case ROUND_MOD_EXPR:
11898 case TRUNC_MOD_EXPR:
11899 /* X % -Y is the same as X % Y. */
11900 if (code == TRUNC_MOD_EXPR
11901 && !TYPE_UNSIGNED (type)
11902 && TREE_CODE (arg1) == NEGATE_EXPR
11903 && !TYPE_OVERFLOW_TRAPS (type))
11904 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11905 fold_convert_loc (loc, type,
11906 TREE_OPERAND (arg1, 0)));
11907
11908 strict_overflow_p = false;
11909 if (TREE_CODE (arg1) == INTEGER_CST
11910 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11911 &strict_overflow_p)))
11912 {
11913 if (strict_overflow_p)
11914 fold_overflow_warning (("assuming signed overflow does not occur "
11915 "when simplifying modulus"),
11916 WARN_STRICT_OVERFLOW_MISC);
11917 return fold_convert_loc (loc, type, tem);
11918 }
11919
11920 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11921 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11922 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11923 && (TYPE_UNSIGNED (type)
11924 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11925 {
11926 tree c = arg1;
11927 /* Also optimize A % (C << N) where C is a power of 2,
11928 to A & ((C << N) - 1). */
11929 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11930 c = TREE_OPERAND (arg1, 0);
11931
11932 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11933 {
11934 tree mask
11935 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11936 build_int_cst (TREE_TYPE (arg1), 1));
11937 if (strict_overflow_p)
11938 fold_overflow_warning (("assuming signed overflow does not "
11939 "occur when simplifying "
11940 "X % (power of two)"),
11941 WARN_STRICT_OVERFLOW_MISC);
11942 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11943 fold_convert_loc (loc, type, arg0),
11944 fold_convert_loc (loc, type, mask));
11945 }
11946 }
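/* Worked example (illustrative): for unsigned X, X % 8 folds to
   X & 7, and X % (2 << N) folds to X & ((2 << N) - 1); the mask is
   built by subtracting 1 from the whole divisor, so the shifted
   form needs no extra handling beyond the power-of-two check.  */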
11947
11948 return NULL_TREE;
11949
11950 case LROTATE_EXPR:
11951 case RROTATE_EXPR:
11952 case RSHIFT_EXPR:
11953 case LSHIFT_EXPR:
11954 /* Since a negative shift count is not well-defined,
11955 don't try to compute it in the compiler. */
11956 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11957 return NULL_TREE;
11958
11959 prec = element_precision (type);
11960
11961 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11962 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11963 && tree_to_uhwi (arg1) < prec
11964 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11965 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11966 {
11967 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11968 + tree_to_uhwi (arg1));
11969
11970 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11971 being well defined. */
11972 if (low >= prec)
11973 {
11974 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11975 low = low % prec;
11976 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11977 return omit_one_operand_loc (loc, type, build_zero_cst (type),
11978 TREE_OPERAND (arg0, 0));
11979 else
11980 low = prec - 1;
11981 }
11982
11983 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11984 build_int_cst (TREE_TYPE (arg1), low));
11985 }
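/* Worked examples (illustrative, 32-bit type): (X << 3) << 5 folds
   to X << 8.  (X << 20) << 20 folds to 0, since the combined count
   reaches the precision, whereas rotates simply reduce the combined
   count modulo the precision.  */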
11986
11987 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11988 into x & ((unsigned)-1 >> c) for unsigned types. */
11989 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11990 || (TYPE_UNSIGNED (type)
11991 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11992 && tree_fits_uhwi_p (arg1)
11993 && tree_to_uhwi (arg1) < prec
11994 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11995 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11996 {
11997 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11998 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
11999 tree lshift;
12000 tree arg00;
12001
12002 if (low0 == low1)
12003 {
12004 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12005
12006 lshift = build_minus_one_cst (type);
12007 lshift = const_binop (code, lshift, arg1);
12008
12009 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12010 }
12011 }
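/* Worked example (illustrative): for 32-bit unsigned X,
   (X >> 4) << 4 folds to X & 0xfffffff0 and (X << 4) >> 4 folds to
   X & 0x0fffffff; the mask is just -1 shifted by the same count in
   the same direction.  */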
12012
12013 /* If we have a rotate of a bit operation with the rotate count and
12014 the second operand of the bit operation both constant,
12015 permute the two operations. */
12016 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12017 && (TREE_CODE (arg0) == BIT_AND_EXPR
12018 || TREE_CODE (arg0) == BIT_IOR_EXPR
12019 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12020 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12021 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12022 fold_build2_loc (loc, code, type,
12023 TREE_OPERAND (arg0, 0), arg1),
12024 fold_build2_loc (loc, code, type,
12025 TREE_OPERAND (arg0, 1), arg1));
12026
12027 /* Two consecutive rotates adding up to some integer
12028 multiple of the precision of the type can be ignored. */
12029 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12030 && TREE_CODE (arg0) == RROTATE_EXPR
12031 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12032 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12033 prec) == 0)
12034 return TREE_OPERAND (arg0, 0);
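/* Worked example (illustrative): for a 32-bit type,
   (X r>> 8) r>> 24 folds to X, because 8 + 24 == 32 is a whole
   multiple of the precision.  */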
12035
12036 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12037 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12038 if the latter can be further optimized. */
12039 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12040 && TREE_CODE (arg0) == BIT_AND_EXPR
12041 && TREE_CODE (arg1) == INTEGER_CST
12042 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12043 {
12044 tree mask = fold_build2_loc (loc, code, type,
12045 fold_convert_loc (loc, type,
12046 TREE_OPERAND (arg0, 1)),
12047 arg1);
12048 tree shift = fold_build2_loc (loc, code, type,
12049 fold_convert_loc (loc, type,
12050 TREE_OPERAND (arg0, 0)),
12051 arg1);
12052 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12053 if (tem)
12054 return tem;
12055 }
12056
12057 return NULL_TREE;
12058
12059 case MIN_EXPR:
12060 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12061 if (tem)
12062 return tem;
12063 goto associate;
12064
12065 case MAX_EXPR:
12066 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12067 if (tem)
12068 return tem;
12069 goto associate;
12070
12071 case TRUTH_ANDIF_EXPR:
12072 /* Note that the operands of this must be ints
12073 and their values must be 0 or 1.
12074 ("true" is a fixed value perhaps depending on the language.) */
12075 /* If first arg is constant zero, return it. */
12076 if (integer_zerop (arg0))
12077 return fold_convert_loc (loc, type, arg0);
12078 case TRUTH_AND_EXPR:
12079 /* If either arg is constant true, drop it. */
12080 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12081 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12082 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12083 /* Preserve sequence points. */
12084 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12085 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12086 /* If second arg is constant zero, result is zero, but first arg
12087 must be evaluated. */
12088 if (integer_zerop (arg1))
12089 return omit_one_operand_loc (loc, type, arg1, arg0);
12090 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12091 case will be handled here. */
12092 if (integer_zerop (arg0))
12093 return omit_one_operand_loc (loc, type, arg0, arg1);
12094
12095 /* !X && X is always false. */
12096 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12097 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12098 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12099 /* X && !X is always false. */
12100 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12101 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12102 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12103
12104 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12105 means A >= Y && A != MAX, but in this case we know that
12106 A < X <= MAX. */
12107
12108 if (!TREE_SIDE_EFFECTS (arg0)
12109 && !TREE_SIDE_EFFECTS (arg1))
12110 {
12111 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12112 if (tem && !operand_equal_p (tem, arg0, 0))
12113 return fold_build2_loc (loc, code, type, tem, arg1);
12114
12115 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12116 if (tem && !operand_equal_p (tem, arg1, 0))
12117 return fold_build2_loc (loc, code, type, arg0, tem);
12118 }
12119
12120 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12121 != NULL_TREE)
12122 return tem;
12123
12124 return NULL_TREE;
12125
12126 case TRUTH_ORIF_EXPR:
12127 /* Note that the operands of this must be ints
12128 and their values must be 0 or true.
12129 ("true" is a fixed value perhaps depending on the language.) */
12130 /* If first arg is constant true, return it. */
12131 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12132 return fold_convert_loc (loc, type, arg0);
12133 case TRUTH_OR_EXPR:
12134 /* If either arg is constant zero, drop it. */
12135 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12136 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12137 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12138 /* Preserve sequence points. */
12139 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12140 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12141 /* If second arg is constant true, result is true, but we must
12142 evaluate first arg. */
12143 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12144 return omit_one_operand_loc (loc, type, arg1, arg0);
12145 /* Likewise for first arg, but note this only occurs here for
12146 TRUTH_OR_EXPR. */
12147 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12148 return omit_one_operand_loc (loc, type, arg0, arg1);
12149
12150 /* !X || X is always true. */
12151 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12152 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12153 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12154 /* X || !X is always true. */
12155 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12156 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12157 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12158
12159 /* (X && !Y) || (!X && Y) is X ^ Y */
12160 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12161 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12162 {
12163 tree a0, a1, l0, l1, n0, n1;
12164
12165 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12166 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12167
12168 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12169 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12170
12171 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12172 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12173
12174 if ((operand_equal_p (n0, a0, 0)
12175 && operand_equal_p (n1, a1, 0))
12176 || (operand_equal_p (n0, a1, 0)
12177 && operand_equal_p (n1, a0, 0)))
12178 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12179 }
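/* Worked example (illustrative): (a && !b) || (!a && b) folds to
   a ^ b; the operand_equal_p tests above accept the negated
   operands in either order.  */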
12180
12181 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12182 != NULL_TREE)
12183 return tem;
12184
12185 return NULL_TREE;
12186
12187 case TRUTH_XOR_EXPR:
12188 /* If the second arg is constant zero, drop it. */
12189 if (integer_zerop (arg1))
12190 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12191 /* If the second arg is constant true, this is a logical inversion. */
12192 if (integer_onep (arg1))
12193 {
12194 tem = invert_truthvalue_loc (loc, arg0);
12195 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12196 }
12197 /* Identical arguments cancel to zero. */
12198 if (operand_equal_p (arg0, arg1, 0))
12199 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12200
12201 /* !X ^ X is always true. */
12202 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12203 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12204 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12205
12206 /* X ^ !X is always true. */
12207 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12208 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12209 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12210
12211 return NULL_TREE;
12212
12213 case EQ_EXPR:
12214 case NE_EXPR:
12215 STRIP_NOPS (arg0);
12216 STRIP_NOPS (arg1);
12217
12218 tem = fold_comparison (loc, code, type, op0, op1);
12219 if (tem != NULL_TREE)
12220 return tem;
12221
12222 /* bool_var != 0 becomes bool_var. */
12223 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12224 && code == NE_EXPR)
12225 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12226
12227 /* bool_var == 1 becomes bool_var. */
12228 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12229 && code == EQ_EXPR)
12230 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12231
12232 /* bool_var != 1 becomes !bool_var. */
12233 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12234 && code == NE_EXPR)
12235 return fold_convert_loc (loc, type,
12236 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12237 TREE_TYPE (arg0), arg0));
12238
12239 /* bool_var == 0 becomes !bool_var. */
12240 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12241 && code == EQ_EXPR)
12242 return fold_convert_loc (loc, type,
12243 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12244 TREE_TYPE (arg0), arg0));
12245
12246 /* !exp != 0 becomes !exp */
12247 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12248 && code == NE_EXPR)
12249 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12250
12251 /* If this is an equality comparison of the address of two non-weak,
12252 unaliased symbols neither of which are extern (since we do not
12253 have access to attributes for externs), then we know the result. */
12254 if (TREE_CODE (arg0) == ADDR_EXPR
12255 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12256 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12257 && ! lookup_attribute ("alias",
12258 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12259 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12260 && TREE_CODE (arg1) == ADDR_EXPR
12261 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12262 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12263 && ! lookup_attribute ("alias",
12264 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12265 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12266 {
12267 /* We know that we're looking at the address of two
12268 non-weak, unaliased, static _DECL nodes.
12269
12270 It is both wasteful and incorrect to call operand_equal_p
12271 to compare the two ADDR_EXPR nodes. It is wasteful in that
12272 all we need to do is test pointer equality for the arguments
12273 to the two ADDR_EXPR nodes. It is incorrect to use
12274 operand_equal_p as that function is NOT equivalent to a
12275 C equality test. It can in fact return false for two
12276 objects which would test as equal using the C equality
12277 operator. */
12278 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12279 return constant_boolean_node (equal
12280 ? code == EQ_EXPR : code != EQ_EXPR,
12281 type);
12282 }
12283
12284 /* Similarly for a NEGATE_EXPR. */
12285 if (TREE_CODE (arg0) == NEGATE_EXPR
12286 && TREE_CODE (arg1) == INTEGER_CST
12287 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12288 arg1)))
12289 && TREE_CODE (tem) == INTEGER_CST
12290 && !TREE_OVERFLOW (tem))
12291 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12292
12293 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12294 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12295 && TREE_CODE (arg1) == INTEGER_CST
12296 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12297 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12298 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12299 fold_convert_loc (loc,
12300 TREE_TYPE (arg0),
12301 arg1),
12302 TREE_OPERAND (arg0, 1)));
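/* Worked example (illustrative): (X ^ 5) == 3 folds to X == 6,
   since XOR is its own inverse and 5 ^ 3 == 6.  */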
12303
12304 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12305 if ((TREE_CODE (arg0) == PLUS_EXPR
12306 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12307 || TREE_CODE (arg0) == MINUS_EXPR)
12308 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12309 0)),
12310 arg1, 0)
12311 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12312 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12313 {
12314 tree val = TREE_OPERAND (arg0, 1);
12315 return omit_two_operands_loc (loc, type,
12316 fold_build2_loc (loc, code, type,
12317 val,
12318 build_int_cst (TREE_TYPE (val),
12319 0)),
12320 TREE_OPERAND (arg0, 0), arg1);
12321 }
12322
12323 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12324 if (TREE_CODE (arg0) == MINUS_EXPR
12325 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12326 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12327 1)),
12328 arg1, 0)
12329 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12330 {
12331 return omit_two_operands_loc (loc, type,
12332 code == NE_EXPR
12333 ? boolean_true_node : boolean_false_node,
12334 TREE_OPERAND (arg0, 1), arg1);
12335 }
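/* Worked example (illustrative): 7 - X == X folds to false (and
   != folds to true): 2*X == 7 has no integer solution because 7 is
   odd, which is all the low-bit test above checks.  */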
12336
12337 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12338 if (TREE_CODE (arg0) == ABS_EXPR
12339 && (integer_zerop (arg1) || real_zerop (arg1)))
12340 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12341
12342 /* If this is an EQ or NE comparison with zero and ARG0 is
12343 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12344 two operations, but the latter can be done in one less insn
12345 on machines that have only two-operand insns or on which a
12346 constant cannot be the first operand. */
12347 if (TREE_CODE (arg0) == BIT_AND_EXPR
12348 && integer_zerop (arg1))
12349 {
12350 tree arg00 = TREE_OPERAND (arg0, 0);
12351 tree arg01 = TREE_OPERAND (arg0, 1);
12352 if (TREE_CODE (arg00) == LSHIFT_EXPR
12353 && integer_onep (TREE_OPERAND (arg00, 0)))
12354 {
12355 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12356 arg01, TREE_OPERAND (arg00, 1));
12357 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12358 build_int_cst (TREE_TYPE (arg0), 1));
12359 return fold_build2_loc (loc, code, type,
12360 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12361 arg1);
12362 }
12363 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12364 && integer_onep (TREE_OPERAND (arg01, 0)))
12365 {
12366 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12367 arg00, TREE_OPERAND (arg01, 1));
12368 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12369 build_int_cst (TREE_TYPE (arg0), 1));
12370 return fold_build2_loc (loc, code, type,
12371 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12372 arg1);
12373 }
12374 }
12375
12376 /* If this is an NE or EQ comparison of zero against the result of a
12377 signed MOD operation whose second operand is a power of 2, make
12378 the MOD operation unsigned since it is simpler and equivalent. */
12379 if (integer_zerop (arg1)
12380 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12381 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12382 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12383 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12384 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12385 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12386 {
12387 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12388 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12389 fold_convert_loc (loc, newtype,
12390 TREE_OPERAND (arg0, 0)),
12391 fold_convert_loc (loc, newtype,
12392 TREE_OPERAND (arg0, 1)));
12393
12394 return fold_build2_loc (loc, code, type, newmod,
12395 fold_convert_loc (loc, newtype, arg1));
12396 }
12397
12398 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12399 C1 is a valid shift constant, and C2 is a power of two, i.e.
12400 a single bit. */
12401 if (TREE_CODE (arg0) == BIT_AND_EXPR
12402 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12403 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12404 == INTEGER_CST
12405 && integer_pow2p (TREE_OPERAND (arg0, 1))
12406 && integer_zerop (arg1))
12407 {
12408 tree itype = TREE_TYPE (arg0);
12409 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12410 prec = TYPE_PRECISION (itype);
12411
12412 /* Check for a valid shift count. */
12413 if (wi::ltu_p (arg001, prec))
12414 {
12415 tree arg01 = TREE_OPERAND (arg0, 1);
12416 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12417 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12418 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12419 can be rewritten as (X & (C2 << C1)) != 0. */
12420 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12421 {
12422 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12423 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12424 return fold_build2_loc (loc, code, type, tem,
12425 fold_convert_loc (loc, itype, arg1));
12426 }
12427 /* Otherwise, for signed (arithmetic) shifts,
12428 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12429 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12430 else if (!TYPE_UNSIGNED (itype))
12431 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12432 arg000, build_int_cst (itype, 0));
12433 /* Otherwise, for unsigned (logical) shifts,
12434 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12435 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12436 else
12437 return omit_one_operand_loc (loc, type,
12438 code == EQ_EXPR ? integer_one_node
12439 : integer_zero_node,
12440 arg000);
12441 }
12442 }
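/* Worked examples (illustrative, 32-bit X): ((X >> 2) & 4) != 0
   folds to (X & 16) != 0, since 4 << 2 does not overflow.
   ((X >> 31) & 2) != 0 folds to X < 0 for signed X (the arithmetic
   shift replicates the sign bit) and to constant false for
   unsigned X (the logical shift leaves only bit 0 set).  */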
12443
12444 /* If we have (A & C) == C where C is a power of 2, convert this into
12445 (A & C) != 0. Similarly for NE_EXPR. */
12446 if (TREE_CODE (arg0) == BIT_AND_EXPR
12447 && integer_pow2p (TREE_OPERAND (arg0, 1))
12448 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12449 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12450 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12451 integer_zero_node));
12452
12453 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12454 bit, then fold the expression into A < 0 or A >= 0. */
12455 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12456 if (tem)
12457 return tem;
12458
12459 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12460 Similarly for NE_EXPR. */
12461 if (TREE_CODE (arg0) == BIT_AND_EXPR
12462 && TREE_CODE (arg1) == INTEGER_CST
12463 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12464 {
12465 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12466 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12467 TREE_OPERAND (arg0, 1));
12468 tree dandnotc
12469 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12470 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12471 notc);
12472 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12473 if (integer_nonzerop (dandnotc))
12474 return omit_one_operand_loc (loc, type, rslt, arg0);
12475 }
12476
12477 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12478 Similarly for NE_EXPR. */
12479 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12480 && TREE_CODE (arg1) == INTEGER_CST
12481 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12482 {
12483 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12484 tree candnotd
12485 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12486 TREE_OPERAND (arg0, 1),
12487 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12488 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12489 if (integer_nonzerop (candnotd))
12490 return omit_one_operand_loc (loc, type, rslt, arg0);
12491 }
12492
12493 /* If this is a comparison of a field, we may be able to simplify it. */
12494 if ((TREE_CODE (arg0) == COMPONENT_REF
12495 || TREE_CODE (arg0) == BIT_FIELD_REF)
12496 /* Handle the constant case even without -O
12497 to make sure the warnings are given. */
12498 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12499 {
12500 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12501 if (t1)
12502 return t1;
12503 }
12504
12505 /* Optimize comparisons of strlen vs zero to a compare of the
12506 first character of the string vs zero. To wit,
12507 strlen(ptr) == 0 => *ptr == 0
12508 strlen(ptr) != 0 => *ptr != 0
12509 Other cases should reduce to one of these two (or a constant)
12510 due to the return value of strlen being unsigned. */
12511 if (TREE_CODE (arg0) == CALL_EXPR
12512 && integer_zerop (arg1))
12513 {
12514 tree fndecl = get_callee_fndecl (arg0);
12515
12516 if (fndecl
12517 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12518 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12519 && call_expr_nargs (arg0) == 1
12520 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12521 {
12522 tree iref = build_fold_indirect_ref_loc (loc,
12523 CALL_EXPR_ARG (arg0, 0));
12524 return fold_build2_loc (loc, code, type, iref,
12525 build_int_cst (TREE_TYPE (iref), 0));
12526 }
12527 }
12528
12529 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12530 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12531 if (TREE_CODE (arg0) == RSHIFT_EXPR
12532 && integer_zerop (arg1)
12533 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12534 {
12535 tree arg00 = TREE_OPERAND (arg0, 0);
12536 tree arg01 = TREE_OPERAND (arg0, 1);
12537 tree itype = TREE_TYPE (arg00);
12538 if (wi::eq_p (arg01, element_precision (itype) - 1))
12539 {
12540 if (TYPE_UNSIGNED (itype))
12541 {
12542 itype = signed_type_for (itype);
12543 arg00 = fold_convert_loc (loc, itype, arg00);
12544 }
12545 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12546 type, arg00, build_zero_cst (itype));
12547 }
12548 }
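/* Worked example (illustrative): for 32-bit X, (X >> 31) != 0
   folds to X < 0 and (X >> 31) == 0 folds to X >= 0; an unsigned X
   is first converted to the corresponding signed type so that the
   sign test is meaningful.  */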
12549
12550 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12551 if (integer_zerop (arg1)
12552 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12553 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12554 TREE_OPERAND (arg0, 1));
12555
12556 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12557 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12558 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12559 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12560 build_zero_cst (TREE_TYPE (arg0)));
12561 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12562 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12563 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12564 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12565 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12566 build_zero_cst (TREE_TYPE (arg0)));
12567
12568 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12569 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12570 && TREE_CODE (arg1) == INTEGER_CST
12571 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12572 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12573 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12574 TREE_OPERAND (arg0, 1), arg1));
12575
12576 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12577 (X & C) == 0 when C is a single bit. */
12578 if (TREE_CODE (arg0) == BIT_AND_EXPR
12579 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12580 && integer_zerop (arg1)
12581 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12582 {
12583 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12584 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12585 TREE_OPERAND (arg0, 1));
12586 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12587 type, tem,
12588 fold_convert_loc (loc, TREE_TYPE (arg0),
12589 arg1));
12590 }
12591
12592 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12593 constant C is a power of two, i.e. a single bit. */
12594 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12595 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12596 && integer_zerop (arg1)
12597 && integer_pow2p (TREE_OPERAND (arg0, 1))
12598 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12599 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12600 {
12601 tree arg00 = TREE_OPERAND (arg0, 0);
12602 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12603 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12604 }
12605
12606 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12607 when C is a power of two, i.e. a single bit. */
12608 if (TREE_CODE (arg0) == BIT_AND_EXPR
12609 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12610 && integer_zerop (arg1)
12611 && integer_pow2p (TREE_OPERAND (arg0, 1))
12612 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12613 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12614 {
12615 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12616 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12617 arg000, TREE_OPERAND (arg0, 1));
12618 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12619 tem, build_int_cst (TREE_TYPE (tem), 0));
12620 }
12621
12622 if (integer_zerop (arg1)
12623 && tree_expr_nonzero_p (arg0))
12624 {
12625 tree res = constant_boolean_node (code==NE_EXPR, type);
12626 return omit_one_operand_loc (loc, type, res, arg0);
12627 }
12628
12629 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12630 if (TREE_CODE (arg0) == NEGATE_EXPR
12631 && TREE_CODE (arg1) == NEGATE_EXPR)
12632 return fold_build2_loc (loc, code, type,
12633 TREE_OPERAND (arg0, 0),
12634 fold_convert_loc (loc, TREE_TYPE (arg0),
12635 TREE_OPERAND (arg1, 0)));
12636
12637 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12638 if (TREE_CODE (arg0) == BIT_AND_EXPR
12639 && TREE_CODE (arg1) == BIT_AND_EXPR)
12640 {
12641 tree arg00 = TREE_OPERAND (arg0, 0);
12642 tree arg01 = TREE_OPERAND (arg0, 1);
12643 tree arg10 = TREE_OPERAND (arg1, 0);
12644 tree arg11 = TREE_OPERAND (arg1, 1);
12645 tree itype = TREE_TYPE (arg0);
12646
12647 if (operand_equal_p (arg01, arg11, 0))
12648 return fold_build2_loc (loc, code, type,
12649 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12650 fold_build2_loc (loc,
12651 BIT_XOR_EXPR, itype,
12652 arg00, arg10),
12653 arg01),
12654 build_zero_cst (itype));
12655
12656 if (operand_equal_p (arg01, arg10, 0))
12657 return fold_build2_loc (loc, code, type,
12658 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12659 fold_build2_loc (loc,
12660 BIT_XOR_EXPR, itype,
12661 arg00, arg11),
12662 arg01),
12663 build_zero_cst (itype));
12664
12665 if (operand_equal_p (arg00, arg11, 0))
12666 return fold_build2_loc (loc, code, type,
12667 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12668 fold_build2_loc (loc,
12669 BIT_XOR_EXPR, itype,
12670 arg01, arg10),
12671 arg00),
12672 build_zero_cst (itype));
12673
12674 if (operand_equal_p (arg00, arg10, 0))
12675 return fold_build2_loc (loc, code, type,
12676 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12677 fold_build2_loc (loc,
12678 BIT_XOR_EXPR, itype,
12679 arg01, arg11),
12680 arg00),
12681 build_zero_cst (itype));
12682 }
12683
12684 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12685 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12686 {
12687 tree arg00 = TREE_OPERAND (arg0, 0);
12688 tree arg01 = TREE_OPERAND (arg0, 1);
12689 tree arg10 = TREE_OPERAND (arg1, 0);
12690 tree arg11 = TREE_OPERAND (arg1, 1);
12691 tree itype = TREE_TYPE (arg0);
12692
12693 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12694 operand_equal_p guarantees no side-effects so we don't need
12695 to use omit_one_operand on Z. */
12696 if (operand_equal_p (arg01, arg11, 0))
12697 return fold_build2_loc (loc, code, type, arg00,
12698 fold_convert_loc (loc, TREE_TYPE (arg00),
12699 arg10));
12700 if (operand_equal_p (arg01, arg10, 0))
12701 return fold_build2_loc (loc, code, type, arg00,
12702 fold_convert_loc (loc, TREE_TYPE (arg00),
12703 arg11));
12704 if (operand_equal_p (arg00, arg11, 0))
12705 return fold_build2_loc (loc, code, type, arg01,
12706 fold_convert_loc (loc, TREE_TYPE (arg01),
12707 arg10));
12708 if (operand_equal_p (arg00, arg10, 0))
12709 return fold_build2_loc (loc, code, type, arg01,
12710 fold_convert_loc (loc, TREE_TYPE (arg01),
12711 arg11));
12712
12713 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12714 if (TREE_CODE (arg01) == INTEGER_CST
12715 && TREE_CODE (arg11) == INTEGER_CST)
12716 {
12717 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12718 fold_convert_loc (loc, itype, arg11));
12719 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12720 return fold_build2_loc (loc, code, type, tem,
12721 fold_convert_loc (loc, itype, arg10));
12722 }
12723 }
12724
12725 /* Attempt to simplify equality/inequality comparisons of complex
12726 values. Only lower the comparison if the result is known or
12727 can be simplified to a single scalar comparison. */
12728 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12729 || TREE_CODE (arg0) == COMPLEX_CST)
12730 && (TREE_CODE (arg1) == COMPLEX_EXPR
12731 || TREE_CODE (arg1) == COMPLEX_CST))
12732 {
12733 tree real0, imag0, real1, imag1;
12734 tree rcond, icond;
12735
12736 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12737 {
12738 real0 = TREE_OPERAND (arg0, 0);
12739 imag0 = TREE_OPERAND (arg0, 1);
12740 }
12741 else
12742 {
12743 real0 = TREE_REALPART (arg0);
12744 imag0 = TREE_IMAGPART (arg0);
12745 }
12746
12747 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12748 {
12749 real1 = TREE_OPERAND (arg1, 0);
12750 imag1 = TREE_OPERAND (arg1, 1);
12751 }
12752 else
12753 {
12754 real1 = TREE_REALPART (arg1);
12755 imag1 = TREE_IMAGPART (arg1);
12756 }
12757
12758 rcond = fold_binary_loc (loc, code, type, real0, real1);
12759 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12760 {
12761 if (integer_zerop (rcond))
12762 {
12763 if (code == EQ_EXPR)
12764 return omit_two_operands_loc (loc, type, boolean_false_node,
12765 imag0, imag1);
12766 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12767 }
12768 else
12769 {
12770 if (code == NE_EXPR)
12771 return omit_two_operands_loc (loc, type, boolean_true_node,
12772 imag0, imag1);
12773 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12774 }
12775 }
12776
12777 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12778 if (icond && TREE_CODE (icond) == INTEGER_CST)
12779 {
12780 if (integer_zerop (icond))
12781 {
12782 if (code == EQ_EXPR)
12783 return omit_two_operands_loc (loc, type, boolean_false_node,
12784 real0, real1);
12785 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12786 }
12787 else
12788 {
12789 if (code == NE_EXPR)
12790 return omit_two_operands_loc (loc, type, boolean_true_node,
12791 real0, real1);
12792 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12793 }
12794 }
12795 }
12796
12797 return NULL_TREE;
12798
12799 case LT_EXPR:
12800 case GT_EXPR:
12801 case LE_EXPR:
12802 case GE_EXPR:
12803 tem = fold_comparison (loc, code, type, op0, op1);
12804 if (tem != NULL_TREE)
12805 return tem;
12806
12807 /* Transform comparisons of the form X +- C CMP X. */
12808 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12809 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12810 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12811 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12812 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12813 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12814 {
12815 tree arg01 = TREE_OPERAND (arg0, 1);
12816 enum tree_code code0 = TREE_CODE (arg0);
12817 int is_positive;
12818
12819 if (TREE_CODE (arg01) == REAL_CST)
12820 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12821 else
12822 is_positive = tree_int_cst_sgn (arg01);
12823
12824 /* (X - c) > X becomes false. */
12825 if (code == GT_EXPR
12826 && ((code0 == MINUS_EXPR && is_positive >= 0)
12827 || (code0 == PLUS_EXPR && is_positive <= 0)))
12828 {
12829 if (TREE_CODE (arg01) == INTEGER_CST
12830 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12831 fold_overflow_warning (("assuming signed overflow does not "
12832 "occur when assuming that (X - c) > X "
12833 "is always false"),
12834 WARN_STRICT_OVERFLOW_ALL);
12835 return constant_boolean_node (0, type);
12836 }
12837
12838 /* Likewise (X + c) < X becomes false. */
12839 if (code == LT_EXPR
12840 && ((code0 == PLUS_EXPR && is_positive >= 0)
12841 || (code0 == MINUS_EXPR && is_positive <= 0)))
12842 {
12843 if (TREE_CODE (arg01) == INTEGER_CST
12844 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12845 fold_overflow_warning (("assuming signed overflow does not "
12846 "occur when assuming that "
12847 "(X + c) < X is always false"),
12848 WARN_STRICT_OVERFLOW_ALL);
12849 return constant_boolean_node (0, type);
12850 }
12851
12852 /* Convert (X - c) <= X to true. */
12853 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12854 && code == LE_EXPR
12855 && ((code0 == MINUS_EXPR && is_positive >= 0)
12856 || (code0 == PLUS_EXPR && is_positive <= 0)))
12857 {
12858 if (TREE_CODE (arg01) == INTEGER_CST
12859 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12860 fold_overflow_warning (("assuming signed overflow does not "
12861 "occur when assuming that "
12862 "(X - c) <= X is always true"),
12863 WARN_STRICT_OVERFLOW_ALL);
12864 return constant_boolean_node (1, type);
12865 }
12866
12867 /* Convert (X + c) >= X to true. */
12868 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12869 && code == GE_EXPR
12870 && ((code0 == PLUS_EXPR && is_positive >= 0)
12871 || (code0 == MINUS_EXPR && is_positive <= 0)))
12872 {
12873 if (TREE_CODE (arg01) == INTEGER_CST
12874 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12875 fold_overflow_warning (("assuming signed overflow does not "
12876 "occur when assuming that "
12877 "(X + c) >= X is always true"),
12878 WARN_STRICT_OVERFLOW_ALL);
12879 return constant_boolean_node (1, type);
12880 }
12881
12882 if (TREE_CODE (arg01) == INTEGER_CST)
12883 {
12884 /* Convert X + c > X and X - c < X to true for integers. */
12885 if (code == GT_EXPR
12886 && ((code0 == PLUS_EXPR && is_positive > 0)
12887 || (code0 == MINUS_EXPR && is_positive < 0)))
12888 {
12889 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12890 fold_overflow_warning (("assuming signed overflow does "
12891 "not occur when assuming that "
12892 "(X + c) > X is always true"),
12893 WARN_STRICT_OVERFLOW_ALL);
12894 return constant_boolean_node (1, type);
12895 }
12896
12897 if (code == LT_EXPR
12898 && ((code0 == MINUS_EXPR && is_positive > 0)
12899 || (code0 == PLUS_EXPR && is_positive < 0)))
12900 {
12901 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12902 fold_overflow_warning (("assuming signed overflow does "
12903 "not occur when assuming that "
12904 "(X - c) < X is always true"),
12905 WARN_STRICT_OVERFLOW_ALL);
12906 return constant_boolean_node (1, type);
12907 }
12908
12909 /* Convert X + c <= X and X - c >= X to false for integers. */
12910 if (code == LE_EXPR
12911 && ((code0 == PLUS_EXPR && is_positive > 0)
12912 || (code0 == MINUS_EXPR && is_positive < 0)))
12913 {
12914 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12915 fold_overflow_warning (("assuming signed overflow does "
12916 "not occur when assuming that "
12917 "(X + c) <= X is always false"),
12918 WARN_STRICT_OVERFLOW_ALL);
12919 return constant_boolean_node (0, type);
12920 }
12921
12922 if (code == GE_EXPR
12923 && ((code0 == MINUS_EXPR && is_positive > 0)
12924 || (code0 == PLUS_EXPR && is_positive < 0)))
12925 {
12926 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12927 fold_overflow_warning (("assuming signed overflow does "
12928 "not occur when assuming that "
12929 "(X - c) >= X is always false"),
12930 WARN_STRICT_OVERFLOW_ALL);
12931 return constant_boolean_node (0, type);
12932 }
12933 }
12934 }
12935
12936 /* Comparisons with the highest or lowest possible integer of
12937 the specified precision will have known values. */
12938 {
12939 tree arg1_type = TREE_TYPE (arg1);
12940 unsigned int prec = TYPE_PRECISION (arg1_type);
12941
12942 if (TREE_CODE (arg1) == INTEGER_CST
12943 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12944 {
12945 wide_int max = wi::max_value (arg1_type);
12946 wide_int signed_max = wi::max_value (prec, SIGNED);
12947 wide_int min = wi::min_value (arg1_type);
12948
12949 if (wi::eq_p (arg1, max))
12950 switch (code)
12951 {
12952 case GT_EXPR:
12953 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12954
12955 case GE_EXPR:
12956 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12957
12958 case LE_EXPR:
12959 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12960
12961 case LT_EXPR:
12962 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12963
12964 /* The GE_EXPR and LT_EXPR cases above are not normally
12965 reached because of previous transformations. */
12966
12967 default:
12968 break;
12969 }
12970 else if (wi::eq_p (arg1, max - 1))
12971 switch (code)
12972 {
12973 case GT_EXPR:
12974 arg1 = const_binop (PLUS_EXPR, arg1,
12975 build_int_cst (TREE_TYPE (arg1), 1));
12976 return fold_build2_loc (loc, EQ_EXPR, type,
12977 fold_convert_loc (loc,
12978 TREE_TYPE (arg1), arg0),
12979 arg1);
12980 case LE_EXPR:
12981 arg1 = const_binop (PLUS_EXPR, arg1,
12982 build_int_cst (TREE_TYPE (arg1), 1));
12983 return fold_build2_loc (loc, NE_EXPR, type,
12984 fold_convert_loc (loc, TREE_TYPE (arg1),
12985 arg0),
12986 arg1);
12987 default:
12988 break;
12989 }
12990 else if (wi::eq_p (arg1, min))
12991 switch (code)
12992 {
12993 case LT_EXPR:
12994 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12995
12996 case LE_EXPR:
12997 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12998
12999 case GE_EXPR:
13000 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13001
13002 case GT_EXPR:
13003 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13004
13005 default:
13006 break;
13007 }
13008 else if (wi::eq_p (arg1, min + 1))
13009 switch (code)
13010 {
13011 case GE_EXPR:
13012 arg1 = const_binop (MINUS_EXPR, arg1,
13013 build_int_cst (TREE_TYPE (arg1), 1));
13014 return fold_build2_loc (loc, NE_EXPR, type,
13015 fold_convert_loc (loc,
13016 TREE_TYPE (arg1), arg0),
13017 arg1);
13018 case LT_EXPR:
13019 arg1 = const_binop (MINUS_EXPR, arg1,
13020 build_int_cst (TREE_TYPE (arg1), 1));
13021 return fold_build2_loc (loc, EQ_EXPR, type,
13022 fold_convert_loc (loc, TREE_TYPE (arg1),
13023 arg0),
13024 arg1);
13025 default:
13026 break;
13027 }
13028
13029 else if (wi::eq_p (arg1, signed_max)
13030 && TYPE_UNSIGNED (arg1_type)
13031 /* We will flip the signedness of the comparison operator
13032 associated with the mode of arg1, so the sign bit is
13033 specified by this mode. Check that arg1 is the signed
13034 max associated with this sign bit. */
13035 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13036 /* signed_type does not work on pointer types. */
13037 && INTEGRAL_TYPE_P (arg1_type))
13038 {
13039 /* The following case also applies to X < signed_max+1
13040 and X >= signed_max+1 because of previous transformations. */
13041 if (code == LE_EXPR || code == GT_EXPR)
13042 {
13043 tree st = signed_type_for (arg1_type);
13044 return fold_build2_loc (loc,
13045 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13046 type, fold_convert_loc (loc, st, arg0),
13047 build_int_cst (st, 0));
13048 }
13049 }
13050 }
13051 }
13052
13053 /* If we are comparing an ABS_EXPR with a constant, we can
13054 convert all the cases into explicit comparisons, but they may
13055 well not be faster than doing the ABS and one comparison.
13056 But ABS (X) <= C is a range comparison, which becomes a subtraction
13057 and a comparison, and is probably faster. */
13058 if (code == LE_EXPR
13059 && TREE_CODE (arg1) == INTEGER_CST
13060 && TREE_CODE (arg0) == ABS_EXPR
13061 && ! TREE_SIDE_EFFECTS (arg0)
13062 && (0 != (tem = negate_expr (arg1)))
13063 && TREE_CODE (tem) == INTEGER_CST
13064 && !TREE_OVERFLOW (tem))
13065 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13066 build2 (GE_EXPR, type,
13067 TREE_OPERAND (arg0, 0), tem),
13068 build2 (LE_EXPR, type,
13069 TREE_OPERAND (arg0, 0), arg1));
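/* Worked example (illustrative): ABS (X) <= 5 folds to the range
   test X >= -5 && X <= 5; negate_expr must produce -5 without
   overflow for the fold to apply.  */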
13070
13071 /* Convert ABS_EXPR<x> >= 0 to true. */
13072 strict_overflow_p = false;
13073 if (code == GE_EXPR
13074 && (integer_zerop (arg1)
13075 || (! HONOR_NANS (element_mode (arg0))
13076 && real_zerop (arg1)))
13077 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13078 {
13079 if (strict_overflow_p)
13080 fold_overflow_warning (("assuming signed overflow does not occur "
13081 "when simplifying comparison of "
13082 "absolute value and zero"),
13083 WARN_STRICT_OVERFLOW_CONDITIONAL);
13084 return omit_one_operand_loc (loc, type,
13085 constant_boolean_node (true, type),
13086 arg0);
13087 }
13088
13089 /* Convert ABS_EXPR<x> < 0 to false. */
13090 strict_overflow_p = false;
13091 if (code == LT_EXPR
13092 && (integer_zerop (arg1) || real_zerop (arg1))
13093 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13094 {
13095 if (strict_overflow_p)
13096 fold_overflow_warning (("assuming signed overflow does not occur "
13097 "when simplifying comparison of "
13098 "absolute value and zero"),
13099 WARN_STRICT_OVERFLOW_CONDITIONAL);
13100 return omit_one_operand_loc (loc, type,
13101 constant_boolean_node (false, type),
13102 arg0);
13103 }
13104
13105 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13106 and similarly for >= into !=. */
13107 if ((code == LT_EXPR || code == GE_EXPR)
13108 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13109 && TREE_CODE (arg1) == LSHIFT_EXPR
13110 && integer_onep (TREE_OPERAND (arg1, 0)))
13111 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13112 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13113 TREE_OPERAND (arg1, 1)),
13114 build_zero_cst (TREE_TYPE (arg0)));
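/* Worked example (illustrative): for unsigned X, X < (1 << Y)
   folds to (X >> Y) == 0 and X >= (1 << Y) folds to (X >> Y) != 0:
   X has a bit set at or above position Y exactly when the shifted
   value is nonzero.  */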
13115
13116 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13117 otherwise Y might be >= # of bits in X's type and thus e.g.
13118 (unsigned char) (1 << Y) for Y == 15 might be 0.
13119 If the cast is widening, then 1 << Y should have an unsigned type,
13120 otherwise if Y is the number of bits in the signed shift type minus 1,
13121 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13122 Y == 31 might be 0xffffffff80000000. */
13123 if ((code == LT_EXPR || code == GE_EXPR)
13124 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13125 && CONVERT_EXPR_P (arg1)
13126 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13127 && (element_precision (TREE_TYPE (arg1))
13128 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13129 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13130 || (element_precision (TREE_TYPE (arg1))
13131 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13132 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13133 {
13134 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13135 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13136 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13137 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13138 build_zero_cst (TREE_TYPE (arg0)));
13139 }
13140
13141 return NULL_TREE;
13142
13143 case UNORDERED_EXPR:
13144 case ORDERED_EXPR:
13145 case UNLT_EXPR:
13146 case UNLE_EXPR:
13147 case UNGT_EXPR:
13148 case UNGE_EXPR:
13149 case UNEQ_EXPR:
13150 case LTGT_EXPR:
13151 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13152 {
13153 t1 = fold_relational_const (code, type, arg0, arg1);
13154 if (t1 != NULL_TREE)
13155 return t1;
13156 }
13157
13158 /* If the first operand is NaN, the result is constant. */
13159 if (TREE_CODE (arg0) == REAL_CST
13160 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13161 && (code != LTGT_EXPR || ! flag_trapping_math))
13162 {
13163 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13164 ? integer_zero_node
13165 : integer_one_node;
13166 return omit_one_operand_loc (loc, type, t1, arg1);
13167 }
13168
13169 /* If the second operand is NaN, the result is constant. */
13170 if (TREE_CODE (arg1) == REAL_CST
13171 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13172 && (code != LTGT_EXPR || ! flag_trapping_math))
13173 {
13174 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13175 ? integer_zero_node
13176 : integer_one_node;
13177 return omit_one_operand_loc (loc, type, t1, arg0);
13178 }
13179
13180 /* Simplify unordered comparison of something with itself. */
13181 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13182 && operand_equal_p (arg0, arg1, 0))
13183 return constant_boolean_node (1, type);
13184
13185 if (code == LTGT_EXPR
13186 && !flag_trapping_math
13187 && operand_equal_p (arg0, arg1, 0))
13188 return constant_boolean_node (0, type);
13189
13190 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13191 {
13192 tree targ0 = strip_float_extensions (arg0);
13193 tree targ1 = strip_float_extensions (arg1);
13194 tree newtype = TREE_TYPE (targ0);
13195
13196 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13197 newtype = TREE_TYPE (targ1);
13198
13199 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13200 return fold_build2_loc (loc, code, type,
13201 fold_convert_loc (loc, newtype, targ0),
13202 fold_convert_loc (loc, newtype, targ1));
13203 }
13204
13205 return NULL_TREE;
13206
13207 case COMPOUND_EXPR:
13208 /* When pedantic, a compound expression can be neither an lvalue
13209 nor an integer constant expression. */
13210 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13211 return NULL_TREE;
13212 /* Don't let (0, 0) be a null pointer constant. */
13213 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13214 : fold_convert_loc (loc, type, arg1);
13215 return pedantic_non_lvalue_loc (loc, tem);
13216
13217 case ASSERT_EXPR:
13218 /* An ASSERT_EXPR should never be passed to fold_binary. */
13219 gcc_unreachable ();
13220
13221 default:
13222 return NULL_TREE;
13223 } /* switch (code) */
13224 }
13225
13226 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13227 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13228 of GOTO_EXPR. */
13229
13230 static tree
13231 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13232 {
13233 switch (TREE_CODE (*tp))
13234 {
13235 case LABEL_EXPR:
13236 return *tp;
13237
13238 case GOTO_EXPR:
13239 *walk_subtrees = 0;
13240
13241 /* ... fall through ... */
13242
13243 default:
13244 return NULL_TREE;
13245 }
13246 }
13247
13248 /* Return whether the sub-tree ST contains a label which is accessible from
13249 outside the sub-tree. */
13250
13251 static bool
13252 contains_label_p (tree st)
13253 {
13254 return
13255 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13256 }
13257
13258 /* Fold a ternary expression of code CODE and type TYPE with operands
13259 OP0, OP1, and OP2. Return the folded expression if folding is
13260 successful. Otherwise, return NULL_TREE. */
13261
13262 tree
13263 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13264 tree op0, tree op1, tree op2)
13265 {
13266 tree tem;
13267 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13268 enum tree_code_class kind = TREE_CODE_CLASS (code);
13269
13270 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13271 && TREE_CODE_LENGTH (code) == 3);
13272
13273 /* If this is a commutative operation, and OP0 is a constant, move it
13274 to OP1 to reduce the number of tests below. */
13275 if (commutative_ternary_tree_code (code)
13276 && tree_swap_operands_p (op0, op1, true))
13277 return fold_build3_loc (loc, code, type, op1, op0, op2);
13278
13279 tem = generic_simplify (loc, code, type, op0, op1, op2);
13280 if (tem)
13281 return tem;
13282
13283 /* Strip any conversions that don't change the mode. This is safe
13284 for every expression, except for a comparison expression because
13285 its signedness is derived from its operands. So, in the latter
13286 case, only strip conversions that don't change the signedness.
13287
13288 Note that this is done as an internal manipulation within the
13289 constant folder, in order to find the simplest representation of
13290 the arguments so that their form can be studied. In any cases,
13291 the appropriate type conversions should be put back in the tree
13292 that will get out of the constant folder. */
13293 if (op0)
13294 {
13295 arg0 = op0;
13296 STRIP_NOPS (arg0);
13297 }
13298
13299 if (op1)
13300 {
13301 arg1 = op1;
13302 STRIP_NOPS (arg1);
13303 }
13304
13305 if (op2)
13306 {
13307 arg2 = op2;
13308 STRIP_NOPS (arg2);
13309 }
13310
13311 switch (code)
13312 {
13313 case COMPONENT_REF:
13314 if (TREE_CODE (arg0) == CONSTRUCTOR
13315 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13316 {
13317 unsigned HOST_WIDE_INT idx;
13318 tree field, value;
13319 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13320 if (field == arg1)
13321 return value;
13322 }
13323 return NULL_TREE;
13324
13325 case COND_EXPR:
13326 case VEC_COND_EXPR:
13327 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13328 so all simple results must be passed through pedantic_non_lvalue. */
13329 if (TREE_CODE (arg0) == INTEGER_CST)
13330 {
13331 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13332 tem = integer_zerop (arg0) ? op2 : op1;
13333 /* Only optimize constant conditions when the selected branch
13334 has the same type as the COND_EXPR. This avoids optimizing
13335 away "c ? x : throw", where the throw has a void type.
13336 Avoid throwing away that operand which contains label. */
13337 if ((!TREE_SIDE_EFFECTS (unused_op)
13338 || !contains_label_p (unused_op))
13339 && (! VOID_TYPE_P (TREE_TYPE (tem))
13340 || VOID_TYPE_P (type)))
13341 return pedantic_non_lvalue_loc (loc, tem);
13342 return NULL_TREE;
13343 }
13344 else if (TREE_CODE (arg0) == VECTOR_CST)
13345 {
13346 if ((TREE_CODE (arg1) == VECTOR_CST
13347 || TREE_CODE (arg1) == CONSTRUCTOR)
13348 && (TREE_CODE (arg2) == VECTOR_CST
13349 || TREE_CODE (arg2) == CONSTRUCTOR))
13350 {
13351 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13352 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13353 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13354 for (i = 0; i < nelts; i++)
13355 {
13356 tree val = VECTOR_CST_ELT (arg0, i);
13357 if (integer_all_onesp (val))
13358 sel[i] = i;
13359 else if (integer_zerop (val))
13360 sel[i] = nelts + i;
13361 else /* Currently unreachable. */
13362 return NULL_TREE;
13363 }
13364 tree t = fold_vec_perm (type, arg1, arg2, sel);
13365 if (t != NULL_TREE)
13366 return t;
13367 }
13368 }
13369
13370 /* If we have A op B ? A : C, we may be able to convert this to a
13371 simpler expression, depending on the operation and the values
13372 of B and C. Signed zeros prevent all of these transformations,
13373 for reasons given above each one.
13374
13375 Also try swapping the arguments and inverting the conditional. */
13376 if (COMPARISON_CLASS_P (arg0)
13377 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13378 arg1, TREE_OPERAND (arg0, 1))
13379 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13380 {
13381 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13382 if (tem)
13383 return tem;
13384 }
13385
13386 if (COMPARISON_CLASS_P (arg0)
13387 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13388 op2,
13389 TREE_OPERAND (arg0, 1))
13390 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13391 {
13392 location_t loc0 = expr_location_or (arg0, loc);
13393 tem = fold_invert_truthvalue (loc0, arg0);
13394 if (tem && COMPARISON_CLASS_P (tem))
13395 {
13396 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13397 if (tem)
13398 return tem;
13399 }
13400 }
13401
13402 /* If the second operand is simpler than the third, swap them
13403 since that produces better jump optimization results. */
13404 if (truth_value_p (TREE_CODE (arg0))
13405 && tree_swap_operands_p (op1, op2, false))
13406 {
13407 location_t loc0 = expr_location_or (arg0, loc);
13408 /* See if this can be inverted. If it can't, possibly because
13409 it was a floating-point inequality comparison, don't do
13410 anything. */
13411 tem = fold_invert_truthvalue (loc0, arg0);
13412 if (tem)
13413 return fold_build3_loc (loc, code, type, tem, op2, op1);
13414 }
13415
13416 /* Convert A ? 1 : 0 to simply A. */
13417 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13418 : (integer_onep (op1)
13419 && !VECTOR_TYPE_P (type)))
13420 && integer_zerop (op2)
13421 /* If we try to convert OP0 to our type, the
13422 call to fold will try to move the conversion inside
13423 a COND, which will recurse. In that case, the COND_EXPR
13424 is probably the best choice, so leave it alone. */
13425 && type == TREE_TYPE (arg0))
13426 return pedantic_non_lvalue_loc (loc, arg0);
13427
13428 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13429 over COND_EXPR in cases such as floating point comparisons. */
13430 if (integer_zerop (op1)
13431 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13432 : (integer_onep (op2)
13433 && !VECTOR_TYPE_P (type)))
13434 && truth_value_p (TREE_CODE (arg0)))
13435 return pedantic_non_lvalue_loc (loc,
13436 fold_convert_loc (loc, type,
13437 invert_truthvalue_loc (loc,
13438 arg0)));
13439
13440 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13441 if (TREE_CODE (arg0) == LT_EXPR
13442 && integer_zerop (TREE_OPERAND (arg0, 1))
13443 && integer_zerop (op2)
13444 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13445 {
13446 /* sign_bit_p looks through both zero and sign extensions,
13447 but for this optimization only sign extensions are
13448 usable. */
13449 tree tem2 = TREE_OPERAND (arg0, 0);
13450 while (tem != tem2)
13451 {
13452 if (TREE_CODE (tem2) != NOP_EXPR
13453 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13454 {
13455 tem = NULL_TREE;
13456 break;
13457 }
13458 tem2 = TREE_OPERAND (tem2, 0);
13459 }
13460 /* sign_bit_p only checks ARG1 bits within A's precision.
13461 If <sign bit of A> has a wider type than A, the bits outside
13462 of A's precision in <sign bit of A> need to be checked too.
13463 If they are all 0, this optimization needs to be done in
13464 unsigned A's type; if they are all 1, in signed A's type;
13465 otherwise it can't be done. */
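 /* A worked example: if A is a signed 8-bit value and
 <sign bit of A> is a 32-bit constant, the mask covers bits
 8..31, i.e. 0xffffff00. For arg1 == 0x00000080 those bits
 are all 0, so the AND is done in unsigned char; for
 arg1 == 0xffffff80 they are all 1, so signed char is used;
 any other value defeats the optimization. */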
13466 if (tem
13467 && TYPE_PRECISION (TREE_TYPE (tem))
13468 < TYPE_PRECISION (TREE_TYPE (arg1))
13469 && TYPE_PRECISION (TREE_TYPE (tem))
13470 < TYPE_PRECISION (type))
13471 {
13472 int inner_width, outer_width;
13473 tree tem_type;
13474
13475 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13476 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13477 if (outer_width > TYPE_PRECISION (type))
13478 outer_width = TYPE_PRECISION (type);
13479
13480 wide_int mask = wi::shifted_mask
13481 (inner_width, outer_width - inner_width, false,
13482 TYPE_PRECISION (TREE_TYPE (arg1)));
13483
13484 wide_int common = mask & arg1;
13485 if (common == mask)
13486 {
13487 tem_type = signed_type_for (TREE_TYPE (tem));
13488 tem = fold_convert_loc (loc, tem_type, tem);
13489 }
13490 else if (common == 0)
13491 {
13492 tem_type = unsigned_type_for (TREE_TYPE (tem));
13493 tem = fold_convert_loc (loc, tem_type, tem);
13494 }
13495 else
13496 tem = NULL_TREE;
13497 }
13498
13499 if (tem)
13500 return
13501 fold_convert_loc (loc, type,
13502 fold_build2_loc (loc, BIT_AND_EXPR,
13503 TREE_TYPE (tem), tem,
13504 fold_convert_loc (loc,
13505 TREE_TYPE (tem),
13506 arg1)));
13507 }
13508
13509 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13510 already handled above. */
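 /* E.g. with N == 3: ((a >> 3) & 1) ? 8 : 0 becomes a & 8,
 since tree_log2 (8) matches the shift count. */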
13511 if (TREE_CODE (arg0) == BIT_AND_EXPR
13512 && integer_onep (TREE_OPERAND (arg0, 1))
13513 && integer_zerop (op2)
13514 && integer_pow2p (arg1))
13515 {
13516 tree tem = TREE_OPERAND (arg0, 0);
13517 STRIP_NOPS (tem);
13518 if (TREE_CODE (tem) == RSHIFT_EXPR
13519 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13520 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13521 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13522 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13523 TREE_OPERAND (tem, 0), arg1);
13524 }
13525
13526 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13527 is probably obsolete because the first operand should be a
13528 truth value (that's why we have the two cases above), but let's
13529 leave it in until we can confirm this for all front-ends. */
13530 if (integer_zerop (op2)
13531 && TREE_CODE (arg0) == NE_EXPR
13532 && integer_zerop (TREE_OPERAND (arg0, 1))
13533 && integer_pow2p (arg1)
13534 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13535 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13536 arg1, OEP_ONLY_CONST))
13537 return pedantic_non_lvalue_loc (loc,
13538 fold_convert_loc (loc, type,
13539 TREE_OPERAND (arg0, 0)));
13540
13541 /* Disable the transformations below for vectors, since
13542 fold_binary_op_with_conditional_arg may undo them immediately,
13543 yielding an infinite loop. */
13544 if (code == VEC_COND_EXPR)
13545 return NULL_TREE;
13546
13547 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13548 if (integer_zerop (op2)
13549 && truth_value_p (TREE_CODE (arg0))
13550 && truth_value_p (TREE_CODE (arg1))
13551 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13552 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13553 : TRUTH_ANDIF_EXPR,
13554 type, fold_convert_loc (loc, type, arg0), arg1);
13555
13556 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13557 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13558 && truth_value_p (TREE_CODE (arg0))
13559 && truth_value_p (TREE_CODE (arg1))
13560 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13561 {
13562 location_t loc0 = expr_location_or (arg0, loc);
13563 /* Only perform transformation if ARG0 is easily inverted. */
13564 tem = fold_invert_truthvalue (loc0, arg0);
13565 if (tem)
13566 return fold_build2_loc (loc, code == VEC_COND_EXPR
13567 ? BIT_IOR_EXPR
13568 : TRUTH_ORIF_EXPR,
13569 type, fold_convert_loc (loc, type, tem),
13570 arg1);
13571 }
13572
13573 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13574 if (integer_zerop (arg1)
13575 && truth_value_p (TREE_CODE (arg0))
13576 && truth_value_p (TREE_CODE (op2))
13577 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13578 {
13579 location_t loc0 = expr_location_or (arg0, loc);
13580 /* Only perform transformation if ARG0 is easily inverted. */
13581 tem = fold_invert_truthvalue (loc0, arg0);
13582 if (tem)
13583 return fold_build2_loc (loc, code == VEC_COND_EXPR
13584 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13585 type, fold_convert_loc (loc, type, tem),
13586 op2);
13587 }
13588
13589 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13590 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13591 && truth_value_p (TREE_CODE (arg0))
13592 && truth_value_p (TREE_CODE (op2))
13593 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13594 return fold_build2_loc (loc, code == VEC_COND_EXPR
13595 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13596 type, fold_convert_loc (loc, type, arg0), op2);
13597
13598 return NULL_TREE;
13599
13600 case CALL_EXPR:
13601 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13602 of fold_ternary on them. */
13603 gcc_unreachable ();
13604
13605 case BIT_FIELD_REF:
13606 if ((TREE_CODE (arg0) == VECTOR_CST
13607 || (TREE_CODE (arg0) == CONSTRUCTOR
13608 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13609 && (type == TREE_TYPE (TREE_TYPE (arg0))
13610 || (TREE_CODE (type) == VECTOR_TYPE
13611 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13612 {
13613 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13614 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13615 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13616 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13617
13618 if (n != 0
13619 && (idx % width) == 0
13620 && (n % width) == 0
13621 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13622 {
13623 idx = idx / width;
13624 n = n / width;
13625
13626 if (TREE_CODE (arg0) == VECTOR_CST)
13627 {
13628 if (n == 1)
13629 return VECTOR_CST_ELT (arg0, idx);
13630
13631 tree *vals = XALLOCAVEC (tree, n);
13632 for (unsigned i = 0; i < n; ++i)
13633 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13634 return build_vector (type, vals);
13635 }
13636
13637 /* Constructor elements can be subvectors. */
13638 unsigned HOST_WIDE_INT k = 1;
13639 if (CONSTRUCTOR_NELTS (arg0) != 0)
13640 {
13641 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13642 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13643 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13644 }
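 /* E.g. if a V8SF CONSTRUCTOR is built from two V4SF values, then
 k == 4, and a BIT_FIELD_REF selecting elements 4..7 returns the
 second constructor element directly. */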
13645
13646 /* We keep an exact subset of the constructor elements. */
13647 if ((idx % k) == 0 && (n % k) == 0)
13648 {
13649 if (CONSTRUCTOR_NELTS (arg0) == 0)
13650 return build_constructor (type, NULL);
13651 idx /= k;
13652 n /= k;
13653 if (n == 1)
13654 {
13655 if (idx < CONSTRUCTOR_NELTS (arg0))
13656 return CONSTRUCTOR_ELT (arg0, idx)->value;
13657 return build_zero_cst (type);
13658 }
13659
13660 vec<constructor_elt, va_gc> *vals;
13661 vec_alloc (vals, n);
13662 for (unsigned i = 0;
13663 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13664 ++i)
13665 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13666 CONSTRUCTOR_ELT
13667 (arg0, idx + i)->value);
13668 return build_constructor (type, vals);
13669 }
13670 /* The bitfield references a single constructor element. */
13671 else if (idx + n <= (idx / k + 1) * k)
13672 {
13673 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13674 return build_zero_cst (type);
13675 else if (n == k)
13676 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13677 else
13678 return fold_build3_loc (loc, code, type,
13679 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13680 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13681 }
13682 }
13683 }
13684
13685 /* A bit-field-ref that references the full argument can be stripped. */
13686 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13687 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13688 && integer_zerop (op2))
13689 return fold_convert_loc (loc, type, arg0);
13690
13691 /* On constants we can use native encode/interpret to constant
13692 fold (nearly) all BIT_FIELD_REFs. */
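 /* For instance, a BIT_FIELD_REF extracting 8 bits at bit offset 8
 from a 32-bit INTEGER_CST is folded by serializing the constant
 into a byte buffer with native_encode_expr and reinterpreting the
 selected byte with native_interpret_expr; which byte that is
 depends on the target's endianness. */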
13693 if (CONSTANT_CLASS_P (arg0)
13694 && can_native_interpret_type_p (type)
13695 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13696 /* This limitation should not be necessary; we just need to
13697 round this up to the mode size. */
13698 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13699 /* Need bit-shifting of the buffer to relax the following. */
13700 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13701 {
13702 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13703 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13704 unsigned HOST_WIDE_INT clen;
13705 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13706 /* ??? We cannot tell native_encode_expr to start at
13707 an arbitrary byte offset, so limit ourselves to a
13708 reasonable amount of work. */
13709 if (clen <= 4096)
13710 {
13711 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13712 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13713 if (len > 0
13714 && len * BITS_PER_UNIT >= bitpos + bitsize)
13715 {
13716 tree v = native_interpret_expr (type,
13717 b + bitpos / BITS_PER_UNIT,
13718 bitsize / BITS_PER_UNIT);
13719 if (v)
13720 return v;
13721 }
13722 }
13723 }
13724
13725 return NULL_TREE;
13726
13727 case FMA_EXPR:
13728 /* For integers we can decompose the FMA if possible. */
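 /* E.g. FMA_EXPR <5, 7, x> folds to x + 35, and FMA_EXPR <a, b, 0>
 folds to a * b. */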
13729 if (TREE_CODE (arg0) == INTEGER_CST
13730 && TREE_CODE (arg1) == INTEGER_CST)
13731 return fold_build2_loc (loc, PLUS_EXPR, type,
13732 const_binop (MULT_EXPR, arg0, arg1), arg2);
13733 if (integer_zerop (arg2))
13734 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13735
13736 return fold_fma (loc, type, arg0, arg1, arg2);
13737
13738 case VEC_PERM_EXPR:
13739 if (TREE_CODE (arg2) == VECTOR_CST)
13740 {
13741 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13742 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13743 unsigned char *sel2 = sel + nelts;
13744 bool need_mask_canon = false;
13745 bool need_mask_canon2 = false;
13746 bool all_in_vec0 = true;
13747 bool all_in_vec1 = true;
13748 bool maybe_identity = true;
13749 bool single_arg = (op0 == op1);
13750 bool changed = false;
13751
13752 mask2 = 2 * nelts - 1;
13753 mask = single_arg ? (nelts - 1) : mask2;
13754 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13755 for (i = 0; i < nelts; i++)
13756 {
13757 tree val = VECTOR_CST_ELT (arg2, i);
13758 if (TREE_CODE (val) != INTEGER_CST)
13759 return NULL_TREE;
13760
13761 /* Make sure that the perm value is in an acceptable
13762 range. */
13763 wide_int t = val;
13764 need_mask_canon |= wi::gtu_p (t, mask);
13765 need_mask_canon2 |= wi::gtu_p (t, mask2);
13766 sel[i] = t.to_uhwi () & mask;
13767 sel2[i] = t.to_uhwi () & mask2;
13768
13769 if (sel[i] < nelts)
13770 all_in_vec1 = false;
13771 else
13772 all_in_vec0 = false;
13773
13774 if ((sel[i] & (nelts-1)) != i)
13775 maybe_identity = false;
13776 }
13777
13778 if (maybe_identity)
13779 {
13780 if (all_in_vec0)
13781 return op0;
13782 if (all_in_vec1)
13783 return op1;
13784 }
13785
13786 if (all_in_vec0)
13787 op1 = op0;
13788 else if (all_in_vec1)
13789 {
13790 op0 = op1;
13791 for (i = 0; i < nelts; i++)
13792 sel[i] -= nelts;
13793 need_mask_canon = true;
13794 }
13795
13796 if ((TREE_CODE (op0) == VECTOR_CST
13797 || TREE_CODE (op0) == CONSTRUCTOR)
13798 && (TREE_CODE (op1) == VECTOR_CST
13799 || TREE_CODE (op1) == CONSTRUCTOR))
13800 {
13801 tree t = fold_vec_perm (type, op0, op1, sel);
13802 if (t != NULL_TREE)
13803 return t;
13804 }
13805
13806 if (op0 == op1 && !single_arg)
13807 changed = true;
13808
13809 /* Some targets are deficient and fail to expand a single
13810 argument permutation while still allowing an equivalent
13811 2-argument version. */
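 /* E.g. with nelts == 4, a mask element of 6 in a single-argument
 permutation canonicalizes to 2 under MASK (nelts - 1) but stays 6
 under MASK2 (2 * nelts - 1); if the target only supports the
 two-argument form, the uncanonicalized selector is kept. */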
13812 if (need_mask_canon && arg2 == op2
13813 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13814 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13815 {
13816 need_mask_canon = need_mask_canon2;
13817 sel = sel2;
13818 }
13819
13820 if (need_mask_canon && arg2 == op2)
13821 {
13822 tree *tsel = XALLOCAVEC (tree, nelts);
13823 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13824 for (i = 0; i < nelts; i++)
13825 tsel[i] = build_int_cst (eltype, sel[i]);
13826 op2 = build_vector (TREE_TYPE (arg2), tsel);
13827 changed = true;
13828 }
13829
13830 if (changed)
13831 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13832 }
13833 return NULL_TREE;
13834
13835 default:
13836 return NULL_TREE;
13837 } /* switch (code) */
13838 }
13839
13840 /* Perform constant folding and related simplification of EXPR.
13841 The related simplifications include x*1 => x, x*0 => 0, etc.,
13842 and application of the associative law.
13843 NOP_EXPR conversions may be removed freely (as long as we
13844 are careful not to change the type of the overall expression).
13845 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13846 but we can constant-fold them if they have constant operands. */
13847
13848 #ifdef ENABLE_FOLD_CHECKING
13849 # define fold(x) fold_1 (x)
13850 static tree fold_1 (tree);
13851 static
13852 #endif
13853 tree
13854 fold (tree expr)
13855 {
13856 const tree t = expr;
13857 enum tree_code code = TREE_CODE (t);
13858 enum tree_code_class kind = TREE_CODE_CLASS (code);
13859 tree tem;
13860 location_t loc = EXPR_LOCATION (expr);
13861
13862 /* Return right away if a constant. */
13863 if (kind == tcc_constant)
13864 return t;
13865
13866 /* CALL_EXPR-like objects with variable numbers of operands are
13867 treated specially. */
13868 if (kind == tcc_vl_exp)
13869 {
13870 if (code == CALL_EXPR)
13871 {
13872 tem = fold_call_expr (loc, expr, false);
13873 return tem ? tem : expr;
13874 }
13875 return expr;
13876 }
13877
13878 if (IS_EXPR_CODE_CLASS (kind))
13879 {
13880 tree type = TREE_TYPE (t);
13881 tree op0, op1, op2;
13882
13883 switch (TREE_CODE_LENGTH (code))
13884 {
13885 case 1:
13886 op0 = TREE_OPERAND (t, 0);
13887 tem = fold_unary_loc (loc, code, type, op0);
13888 return tem ? tem : expr;
13889 case 2:
13890 op0 = TREE_OPERAND (t, 0);
13891 op1 = TREE_OPERAND (t, 1);
13892 tem = fold_binary_loc (loc, code, type, op0, op1);
13893 return tem ? tem : expr;
13894 case 3:
13895 op0 = TREE_OPERAND (t, 0);
13896 op1 = TREE_OPERAND (t, 1);
13897 op2 = TREE_OPERAND (t, 2);
13898 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13899 return tem ? tem : expr;
13900 default:
13901 break;
13902 }
13903 }
13904
13905 switch (code)
13906 {
13907 case ARRAY_REF:
13908 {
13909 tree op0 = TREE_OPERAND (t, 0);
13910 tree op1 = TREE_OPERAND (t, 1);
13911
13912 if (TREE_CODE (op1) == INTEGER_CST
13913 && TREE_CODE (op0) == CONSTRUCTOR
13914 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13915 {
13916 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13917 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13918 unsigned HOST_WIDE_INT begin = 0;
13919
13920 /* Find a matching index by means of a binary search. */
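 /* The search relies on the element list being sorted by
 nondecreasing index, which is how array CONSTRUCTORs are kept;
 a RANGE_EXPR index covers the interval between its two operands. */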
13921 while (begin != end)
13922 {
13923 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13924 tree index = (*elts)[middle].index;
13925
13926 if (TREE_CODE (index) == INTEGER_CST
13927 && tree_int_cst_lt (index, op1))
13928 begin = middle + 1;
13929 else if (TREE_CODE (index) == INTEGER_CST
13930 && tree_int_cst_lt (op1, index))
13931 end = middle;
13932 else if (TREE_CODE (index) == RANGE_EXPR
13933 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13934 begin = middle + 1;
13935 else if (TREE_CODE (index) == RANGE_EXPR
13936 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13937 end = middle;
13938 else
13939 return (*elts)[middle].value;
13940 }
13941 }
13942
13943 return t;
13944 }
13945
13946 /* Return a VECTOR_CST if possible. */
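 /* E.g. a CONSTRUCTOR { 1, 2 } of a four-element integer vector type
 becomes the VECTOR_CST { 1, 2, 0, 0 }; trailing elements default
 to zero. */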
13947 case CONSTRUCTOR:
13948 {
13949 tree type = TREE_TYPE (t);
13950 if (TREE_CODE (type) != VECTOR_TYPE)
13951 return t;
13952
13953 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13954 unsigned HOST_WIDE_INT idx, pos = 0;
13955 tree value;
13956
13957 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13958 {
13959 if (!CONSTANT_CLASS_P (value))
13960 return t;
13961 if (TREE_CODE (value) == VECTOR_CST)
13962 {
13963 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13964 vec[pos++] = VECTOR_CST_ELT (value, i);
13965 }
13966 else
13967 vec[pos++] = value;
13968 }
13969 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13970 vec[pos] = build_zero_cst (TREE_TYPE (type));
13971
13972 return build_vector (type, vec);
13973 }
13974
13975 case CONST_DECL:
13976 return fold (DECL_INITIAL (t));
13977
13978 default:
13979 return t;
13980 } /* switch (code) */
13981 }
13982
13983 #ifdef ENABLE_FOLD_CHECKING
13984 #undef fold
13985
13986 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13987 hash_table<pointer_hash<const tree_node> > *);
13988 static void fold_check_failed (const_tree, const_tree);
13989 void print_fold_checksum (const_tree);
13990
13991 /* When --enable-checking=fold is in effect, compute a digest of EXPR
13992 before and after the actual call to fold to verify that fold did
13993 not accidentally change the original EXPR. */
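 /* The hash table records nodes already digested, so shared subtrees
 are processed only once and walks through type and chain pointers
 terminate. */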
13994
13995 tree
13996 fold (tree expr)
13997 {
13998 tree ret;
13999 struct md5_ctx ctx;
14000 unsigned char checksum_before[16], checksum_after[16];
14001 hash_table<pointer_hash<const tree_node> > ht (32);
14002
14003 md5_init_ctx (&ctx);
14004 fold_checksum_tree (expr, &ctx, &ht);
14005 md5_finish_ctx (&ctx, checksum_before);
14006 ht.empty ();
14007
14008 ret = fold_1 (expr);
14009
14010 md5_init_ctx (&ctx);
14011 fold_checksum_tree (expr, &ctx, &ht);
14012 md5_finish_ctx (&ctx, checksum_after);
14013
14014 if (memcmp (checksum_before, checksum_after, 16))
14015 fold_check_failed (expr, ret);
14016
14017 return ret;
14018 }
14019
14020 void
14021 print_fold_checksum (const_tree expr)
14022 {
14023 struct md5_ctx ctx;
14024 unsigned char checksum[16], cnt;
14025 hash_table<pointer_hash<const tree_node> > ht (32);
14026
14027 md5_init_ctx (&ctx);
14028 fold_checksum_tree (expr, &ctx, &ht);
14029 md5_finish_ctx (&ctx, checksum);
14030 for (cnt = 0; cnt < 16; ++cnt)
14031 fprintf (stderr, "%02x", checksum[cnt]);
14032 putc ('\n', stderr);
14033 }
14034
14035 static void
14036 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14037 {
14038 internal_error ("fold check: original tree changed by fold");
14039 }
14040
14041 static void
14042 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14043 hash_table<pointer_hash <const tree_node> > *ht)
14044 {
14045 const tree_node **slot;
14046 enum tree_code code;
14047 union tree_node buf;
14048 int i, len;
14049
14050 recursive_label:
14051 if (expr == NULL)
14052 return;
14053 slot = ht->find_slot (expr, INSERT);
14054 if (*slot != NULL)
14055 return;
14056 *slot = expr;
14057 code = TREE_CODE (expr);
14058 if (TREE_CODE_CLASS (code) == tcc_declaration
14059 && DECL_ASSEMBLER_NAME_SET_P (expr))
14060 {
14061 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14062 memcpy ((char *) &buf, expr, tree_size (expr));
14063 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14064 expr = (tree) &buf;
14065 }
14066 else if (TREE_CODE_CLASS (code) == tcc_type
14067 && (TYPE_POINTER_TO (expr)
14068 || TYPE_REFERENCE_TO (expr)
14069 || TYPE_CACHED_VALUES_P (expr)
14070 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14071 || TYPE_NEXT_VARIANT (expr)))
14072 {
14073 /* Allow these fields to be modified. */
14074 tree tmp;
14075 memcpy ((char *) &buf, expr, tree_size (expr));
14076 expr = tmp = (tree) &buf;
14077 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14078 TYPE_POINTER_TO (tmp) = NULL;
14079 TYPE_REFERENCE_TO (tmp) = NULL;
14080 TYPE_NEXT_VARIANT (tmp) = NULL;
14081 if (TYPE_CACHED_VALUES_P (tmp))
14082 {
14083 TYPE_CACHED_VALUES_P (tmp) = 0;
14084 TYPE_CACHED_VALUES (tmp) = NULL;
14085 }
14086 }
14087 md5_process_bytes (expr, tree_size (expr), ctx);
14088 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14089 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14090 if (TREE_CODE_CLASS (code) != tcc_type
14091 && TREE_CODE_CLASS (code) != tcc_declaration
14092 && code != TREE_LIST
14093 && code != SSA_NAME
14094 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14095 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14096 switch (TREE_CODE_CLASS (code))
14097 {
14098 case tcc_constant:
14099 switch (code)
14100 {
14101 case STRING_CST:
14102 md5_process_bytes (TREE_STRING_POINTER (expr),
14103 TREE_STRING_LENGTH (expr), ctx);
14104 break;
14105 case COMPLEX_CST:
14106 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14107 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14108 break;
14109 case VECTOR_CST:
14110 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14111 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14112 break;
14113 default:
14114 break;
14115 }
14116 break;
14117 case tcc_exceptional:
14118 switch (code)
14119 {
14120 case TREE_LIST:
14121 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14122 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14123 expr = TREE_CHAIN (expr);
14124 goto recursive_label;
14125 break;
14126 case TREE_VEC:
14127 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14128 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14129 break;
14130 default:
14131 break;
14132 }
14133 break;
14134 case tcc_expression:
14135 case tcc_reference:
14136 case tcc_comparison:
14137 case tcc_unary:
14138 case tcc_binary:
14139 case tcc_statement:
14140 case tcc_vl_exp:
14141 len = TREE_OPERAND_LENGTH (expr);
14142 for (i = 0; i < len; ++i)
14143 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14144 break;
14145 case tcc_declaration:
14146 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14147 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14148 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14149 {
14150 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14151 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14152 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14153 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14154 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14155 }
14156
14157 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14158 {
14159 if (TREE_CODE (expr) == FUNCTION_DECL)
14160 {
14161 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14162 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14163 }
14164 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14165 }
14166 break;
14167 case tcc_type:
14168 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14169 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14170 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14171 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14172 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14173 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14174 if (INTEGRAL_TYPE_P (expr)
14175 || SCALAR_FLOAT_TYPE_P (expr))
14176 {
14177 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14178 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14179 }
14180 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14181 if (TREE_CODE (expr) == RECORD_TYPE
14182 || TREE_CODE (expr) == UNION_TYPE
14183 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14184 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14185 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14186 break;
14187 default:
14188 break;
14189 }
14190 }
14191
14192 /* Helper function for outputting the checksum of a tree T. When
14193 debugging with gdb, you can "define mynext" to be "next" followed
14194 by "call debug_fold_checksum (op0)", then just trace down till the
14195 outputs differ. */
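 /* A sketch of the gdb macro described above:
 
 (gdb) define mynext
 > next
 > call debug_fold_checksum (op0)
 > end */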
14196
14197 DEBUG_FUNCTION void
14198 debug_fold_checksum (const_tree t)
14199 {
14200 int i;
14201 unsigned char checksum[16];
14202 struct md5_ctx ctx;
14203 hash_table<pointer_hash<const tree_node> > ht (32);
14204
14205 md5_init_ctx (&ctx);
14206 fold_checksum_tree (t, &ctx, &ht);
14207 md5_finish_ctx (&ctx, checksum);
14208 ht.empty ();
14209
14210 for (i = 0; i < 16; i++)
14211 fprintf (stderr, "%d ", checksum[i]);
14212
14213 fprintf (stderr, "\n");
14214 }
14215
14216 #endif
14217
14218 /* Fold a unary tree expression with code CODE of type TYPE with an
14219 operand OP0. LOC is the location of the resulting expression.
14220 Return a folded expression if successful. Otherwise, return a tree
14221 expression with code CODE of type TYPE with an operand OP0. */
14222
14223 tree
14224 fold_build1_stat_loc (location_t loc,
14225 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14226 {
14227 tree tem;
14228 #ifdef ENABLE_FOLD_CHECKING
14229 unsigned char checksum_before[16], checksum_after[16];
14230 struct md5_ctx ctx;
14231 hash_table<pointer_hash<const tree_node> > ht (32);
14232
14233 md5_init_ctx (&ctx);
14234 fold_checksum_tree (op0, &ctx, &ht);
14235 md5_finish_ctx (&ctx, checksum_before);
14236 ht.empty ();
14237 #endif
14238
14239 tem = fold_unary_loc (loc, code, type, op0);
14240 if (!tem)
14241 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14242
14243 #ifdef ENABLE_FOLD_CHECKING
14244 md5_init_ctx (&ctx);
14245 fold_checksum_tree (op0, &ctx, &ht);
14246 md5_finish_ctx (&ctx, checksum_after);
14247
14248 if (memcmp (checksum_before, checksum_after, 16))
14249 fold_check_failed (op0, tem);
14250 #endif
14251 return tem;
14252 }
14253
14254 /* Fold a binary tree expression with code CODE of type TYPE with
14255 operands OP0 and OP1. LOC is the location of the resulting
14256 expression. Return a folded expression if successful. Otherwise,
14257 return a tree expression with code CODE of type TYPE with operands
14258 OP0 and OP1. */
14259
14260 tree
14261 fold_build2_stat_loc (location_t loc,
14262 enum tree_code code, tree type, tree op0, tree op1
14263 MEM_STAT_DECL)
14264 {
14265 tree tem;
14266 #ifdef ENABLE_FOLD_CHECKING
14267 unsigned char checksum_before_op0[16],
14268 checksum_before_op1[16],
14269 checksum_after_op0[16],
14270 checksum_after_op1[16];
14271 struct md5_ctx ctx;
14272 hash_table<pointer_hash<const tree_node> > ht (32);
14273
14274 md5_init_ctx (&ctx);
14275 fold_checksum_tree (op0, &ctx, &ht);
14276 md5_finish_ctx (&ctx, checksum_before_op0);
14277 ht.empty ();
14278
14279 md5_init_ctx (&ctx);
14280 fold_checksum_tree (op1, &ctx, &ht);
14281 md5_finish_ctx (&ctx, checksum_before_op1);
14282 ht.empty ();
14283 #endif
14284
14285 tem = fold_binary_loc (loc, code, type, op0, op1);
14286 if (!tem)
14287 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14288
14289 #ifdef ENABLE_FOLD_CHECKING
14290 md5_init_ctx (&ctx);
14291 fold_checksum_tree (op0, &ctx, &ht);
14292 md5_finish_ctx (&ctx, checksum_after_op0);
14293 ht.empty ();
14294
14295 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14296 fold_check_failed (op0, tem);
14297
14298 md5_init_ctx (&ctx);
14299 fold_checksum_tree (op1, &ctx, &ht);
14300 md5_finish_ctx (&ctx, checksum_after_op1);
14301
14302 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14303 fold_check_failed (op1, tem);
14304 #endif
14305 return tem;
14306 }
14307
14308 /* Fold a ternary tree expression with code CODE of type TYPE with
14309 operands OP0, OP1, and OP2. Return a folded expression if
14310 successful. Otherwise, return a tree expression with code CODE of
14311 type TYPE with operands OP0, OP1, and OP2. */
14312
14313 tree
14314 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14315 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14316 {
14317 tree tem;
14318 #ifdef ENABLE_FOLD_CHECKING
14319 unsigned char checksum_before_op0[16],
14320 checksum_before_op1[16],
14321 checksum_before_op2[16],
14322 checksum_after_op0[16],
14323 checksum_after_op1[16],
14324 checksum_after_op2[16];
14325 struct md5_ctx ctx;
14326 hash_table<pointer_hash<const tree_node> > ht (32);
14327
14328 md5_init_ctx (&ctx);
14329 fold_checksum_tree (op0, &ctx, &ht);
14330 md5_finish_ctx (&ctx, checksum_before_op0);
14331 ht.empty ();
14332
14333 md5_init_ctx (&ctx);
14334 fold_checksum_tree (op1, &ctx, &ht);
14335 md5_finish_ctx (&ctx, checksum_before_op1);
14336 ht.empty ();
14337
14338 md5_init_ctx (&ctx);
14339 fold_checksum_tree (op2, &ctx, &ht);
14340 md5_finish_ctx (&ctx, checksum_before_op2);
14341 ht.empty ();
14342 #endif
14343
14344 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14345 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14346 if (!tem)
14347 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14348
14349 #ifdef ENABLE_FOLD_CHECKING
14350 md5_init_ctx (&ctx);
14351 fold_checksum_tree (op0, &ctx, &ht);
14352 md5_finish_ctx (&ctx, checksum_after_op0);
14353 ht.empty ();
14354
14355 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14356 fold_check_failed (op0, tem);
14357
14358 md5_init_ctx (&ctx);
14359 fold_checksum_tree (op1, &ctx, &ht);
14360 md5_finish_ctx (&ctx, checksum_after_op1);
14361 ht.empty ();
14362
14363 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14364 fold_check_failed (op1, tem);
14365
14366 md5_init_ctx (&ctx);
14367 fold_checksum_tree (op2, &ctx, &ht);
14368 md5_finish_ctx (&ctx, checksum_after_op2);
14369
14370 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14371 fold_check_failed (op2, tem);
14372 #endif
14373 return tem;
14374 }
14375
14376 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14377 arguments in ARGARRAY, and a null static chain.
14378 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14379 of type TYPE from the given operands as constructed by build_call_array. */
14380
14381 tree
14382 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14383 int nargs, tree *argarray)
14384 {
14385 tree tem;
14386 #ifdef ENABLE_FOLD_CHECKING
14387 unsigned char checksum_before_fn[16],
14388 checksum_before_arglist[16],
14389 checksum_after_fn[16],
14390 checksum_after_arglist[16];
14391 struct md5_ctx ctx;
14392 hash_table<pointer_hash<const tree_node> > ht (32);
14393 int i;
14394
14395 md5_init_ctx (&ctx);
14396 fold_checksum_tree (fn, &ctx, &ht);
14397 md5_finish_ctx (&ctx, checksum_before_fn);
14398 ht.empty ();
14399
14400 md5_init_ctx (&ctx);
14401 for (i = 0; i < nargs; i++)
14402 fold_checksum_tree (argarray[i], &ctx, &ht);
14403 md5_finish_ctx (&ctx, checksum_before_arglist);
14404 ht.empty ();
14405 #endif
14406
14407 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14408 if (!tem)
14409 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14410
14411 #ifdef ENABLE_FOLD_CHECKING
14412 md5_init_ctx (&ctx);
14413 fold_checksum_tree (fn, &ctx, &ht);
14414 md5_finish_ctx (&ctx, checksum_after_fn);
14415 ht.empty ();
14416
14417 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14418 fold_check_failed (fn, tem);
14419
14420 md5_init_ctx (&ctx);
14421 for (i = 0; i < nargs; i++)
14422 fold_checksum_tree (argarray[i], &ctx, &ht);
14423 md5_finish_ctx (&ctx, checksum_after_arglist);
14424
14425 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14426 fold_check_failed (NULL_TREE, tem);
14427 #endif
14428 return tem;
14429 }
14430
14431 /* Perform constant folding and related simplification of initializer
14432 expression EXPR. These behave identically to "fold_buildN" but ignore
14433 potential run-time traps and exceptions that fold must preserve. */
14434
14435 #define START_FOLD_INIT \
14436 int saved_signaling_nans = flag_signaling_nans;\
14437 int saved_trapping_math = flag_trapping_math;\
14438 int saved_rounding_math = flag_rounding_math;\
14439 int saved_trapv = flag_trapv;\
14440 int saved_folding_initializer = folding_initializer;\
14441 flag_signaling_nans = 0;\
14442 flag_trapping_math = 0;\
14443 flag_rounding_math = 0;\
14444 flag_trapv = 0;\
14445 folding_initializer = 1;
14446
14447 #define END_FOLD_INIT \
14448 flag_signaling_nans = saved_signaling_nans;\
14449 flag_trapping_math = saved_trapping_math;\
14450 flag_rounding_math = saved_rounding_math;\
14451 flag_trapv = saved_trapv;\
14452 folding_initializer = saved_folding_initializer;
14453
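 /* E.g. within fold_build2_initializer_loc, a division like 1.0/0.0
 appearing in a static initializer may fold to +Inf, because
 flag_trapping_math is temporarily cleared; outside initializers
 fold must preserve the trapping division. */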
14454 tree
14455 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14456 tree type, tree op)
14457 {
14458 tree result;
14459 START_FOLD_INIT;
14460
14461 result = fold_build1_loc (loc, code, type, op);
14462
14463 END_FOLD_INIT;
14464 return result;
14465 }
14466
14467 tree
14468 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14469 tree type, tree op0, tree op1)
14470 {
14471 tree result;
14472 START_FOLD_INIT;
14473
14474 result = fold_build2_loc (loc, code, type, op0, op1);
14475
14476 END_FOLD_INIT;
14477 return result;
14478 }
14479
14480 tree
14481 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14482 int nargs, tree *argarray)
14483 {
14484 tree result;
14485 START_FOLD_INIT;
14486
14487 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14488
14489 END_FOLD_INIT;
14490 return result;
14491 }
14492
14493 #undef START_FOLD_INIT
14494 #undef END_FOLD_INIT
14495
14496 /* Determine if first argument is a multiple of second argument. Return 0 if
14497 it is not, or if we cannot easily determine it to be.
14498
14499 An example of the sort of thing we care about (at this point; this routine
14500 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14501 fold cases do now) is discovering that
14502
14503 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14504
14505 is a multiple of
14506
14507 SAVE_EXPR (J * 8)
14508
14509 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14510
14511 This code also handles discovering that
14512
14513 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14514
14515 is a multiple of 8 so we don't have to worry about dealing with a
14516 possible remainder.
14517
14518 Note that we *look* inside a SAVE_EXPR only to determine how it was
14519 calculated; it is not safe for fold to do much of anything else with the
14520 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14521 at run time. For example, the latter example above *cannot* be implemented
14522 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14523 evaluation time of the original SAVE_EXPR is not necessarily the same at
14524 the time the new expression is evaluated. The only optimization of this
14525 sort that would be valid is changing
14526
14527 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14528
14529 divided by 8 to
14530
14531 SAVE_EXPR (I) * SAVE_EXPR (J)
14532
14533 (where the same SAVE_EXPR (J) is used in the original and the
14534 transformed version). */
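 /* Some concrete cases: (x * 24) is a multiple of 8 because 24 is;
 (x << 3) is a multiple of 8 because it equals x * 8; and
 (a + b) is known to be a multiple of 4 only when both a and b are. */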
14535
14536 int
14537 multiple_of_p (tree type, const_tree top, const_tree bottom)
14538 {
14539 if (operand_equal_p (top, bottom, 0))
14540 return 1;
14541
14542 if (TREE_CODE (type) != INTEGER_TYPE)
14543 return 0;
14544
14545 switch (TREE_CODE (top))
14546 {
14547 case BIT_AND_EXPR:
14548 /* Bitwise and provides a power of two multiple. If the mask is
14549 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14550 if (!integer_pow2p (bottom))
14551 return 0;
14552 /* FALLTHRU */
14553
14554 case MULT_EXPR:
14555 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14556 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14557
14558 case PLUS_EXPR:
14559 case MINUS_EXPR:
14560 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14561 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14562
14563 case LSHIFT_EXPR:
14564 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14565 {
14566 tree op1, t1;
14567
14568 op1 = TREE_OPERAND (top, 1);
14569 /* const_binop may not detect overflow correctly,
14570 so check for it explicitly here. */
14571 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14572 && 0 != (t1 = fold_convert (type,
14573 const_binop (LSHIFT_EXPR,
14574 size_one_node,
14575 op1)))
14576 && !TREE_OVERFLOW (t1))
14577 return multiple_of_p (type, t1, bottom);
14578 }
14579 return 0;
14580
14581 case NOP_EXPR:
14582 /* Can't handle conversions from non-integral or wider integral type. */
14583 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14584 || (TYPE_PRECISION (type)
14585 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14586 return 0;
14587
14588 /* ... fall through ... */
14589
14590 case SAVE_EXPR:
14591 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14592
14593 case COND_EXPR:
14594 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14595 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14596
14597 case INTEGER_CST:
14598 if (TREE_CODE (bottom) != INTEGER_CST
14599 || integer_zerop (bottom)
14600 || (TYPE_UNSIGNED (type)
14601 && (tree_int_cst_sgn (top) < 0
14602 || tree_int_cst_sgn (bottom) < 0)))
14603 return 0;
14604 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14605 SIGNED);
14606
14607 default:
14608 return 0;
14609 }
14610 }
14611
14612 /* Return true if CODE or TYPE is known to be non-negative. */
14613
14614 static bool
14615 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14616 {
14617 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14618 && truth_value_p (code))
14619 /* Truth values evaluate to 0 or 1, which are nonnegative unless we
14620 have a signed:1 type (where the values are -1 and 0). */
14621 return true;
14622 return false;
14623 }
14624
14625 /* Return true if (CODE OP0) is known to be non-negative. If the return
14626 value is based on the assumption that signed overflow is undefined,
14627 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14628 *STRICT_OVERFLOW_P. */
14629
14630 bool
14631 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14632 bool *strict_overflow_p)
14633 {
14634 if (TYPE_UNSIGNED (type))
14635 return true;
14636
14637 switch (code)
14638 {
14639 case ABS_EXPR:
14640 /* We can't return 1 if flag_wrapv is set because
14641 ABS_EXPR<INT_MIN> = INT_MIN. */
14642 if (!INTEGRAL_TYPE_P (type))
14643 return true;
14644 if (TYPE_OVERFLOW_UNDEFINED (type))
14645 {
14646 *strict_overflow_p = true;
14647 return true;
14648 }
14649 break;
14650
14651 case NON_LVALUE_EXPR:
14652 case FLOAT_EXPR:
14653 case FIX_TRUNC_EXPR:
14654 return tree_expr_nonnegative_warnv_p (op0,
14655 strict_overflow_p);
14656
14657 CASE_CONVERT:
14658 {
14659 tree inner_type = TREE_TYPE (op0);
14660 tree outer_type = type;
14661
14662 if (TREE_CODE (outer_type) == REAL_TYPE)
14663 {
14664 if (TREE_CODE (inner_type) == REAL_TYPE)
14665 return tree_expr_nonnegative_warnv_p (op0,
14666 strict_overflow_p);
14667 if (INTEGRAL_TYPE_P (inner_type))
14668 {
14669 if (TYPE_UNSIGNED (inner_type))
14670 return true;
14671 return tree_expr_nonnegative_warnv_p (op0,
14672 strict_overflow_p);
14673 }
14674 }
14675 else if (INTEGRAL_TYPE_P (outer_type))
14676 {
14677 if (TREE_CODE (inner_type) == REAL_TYPE)
14678 return tree_expr_nonnegative_warnv_p (op0,
14679 strict_overflow_p);
14680 if (INTEGRAL_TYPE_P (inner_type))
14681 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14682 && TYPE_UNSIGNED (inner_type);
14683 }
14684 }
14685 break;
14686
14687 default:
14688 return tree_simple_nonnegative_warnv_p (code, type);
14689 }
14690
14691 /* We don't know sign of `t', so be conservative and return false. */
14692 return false;
14693 }
14694
14695 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14696 value is based on the assumption that signed overflow is undefined,
14697 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14698 *STRICT_OVERFLOW_P. */
14699
14700 bool
14701 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14702 tree op1, bool *strict_overflow_p)
14703 {
14704 if (TYPE_UNSIGNED (type))
14705 return true;
14706
14707 switch (code)
14708 {
14709 case POINTER_PLUS_EXPR:
14710 case PLUS_EXPR:
14711 if (FLOAT_TYPE_P (type))
14712 return (tree_expr_nonnegative_warnv_p (op0,
14713 strict_overflow_p)
14714 && tree_expr_nonnegative_warnv_p (op1,
14715 strict_overflow_p));
14716
14717 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14718 both unsigned and at least 2 bits shorter than the result. */
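 /* E.g. (int) (unsigned char) a + (int) (unsigned char) b is at most
 255 + 255 == 510, which needs 9 bits; since 9 < 32 the signed
 32-bit sum cannot wrap and is nonnegative. */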
14719 if (TREE_CODE (type) == INTEGER_TYPE
14720 && TREE_CODE (op0) == NOP_EXPR
14721 && TREE_CODE (op1) == NOP_EXPR)
14722 {
14723 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14724 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14725 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14726 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14727 {
14728 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14729 TYPE_PRECISION (inner2)) + 1;
14730 return prec < TYPE_PRECISION (type);
14731 }
14732 }
14733 break;
14734
14735 case MULT_EXPR:
14736 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14737 {
14738 /* x * x is always non-negative for floating point x
14739 or without overflow. */
14740 if (operand_equal_p (op0, op1, 0)
14741 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14742 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14743 {
14744 if (TYPE_OVERFLOW_UNDEFINED (type))
14745 *strict_overflow_p = true;
14746 return true;
14747 }
14748 }
14749
14750 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14751 both unsigned and their combined precision is less than the result's. */
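 /* E.g. an 8-bit unsigned value times a 16-bit unsigned value needs
 at most 8 + 16 == 24 bits, so in a signed 32-bit multiplication the
 product cannot wrap and is nonnegative. */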
14752 if (TREE_CODE (type) == INTEGER_TYPE
14753 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14754 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14755 {
14756 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14757 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14758 : TREE_TYPE (op0);
14759 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14760 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14761 : TREE_TYPE (op1);
14762
14763 bool unsigned0 = TYPE_UNSIGNED (inner0);
14764 bool unsigned1 = TYPE_UNSIGNED (inner1);
14765
14766 if (TREE_CODE (op0) == INTEGER_CST)
14767 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14768
14769 if (TREE_CODE (op1) == INTEGER_CST)
14770 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14771
14772 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14773 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14774 {
14775 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14776 ? tree_int_cst_min_precision (op0, UNSIGNED)
14777 : TYPE_PRECISION (inner0);
14778
14779 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14780 ? tree_int_cst_min_precision (op1, UNSIGNED)
14781 : TYPE_PRECISION (inner1);
14782
14783 return precision0 + precision1 < TYPE_PRECISION (type);
14784 }
14785 }
14786 return false;
14787
14788 case BIT_AND_EXPR:
14789 case MAX_EXPR:
14790 return (tree_expr_nonnegative_warnv_p (op0,
14791 strict_overflow_p)
14792 || tree_expr_nonnegative_warnv_p (op1,
14793 strict_overflow_p));
14794
14795 case BIT_IOR_EXPR:
14796 case BIT_XOR_EXPR:
14797 case MIN_EXPR:
14798 case RDIV_EXPR:
14799 case TRUNC_DIV_EXPR:
14800 case CEIL_DIV_EXPR:
14801 case FLOOR_DIV_EXPR:
14802 case ROUND_DIV_EXPR:
14803 return (tree_expr_nonnegative_warnv_p (op0,
14804 strict_overflow_p)
14805 && tree_expr_nonnegative_warnv_p (op1,
14806 strict_overflow_p));
14807
14808 case TRUNC_MOD_EXPR:
14809 case CEIL_MOD_EXPR:
14810 case FLOOR_MOD_EXPR:
14811 case ROUND_MOD_EXPR:
14812 return tree_expr_nonnegative_warnv_p (op0,
14813 strict_overflow_p);
14814 default:
14815 return tree_simple_nonnegative_warnv_p (code, type);
14816 }
14817
14818 /* We don't know sign of `t', so be conservative and return false. */
14819 return false;
14820 }
14821
14822 /* Return true if T is known to be non-negative. If the return
14823 value is based on the assumption that signed overflow is undefined,
14824 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14825 *STRICT_OVERFLOW_P. */
14826
14827 bool
14828 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14829 {
14830 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14831 return true;
14832
14833 switch (TREE_CODE (t))
14834 {
14835 case INTEGER_CST:
14836 return tree_int_cst_sgn (t) >= 0;
14837
14838 case REAL_CST:
14839 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14840
14841 case FIXED_CST:
14842 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14843
14844 case COND_EXPR:
14845 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14846 strict_overflow_p)
14847 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14848 strict_overflow_p));
14849 default:
14850 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14851 TREE_TYPE (t));
14852 }
14853 /* We don't know sign of `t', so be conservative and return false. */
14854 return false;
14855 }
14856
14857 /* Return true if T is known to be non-negative. If the return
14858 value is based on the assumption that signed overflow is undefined,
14859 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14860 *STRICT_OVERFLOW_P. */
14861
14862 bool
14863 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14864 tree arg0, tree arg1, bool *strict_overflow_p)
14865 {
14866 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14867 switch (DECL_FUNCTION_CODE (fndecl))
14868 {
14869 CASE_FLT_FN (BUILT_IN_ACOS):
14870 CASE_FLT_FN (BUILT_IN_ACOSH):
14871 CASE_FLT_FN (BUILT_IN_CABS):
14872 CASE_FLT_FN (BUILT_IN_COSH):
14873 CASE_FLT_FN (BUILT_IN_ERFC):
14874 CASE_FLT_FN (BUILT_IN_EXP):
14875 CASE_FLT_FN (BUILT_IN_EXP10):
14876 CASE_FLT_FN (BUILT_IN_EXP2):
14877 CASE_FLT_FN (BUILT_IN_FABS):
14878 CASE_FLT_FN (BUILT_IN_FDIM):
14879 CASE_FLT_FN (BUILT_IN_HYPOT):
14880 CASE_FLT_FN (BUILT_IN_POW10):
14881 CASE_INT_FN (BUILT_IN_FFS):
14882 CASE_INT_FN (BUILT_IN_PARITY):
14883 CASE_INT_FN (BUILT_IN_POPCOUNT):
14884 CASE_INT_FN (BUILT_IN_CLZ):
14885 CASE_INT_FN (BUILT_IN_CLRSB):
14886 case BUILT_IN_BSWAP32:
14887 case BUILT_IN_BSWAP64:
14888 /* Always true. */
14889 return true;
14890
14891 CASE_FLT_FN (BUILT_IN_SQRT):
14892 /* sqrt(-0.0) is -0.0. */
14893 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14894 return true;
14895 return tree_expr_nonnegative_warnv_p (arg0,
14896 strict_overflow_p);
14897
14898 CASE_FLT_FN (BUILT_IN_ASINH):
14899 CASE_FLT_FN (BUILT_IN_ATAN):
14900 CASE_FLT_FN (BUILT_IN_ATANH):
14901 CASE_FLT_FN (BUILT_IN_CBRT):
14902 CASE_FLT_FN (BUILT_IN_CEIL):
14903 CASE_FLT_FN (BUILT_IN_ERF):
14904 CASE_FLT_FN (BUILT_IN_EXPM1):
14905 CASE_FLT_FN (BUILT_IN_FLOOR):
14906 CASE_FLT_FN (BUILT_IN_FMOD):
14907 CASE_FLT_FN (BUILT_IN_FREXP):
14908 CASE_FLT_FN (BUILT_IN_ICEIL):
14909 CASE_FLT_FN (BUILT_IN_IFLOOR):
14910 CASE_FLT_FN (BUILT_IN_IRINT):
14911 CASE_FLT_FN (BUILT_IN_IROUND):
14912 CASE_FLT_FN (BUILT_IN_LCEIL):
14913 CASE_FLT_FN (BUILT_IN_LDEXP):
14914 CASE_FLT_FN (BUILT_IN_LFLOOR):
14915 CASE_FLT_FN (BUILT_IN_LLCEIL):
14916 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14917 CASE_FLT_FN (BUILT_IN_LLRINT):
14918 CASE_FLT_FN (BUILT_IN_LLROUND):
14919 CASE_FLT_FN (BUILT_IN_LRINT):
14920 CASE_FLT_FN (BUILT_IN_LROUND):
14921 CASE_FLT_FN (BUILT_IN_MODF):
14922 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14923 CASE_FLT_FN (BUILT_IN_RINT):
14924 CASE_FLT_FN (BUILT_IN_ROUND):
14925 CASE_FLT_FN (BUILT_IN_SCALB):
14926 CASE_FLT_FN (BUILT_IN_SCALBLN):
14927 CASE_FLT_FN (BUILT_IN_SCALBN):
14928 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14929 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14930 CASE_FLT_FN (BUILT_IN_SINH):
14931 CASE_FLT_FN (BUILT_IN_TANH):
14932 CASE_FLT_FN (BUILT_IN_TRUNC):
14933 /* True if the 1st argument is nonnegative. */
14934 return tree_expr_nonnegative_warnv_p (arg0,
14935 strict_overflow_p);
14936
14937 CASE_FLT_FN (BUILT_IN_FMAX):
14938 /* True if the 1st OR 2nd arguments are nonnegative. */
14939 return (tree_expr_nonnegative_warnv_p (arg0,
14940 strict_overflow_p)
14941 || (tree_expr_nonnegative_warnv_p (arg1,
14942 strict_overflow_p)));
14943
14944 CASE_FLT_FN (BUILT_IN_FMIN):
14945 /* True if the 1st AND 2nd arguments are nonnegative. */
14946 return (tree_expr_nonnegative_warnv_p (arg0,
14947 strict_overflow_p)
14948 && (tree_expr_nonnegative_warnv_p (arg1,
14949 strict_overflow_p)));
14950
14951 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14952 /* True if the 2nd argument is nonnegative. */
14953 return tree_expr_nonnegative_warnv_p (arg1,
14954 strict_overflow_p);
14955
14956 CASE_FLT_FN (BUILT_IN_POWI):
14957 /* True if the 1st argument is nonnegative or the second
14958 argument is an even integer. */
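 /* E.g. __builtin_powi (x, 4) is treated as nonnegative for any x,
 since x**4 == (x*x) * (x*x). */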
14959 if (TREE_CODE (arg1) == INTEGER_CST
14960 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14961 return true;
14962 return tree_expr_nonnegative_warnv_p (arg0,
14963 strict_overflow_p);
14964
14965 CASE_FLT_FN (BUILT_IN_POW):
14966 /* True if the 1st argument is nonnegative or the second
14967 argument is an even integer valued real. */
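 /* E.g. pow (x, 2.0) is treated as nonnegative for any x, while
 pow (x, 2.5) is nonnegative only when x is. */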
14968 if (TREE_CODE (arg1) == REAL_CST)
14969 {
14970 REAL_VALUE_TYPE c;
14971 HOST_WIDE_INT n;
14972
14973 c = TREE_REAL_CST (arg1);
14974 n = real_to_integer (&c);
14975 if ((n & 1) == 0)
14976 {
14977 REAL_VALUE_TYPE cint;
14978 real_from_integer (&cint, VOIDmode, n, SIGNED);
14979 if (real_identical (&c, &cint))
14980 return true;
14981 }
14982 }
14983 return tree_expr_nonnegative_warnv_p (arg0,
14984 strict_overflow_p);
14985
14986 default:
14987 break;
14988 }
14989 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14990 type);
14991 }
14992
14993 /* Return true if T is known to be non-negative. If the return
14994 value is based on the assumption that signed overflow is undefined,
14995 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14996 *STRICT_OVERFLOW_P. */
14997
14998 static bool
14999 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15000 {
15001 enum tree_code code = TREE_CODE (t);
15002 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15003 return true;
15004
15005 switch (code)
15006 {
15007 case TARGET_EXPR:
15008 {
15009 tree temp = TARGET_EXPR_SLOT (t);
15010 t = TARGET_EXPR_INITIAL (t);
15011
15012 /* If the initializer is non-void, then it's a normal expression
15013 that will be assigned to the slot. */
15014 if (!VOID_TYPE_P (t))
15015 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15016
15017 /* Otherwise, the initializer sets the slot in some way. One common
15018 way is an assignment statement at the end of the initializer. */
15019 while (1)
15020 {
15021 if (TREE_CODE (t) == BIND_EXPR)
15022 t = expr_last (BIND_EXPR_BODY (t));
15023 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15024 || TREE_CODE (t) == TRY_CATCH_EXPR)
15025 t = expr_last (TREE_OPERAND (t, 0));
15026 else if (TREE_CODE (t) == STATEMENT_LIST)
15027 t = expr_last (t);
15028 else
15029 break;
15030 }
15031 if (TREE_CODE (t) == MODIFY_EXPR
15032 && TREE_OPERAND (t, 0) == temp)
15033 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15034 strict_overflow_p);
15035
15036 return false;
15037 }
15038
15039 case CALL_EXPR:
15040 {
15041 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15042 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15043
15044 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15045 get_callee_fndecl (t),
15046 arg0,
15047 arg1,
15048 strict_overflow_p);
15049 }
15050 case COMPOUND_EXPR:
15051 case MODIFY_EXPR:
15052 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15053 strict_overflow_p);
15054 case BIND_EXPR:
15055 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15056 strict_overflow_p);
15057 case SAVE_EXPR:
15058 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15059 strict_overflow_p);
15060
15061 default:
15062 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15063 TREE_TYPE (t));
15064 }
15065
15066 /* We don't know sign of `t', so be conservative and return false. */
15067 return false;
15068 }
15069
15070 /* Return true if T is known to be non-negative. If the return
15071 value is based on the assumption that signed overflow is undefined,
15072 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15073 *STRICT_OVERFLOW_P. */
15074
15075 bool
15076 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15077 {
15078 enum tree_code code;
15079 if (t == error_mark_node)
15080 return false;
15081
15082 code = TREE_CODE (t);
15083 switch (TREE_CODE_CLASS (code))
15084 {
15085 case tcc_binary:
15086 case tcc_comparison:
15087 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15088 TREE_TYPE (t),
15089 TREE_OPERAND (t, 0),
15090 TREE_OPERAND (t, 1),
15091 strict_overflow_p);
15092
15093 case tcc_unary:
15094 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15095 TREE_TYPE (t),
15096 TREE_OPERAND (t, 0),
15097 strict_overflow_p);
15098
15099 case tcc_constant:
15100 case tcc_declaration:
15101 case tcc_reference:
15102 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15103
15104 default:
15105 break;
15106 }
15107
15108 switch (code)
15109 {
15110 case TRUTH_AND_EXPR:
15111 case TRUTH_OR_EXPR:
15112 case TRUTH_XOR_EXPR:
15113 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15114 TREE_TYPE (t),
15115 TREE_OPERAND (t, 0),
15116 TREE_OPERAND (t, 1),
15117 strict_overflow_p);
15118 case TRUTH_NOT_EXPR:
15119 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15120 TREE_TYPE (t),
15121 TREE_OPERAND (t, 0),
15122 strict_overflow_p);
15123
15124 case COND_EXPR:
15125 case CONSTRUCTOR:
15126 case OBJ_TYPE_REF:
15127 case ASSERT_EXPR:
15128 case ADDR_EXPR:
15129 case WITH_SIZE_EXPR:
15130 case SSA_NAME:
15131 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15132
15133 default:
15134 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15135 }
15136 }
15137
15138 /* Return true if `t' is known to be non-negative. Handle warnings
15139 about undefined signed overflow. */
15140
15141 bool
15142 tree_expr_nonnegative_p (tree t)
15143 {
15144 bool ret, strict_overflow_p;
15145
15146 strict_overflow_p = false;
15147 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15148 if (strict_overflow_p)
15149 fold_overflow_warning (("assuming signed overflow does not occur when "
15150 "determining that expression is always "
15151 "non-negative"),
15152 WARN_STRICT_OVERFLOW_MISC);
15153 return ret;
15154 }
15155
15156
15157 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15158 For floating point we further ensure that T is not denormal.
15159 Similar logic is present in nonzero_address in rtlanal.c.
15160
15161 If the return value is based on the assumption that signed overflow
15162 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15163 change *STRICT_OVERFLOW_P. */
15164
15165 bool
15166 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15167 bool *strict_overflow_p)
15168 {
15169 switch (code)
15170 {
15171 case ABS_EXPR:
15172 return tree_expr_nonzero_warnv_p (op0,
15173 strict_overflow_p);
15174
15175 case NOP_EXPR:
15176 {
15177 tree inner_type = TREE_TYPE (op0);
15178 tree outer_type = type;
15179
15180 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15181 && tree_expr_nonzero_warnv_p (op0,
15182 strict_overflow_p));
15183 }
15184 break;
15185
15186 case NON_LVALUE_EXPR:
15187 return tree_expr_nonzero_warnv_p (op0,
15188 strict_overflow_p);
15189
15190 default:
15191 break;
15192 }
15193
15194 return false;
15195 }
15196
15197 /* Return true when the binary expression (CODE OP0 OP1) is known to be
15198 nonzero.  For floating point we further ensure that the value is not
15199 denormal.  Similar logic is present in nonzero_address in rtlanal.c.
15200
15201 If the return value is based on the assumption that signed overflow
15202 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15203 change *STRICT_OVERFLOW_P. */
15204
15205 bool
15206 tree_binary_nonzero_warnv_p (enum tree_code code,
15207 tree type,
15208 tree op0,
15209 tree op1, bool *strict_overflow_p)
15210 {
15211 bool sub_strict_overflow_p;
15212 switch (code)
15213 {
15214 case POINTER_PLUS_EXPR:
15215 case PLUS_EXPR:
15216 if (TYPE_OVERFLOW_UNDEFINED (type))
15217 {
15218 /* In the presence of negative values it is hard
15219 to say anything definite.  */
15220 sub_strict_overflow_p = false;
15221 if (!tree_expr_nonnegative_warnv_p (op0,
15222 &sub_strict_overflow_p)
15223 || !tree_expr_nonnegative_warnv_p (op1,
15224 &sub_strict_overflow_p))
15225 return false;
15226 /* One of the operands must be positive and the other non-negative.  */
15227 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15228 overflows, on a two's-complement machine the sum of two
15229 nonnegative numbers can never be zero.  */
15230 return (tree_expr_nonzero_warnv_p (op0,
15231 strict_overflow_p)
15232 || tree_expr_nonzero_warnv_p (op1,
15233 strict_overflow_p));
15234 }
15235 break;
15236
15237 case MULT_EXPR:
15238 if (TYPE_OVERFLOW_UNDEFINED (type))
15239 {
15240 if (tree_expr_nonzero_warnv_p (op0,
15241 strict_overflow_p)
15242 && tree_expr_nonzero_warnv_p (op1,
15243 strict_overflow_p))
15244 {
15245 *strict_overflow_p = true;
15246 return true;
15247 }
15248 }
15249 break;
15250
15251 case MIN_EXPR:
15252 sub_strict_overflow_p = false;
15253 if (tree_expr_nonzero_warnv_p (op0,
15254 &sub_strict_overflow_p)
15255 && tree_expr_nonzero_warnv_p (op1,
15256 &sub_strict_overflow_p))
15257 {
15258 if (sub_strict_overflow_p)
15259 *strict_overflow_p = true;
15260 }
15261 break;
15262
15263 case MAX_EXPR:
15264 sub_strict_overflow_p = false;
15265 if (tree_expr_nonzero_warnv_p (op0,
15266 &sub_strict_overflow_p))
15267 {
15268 if (sub_strict_overflow_p)
15269 *strict_overflow_p = true;
15270
15271 /* When both operands are nonzero, then MAX must be too. */
15272 if (tree_expr_nonzero_warnv_p (op1,
15273 strict_overflow_p))
15274 return true;
15275
15276 /* MAX where operand 0 is positive is positive. */
15277 return tree_expr_nonnegative_warnv_p (op0,
15278 strict_overflow_p);
15279 }
15280 /* MAX where operand 1 is positive is positive. */
15281 else if (tree_expr_nonzero_warnv_p (op1,
15282 &sub_strict_overflow_p)
15283 && tree_expr_nonnegative_warnv_p (op1,
15284 &sub_strict_overflow_p))
15285 {
15286 if (sub_strict_overflow_p)
15287 *strict_overflow_p = true;
15288 return true;
15289 }
15290 break;
15291
15292 case BIT_IOR_EXPR:
15293 return (tree_expr_nonzero_warnv_p (op1,
15294 strict_overflow_p)
15295 || tree_expr_nonzero_warnv_p (op0,
15296 strict_overflow_p));
15297
15298 default:
15299 break;
15300 }
15301
15302 return false;
15303 }
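
/* Editor's sketch (hypothetical helper, not part of GCC) of the
   PLUS_EXPR reasoning above, restated on host integers: when both
   operands are non-negative and at least one is nonzero, the
   mathematical sum is positive, and even a wrapped two's-complement
   sum of two non-negative values cannot be zero.  */
static int
plus_known_nonzero_example (long long a, long long b)
{
  if (a < 0 || b < 0)
    return 0;                   /* Hard to say anything definite.  */
  return a != 0 || b != 0;      /* The sum cannot be zero.  */
}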
15304
15305 /* Return true when the single expression T (typically an address) is
15306 known to be nonzero.  For floating point we further ensure that T is
15307 not denormal.  Similar logic is present in nonzero_address in rtlanal.c.
15308
15309 If the return value is based on the assumption that signed overflow
15310 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15311 change *STRICT_OVERFLOW_P. */
15312
15313 bool
15314 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15315 {
15316 bool sub_strict_overflow_p;
15317 switch (TREE_CODE (t))
15318 {
15319 case INTEGER_CST:
15320 return !integer_zerop (t);
15321
15322 case ADDR_EXPR:
15323 {
15324 tree base = TREE_OPERAND (t, 0);
15325
15326 if (!DECL_P (base))
15327 base = get_base_address (base);
15328
15329 if (!base)
15330 return false;
15331
15332 /* For objects in the symbol table, check whether we know they are
15333 non-zero.  Don't do anything for variables and functions before the
15334 symtab is built; they may well be declared weak later.  */
15335 if (DECL_P (base) && decl_in_symtab_p (base))
15336 {
15337 struct symtab_node *symbol;
15338
15339 symbol = symtab_node::get_create (base);
15340 if (symbol)
15341 return symbol->nonzero_address ();
15342 else
15343 return false;
15344 }
15345
15346 /* Function local objects are never NULL. */
15347 if (DECL_P (base)
15348 && (DECL_CONTEXT (base)
15349 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15350 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15351 return true;
15352
15353 /* Constants are never weak. */
15354 if (CONSTANT_CLASS_P (base))
15355 return true;
15356
15357 return false;
15358 }
15359
15360 case COND_EXPR:
15361 sub_strict_overflow_p = false;
15362 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15363 &sub_strict_overflow_p)
15364 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15365 &sub_strict_overflow_p))
15366 {
15367 if (sub_strict_overflow_p)
15368 *strict_overflow_p = true;
15369 return true;
15370 }
15371 break;
15372
15373 default:
15374 break;
15375 }
15376 return false;
15377 }
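
/* Editor's example (illustrative): in

     int f (void) { int local; return &local != 0; }

   the ADDR_EXPR case above proves &local nonzero because LOCAL is
   an auto variable of the enclosing FUNCTION_DECL, so the
   comparison folds to 1.  The address of a global goes through
   symtab_node::nonzero_address instead, since a symbol that may
   later be declared weak can legitimately be null.  */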
15378
15379 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15380 attempt to fold the expression to a constant without modifying TYPE,
15381 OP0 or OP1.
15382
15383 If the expression can be simplified to a constant, return the
15384 constant.  If the expression cannot be simplified to a constant,
15385 return NULL_TREE.  */
15386
15387 tree
15388 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15389 {
15390 tree tem = fold_binary (code, type, op0, op1);
15391 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15392 }
15393
15394 /* Given the components of a unary expression CODE, TYPE and OP0,
15395 attempt to fold the expression to a constant without modifying
15396 TYPE or OP0.
15397
15398 If the expression can be simplified to a constant, return the
15399 constant.  If the expression cannot be simplified to a constant,
15400 return NULL_TREE.  */
15401
15402 tree
15403 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15404 {
15405 tree tem = fold_unary (code, type, op0);
15406 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15407 }
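
/* Editor's usage sketch (assumed, matching the two helpers above):

     tree five = build_int_cst (integer_type_node, 5);
     tree neg  = fold_unary_to_constant (NEGATE_EXPR,
                                         integer_type_node, five);
     tree sum  = fold_binary_to_constant (PLUS_EXPR,
                                          integer_type_node, five, five);

   NEG is the INTEGER_CST -5 and SUM the INTEGER_CST 10; had either
   operand been non-constant, NULL_TREE would be returned instead.  */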
15408
15409 /* If EXP represents referencing an element in a constant string
15410 (either via pointer arithmetic or array indexing), return the
15411 tree representing the value accessed, otherwise return NULL. */
15412
15413 tree
15414 fold_read_from_constant_string (tree exp)
15415 {
15416 if ((TREE_CODE (exp) == INDIRECT_REF
15417 || TREE_CODE (exp) == ARRAY_REF)
15418 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15419 {
15420 tree exp1 = TREE_OPERAND (exp, 0);
15421 tree index;
15422 tree string;
15423 location_t loc = EXPR_LOCATION (exp);
15424
15425 if (TREE_CODE (exp) == INDIRECT_REF)
15426 string = string_constant (exp1, &index);
15427 else
15428 {
15429 tree low_bound = array_ref_low_bound (exp);
15430 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15431
15432 /* Optimize the special case of a zero lower bound.
15433
15434 We convert the low_bound to sizetype to avoid some problems
15435 with constant folding.  (E.g. suppose the lower bound is 1,
15436 and its mode is QI.  Without the conversion, (ARRAY
15437 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15438 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!) */
15439 if (! integer_zerop (low_bound))
15440 index = size_diffop_loc (loc, index,
15441 fold_convert_loc (loc, sizetype, low_bound));
15442
15443 string = exp1;
15444 }
15445
15446 if (string
15447 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15448 && TREE_CODE (string) == STRING_CST
15449 && TREE_CODE (index) == INTEGER_CST
15450 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15451 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15452 == MODE_INT)
15453 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15454 return build_int_cst_type (TREE_TYPE (exp),
15455 (TREE_STRING_POINTER (string)
15456 [TREE_INT_CST_LOW (index)]));
15457 }
15458 return NULL;
15459 }
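
/* Editor's example (illustrative): for a reference such as "abc"[1],
   EXP is an ARRAY_REF of a STRING_CST with index 1, so the routine
   above returns build_int_cst_type (TREE_TYPE (exp), 'b').  A
   non-constant index, or one at or past TREE_STRING_LENGTH, yields
   NULL.  */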
15460
15461 /* Return the tree for neg (ARG0) when ARG0 is known to be an
15462 integer, real, or fixed-point constant.
15463
15464 TYPE is the type of the result. */
15465
15466 static tree
15467 fold_negate_const (tree arg0, tree type)
15468 {
15469 tree t = NULL_TREE;
15470
15471 switch (TREE_CODE (arg0))
15472 {
15473 case INTEGER_CST:
15474 {
15475 bool overflow;
15476 wide_int val = wi::neg (arg0, &overflow);
15477 t = force_fit_type (type, val, 1,
15478 (overflow | TREE_OVERFLOW (arg0))
15479 && !TYPE_UNSIGNED (type));
15480 break;
15481 }
15482
15483 case REAL_CST:
15484 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15485 break;
15486
15487 case FIXED_CST:
15488 {
15489 FIXED_VALUE_TYPE f;
15490 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15491 &(TREE_FIXED_CST (arg0)), NULL,
15492 TYPE_SATURATING (type));
15493 t = build_fixed (type, f);
15494 /* Propagate overflow flags. */
15495 if (overflow_p | TREE_OVERFLOW (arg0))
15496 TREE_OVERFLOW (t) = 1;
15497 break;
15498 }
15499
15500 default:
15501 gcc_unreachable ();
15502 }
15503
15504 return t;
15505 }
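
/* Editor's note (illustrative): for a 32-bit int the one interesting
   case is the most negative value: negating the INTEGER_CST
   -2147483648 is not representable, so wi::neg reports overflow and
   force_fit_type marks the result with TREE_OVERFLOW.  Unsigned
   types never get the flag here, since their negation is defined
   modulo 2^N.  */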
15506
15507 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15508 an integer constant or real constant.
15509
15510 TYPE is the type of the result. */
15511
15512 tree
15513 fold_abs_const (tree arg0, tree type)
15514 {
15515 tree t = NULL_TREE;
15516
15517 switch (TREE_CODE (arg0))
15518 {
15519 case INTEGER_CST:
15520 {
15521 /* If the value is unsigned or non-negative, then the absolute value
15522 is the same as the ordinary value. */
15523 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15524 t = arg0;
15525
15526 /* If the value is negative, then the absolute value is
15527 its negation. */
15528 else
15529 {
15530 bool overflow;
15531 wide_int val = wi::neg (arg0, &overflow);
15532 t = force_fit_type (type, val, -1,
15533 overflow | TREE_OVERFLOW (arg0));
15534 }
15535 }
15536 break;
15537
15538 case REAL_CST:
15539 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15540 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15541 else
15542 t = arg0;
15543 break;
15544
15545 default:
15546 gcc_unreachable ();
15547 }
15548
15549 return t;
15550 }
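
/* Editor's note (illustrative): as with negation above, fold_abs_const
   on the 32-bit INTEGER_CST -2147483648 wraps back to -2147483648,
   so the result carries TREE_OVERFLOW; every other integer and all
   reals fold cleanly.  */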
15551
15552 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15553 constant. TYPE is the type of the result. */
15554
15555 static tree
15556 fold_not_const (const_tree arg0, tree type)
15557 {
15558 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15559
15560 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15561 }
15562
15563 /* Given CODE, a relational operator, the target type, TYPE and two
15564 constant operands OP0 and OP1, return the result of the
15565 relational operation. If the result is not a compile time
15566 constant, then return NULL_TREE. */
15567
15568 static tree
15569 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15570 {
15571 int result, invert;
15572
15573 /* From here on, the only cases we handle are when the result is
15574 known to be a constant. */
15575
15576 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15577 {
15578 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15579 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15580
15581 /* Handle the cases where either operand is a NaN. */
15582 if (real_isnan (c0) || real_isnan (c1))
15583 {
15584 switch (code)
15585 {
15586 case EQ_EXPR:
15587 case ORDERED_EXPR:
15588 result = 0;
15589 break;
15590
15591 case NE_EXPR:
15592 case UNORDERED_EXPR:
15593 case UNLT_EXPR:
15594 case UNLE_EXPR:
15595 case UNGT_EXPR:
15596 case UNGE_EXPR:
15597 case UNEQ_EXPR:
15598 result = 1;
15599 break;
15600
15601 case LT_EXPR:
15602 case LE_EXPR:
15603 case GT_EXPR:
15604 case GE_EXPR:
15605 case LTGT_EXPR:
15606 if (flag_trapping_math)
15607 return NULL_TREE;
15608 result = 0;
15609 break;
15610
15611 default:
15612 gcc_unreachable ();
15613 }
15614
15615 return constant_boolean_node (result, type);
15616 }
15617
15618 return constant_boolean_node (real_compare (code, c0, c1), type);
15619 }
15620
15621 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15622 {
15623 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15624 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15625 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15626 }
15627
15628 /* Handle equality/inequality of complex constants. */
15629 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15630 {
15631 tree rcond = fold_relational_const (code, type,
15632 TREE_REALPART (op0),
15633 TREE_REALPART (op1));
15634 tree icond = fold_relational_const (code, type,
15635 TREE_IMAGPART (op0),
15636 TREE_IMAGPART (op1));
15637 if (code == EQ_EXPR)
15638 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15639 else if (code == NE_EXPR)
15640 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15641 else
15642 return NULL_TREE;
15643 }
15644
15645 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15646 {
15647 unsigned count = VECTOR_CST_NELTS (op0);
15648 tree *elts = XALLOCAVEC (tree, count);
15649 gcc_assert (VECTOR_CST_NELTS (op1) == count
15650 && TYPE_VECTOR_SUBPARTS (type) == count);
15651
15652 for (unsigned i = 0; i < count; i++)
15653 {
15654 tree elem_type = TREE_TYPE (type);
15655 tree elem0 = VECTOR_CST_ELT (op0, i);
15656 tree elem1 = VECTOR_CST_ELT (op1, i);
15657
15658 tree tem = fold_relational_const (code, elem_type,
15659 elem0, elem1);
15660
15661 if (tem == NULL_TREE)
15662 return NULL_TREE;
15663
15664 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15665 }
15666
15667 return build_vector (type, elts);
15668 }
15669
15670 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15671
15672 To compute GT, swap the arguments and do LT.
15673 To compute GE, do LT and invert the result.
15674 To compute LE, swap the arguments, do LT and invert the result.
15675 To compute NE, do EQ and invert the result.
15676
15677 Therefore, the code below must handle only EQ and LT. */
15678
15679 if (code == LE_EXPR || code == GT_EXPR)
15680 {
15681 tree tem = op0;
15682 op0 = op1;
15683 op1 = tem;
15684 code = swap_tree_comparison (code);
15685 }
15686
15687 /* Note that it is safe to invert for real values here because we
15688 have already handled the one case where it matters.  */
15689
15690 invert = 0;
15691 if (code == NE_EXPR || code == GE_EXPR)
15692 {
15693 invert = 1;
15694 code = invert_tree_comparison (code, false);
15695 }
15696
15697 /* Compute a result for LT or EQ if the arguments permit;
15698 otherwise return NULL_TREE.  */
15699 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15700 {
15701 if (code == EQ_EXPR)
15702 result = tree_int_cst_equal (op0, op1);
15703 else
15704 result = tree_int_cst_lt (op0, op1);
15705 }
15706 else
15707 return NULL_TREE;
15708
15709 if (invert)
15710 result ^= 1;
15711 return constant_boolean_node (result, type);
15712 }
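
/* Editor's sketch (hypothetical helper, not part of GCC) of the
   swap/invert scheme above, restated on host integers: only '<' and
   '==' need direct support.  */
static int
compare_example (int code, long long a, long long b)
{
  switch (code)
    {
    case 0: return a < b;            /* LT */
    case 1: return a == b;           /* EQ */
    case 2: return b < a;            /* GT: swap the arguments, do LT.  */
    case 3: return !(a < b);         /* GE: do LT, invert.  */
    case 4: return !(b < a);         /* LE: swap, do LT, invert.  */
    default: return !(a == b);       /* NE: do EQ, invert.  */
    }
}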
15713
15714 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15715 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15716 itself. */
15717
15718 tree
15719 fold_build_cleanup_point_expr (tree type, tree expr)
15720 {
15721 /* If the expression does not have side effects then we don't have to wrap
15722 it with a cleanup point expression. */
15723 if (!TREE_SIDE_EFFECTS (expr))
15724 return expr;
15725
15726 /* If the expression is a RETURN_EXPR, look at the expression it
15727 returns: if that expression, or (when it is a MODIFY_EXPR) its
15728 right-hand side, has no side effects, we don't need to wrap the
15729 expression in a cleanup point expression.  Note we don't check the
15730 left-hand side of the modify because it should always be the return decl. */
15731 if (TREE_CODE (expr) == RETURN_EXPR)
15732 {
15733 tree op = TREE_OPERAND (expr, 0);
15734 if (!op || !TREE_SIDE_EFFECTS (op))
15735 return expr;
15736 op = TREE_OPERAND (op, 1);
15737 if (!TREE_SIDE_EFFECTS (op))
15738 return expr;
15739 }
15740
15741 return build1 (CLEANUP_POINT_EXPR, type, expr);
15742 }
15743
15744 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15745 of an indirection through OP0, or NULL_TREE if no simplification is
15746 possible. */
15747
15748 tree
15749 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15750 {
15751 tree sub = op0;
15752 tree subtype;
15753
15754 STRIP_NOPS (sub);
15755 subtype = TREE_TYPE (sub);
15756 if (!POINTER_TYPE_P (subtype))
15757 return NULL_TREE;
15758
15759 if (TREE_CODE (sub) == ADDR_EXPR)
15760 {
15761 tree op = TREE_OPERAND (sub, 0);
15762 tree optype = TREE_TYPE (op);
15763 /* *&CONST_DECL => the value of the const decl.  */
15764 if (TREE_CODE (op) == CONST_DECL)
15765 return DECL_INITIAL (op);
15766 /* *&p => p; make sure to handle *&"str"[cst] here. */
15767 if (type == optype)
15768 {
15769 tree fop = fold_read_from_constant_string (op);
15770 if (fop)
15771 return fop;
15772 else
15773 return op;
15774 }
15775 /* *(foo *)&fooarray => fooarray[0] */
15776 else if (TREE_CODE (optype) == ARRAY_TYPE
15777 && type == TREE_TYPE (optype)
15778 && (!in_gimple_form
15779 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15780 {
15781 tree type_domain = TYPE_DOMAIN (optype);
15782 tree min_val = size_zero_node;
15783 if (type_domain && TYPE_MIN_VALUE (type_domain))
15784 min_val = TYPE_MIN_VALUE (type_domain);
15785 if (in_gimple_form
15786 && TREE_CODE (min_val) != INTEGER_CST)
15787 return NULL_TREE;
15788 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15789 NULL_TREE, NULL_TREE);
15790 }
15791 /* *(foo *)&complexfoo => __real__ complexfoo */
15792 else if (TREE_CODE (optype) == COMPLEX_TYPE
15793 && type == TREE_TYPE (optype))
15794 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15795 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15796 else if (TREE_CODE (optype) == VECTOR_TYPE
15797 && type == TREE_TYPE (optype))
15798 {
15799 tree part_width = TYPE_SIZE (type);
15800 tree index = bitsize_int (0);
15801 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15802 }
15803 }
15804
15805 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15806 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15807 {
15808 tree op00 = TREE_OPERAND (sub, 0);
15809 tree op01 = TREE_OPERAND (sub, 1);
15810
15811 STRIP_NOPS (op00);
15812 if (TREE_CODE (op00) == ADDR_EXPR)
15813 {
15814 tree op00type;
15815 op00 = TREE_OPERAND (op00, 0);
15816 op00type = TREE_TYPE (op00);
15817
15818 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15819 if (TREE_CODE (op00type) == VECTOR_TYPE
15820 && type == TREE_TYPE (op00type))
15821 {
15822 HOST_WIDE_INT offset = tree_to_shwi (op01);
15823 tree part_width = TYPE_SIZE (type);
15824 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
15825 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15826 tree index = bitsize_int (indexi);
15827
15828 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15829 return fold_build3_loc (loc,
15830 BIT_FIELD_REF, type, op00,
15831 part_width, index);
15832
15833 }
15834 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15835 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15836 && type == TREE_TYPE (op00type))
15837 {
15838 tree size = TYPE_SIZE_UNIT (type);
15839 if (tree_int_cst_equal (size, op01))
15840 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15841 }
15842 /* ((foo *)&fooarray)[1] => fooarray[1] */
15843 else if (TREE_CODE (op00type) == ARRAY_TYPE
15844 && type == TREE_TYPE (op00type))
15845 {
15846 tree type_domain = TYPE_DOMAIN (op00type);
15847 tree min_val = size_zero_node;
15848 if (type_domain && TYPE_MIN_VALUE (type_domain))
15849 min_val = TYPE_MIN_VALUE (type_domain);
15850 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15851 TYPE_SIZE_UNIT (type));
15852 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15853 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15854 NULL_TREE, NULL_TREE);
15855 }
15856 }
15857 }
15858
15859 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15860 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15861 && type == TREE_TYPE (TREE_TYPE (subtype))
15862 && (!in_gimple_form
15863 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15864 {
15865 tree type_domain;
15866 tree min_val = size_zero_node;
15867 sub = build_fold_indirect_ref_loc (loc, sub);
15868 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15869 if (type_domain && TYPE_MIN_VALUE (type_domain))
15870 min_val = TYPE_MIN_VALUE (type_domain);
15871 if (in_gimple_form
15872 && TREE_CODE (min_val) != INTEGER_CST)
15873 return NULL_TREE;
15874 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15875 NULL_TREE);
15876 }
15877
15878 return NULL_TREE;
15879 }
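
/* Editor's example (illustrative): given

     int a[4];  int *p = (int *) &a;

   the ADDR_EXPR case above folds *p, i.e. *(int *)&a, to a[0], and
   the POINTER_PLUS_EXPR case folds *((int *)&a + 1) to a[1] by
   dividing the byte offset 4 by TYPE_SIZE_UNIT (int) with
   EXACT_DIV_EXPR to recover the index.  */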
15880
15881 /* Builds an expression for an indirection through T, simplifying some
15882 cases. */
15883
15884 tree
15885 build_fold_indirect_ref_loc (location_t loc, tree t)
15886 {
15887 tree type = TREE_TYPE (TREE_TYPE (t));
15888 tree sub = fold_indirect_ref_1 (loc, type, t);
15889
15890 if (sub)
15891 return sub;
15892
15893 return build1_loc (loc, INDIRECT_REF, type, t);
15894 }
15895
15896 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15897
15898 tree
15899 fold_indirect_ref_loc (location_t loc, tree t)
15900 {
15901 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15902
15903 if (sub)
15904 return sub;
15905 else
15906 return t;
15907 }
15908
15909 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15910 whose result is ignored. The type of the returned tree need not be
15911 the same as the original expression. */
15912
15913 tree
15914 fold_ignored_result (tree t)
15915 {
15916 if (!TREE_SIDE_EFFECTS (t))
15917 return integer_zero_node;
15918
15919 for (;;)
15920 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15921 {
15922 case tcc_unary:
15923 t = TREE_OPERAND (t, 0);
15924 break;
15925
15926 case tcc_binary:
15927 case tcc_comparison:
15928 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15929 t = TREE_OPERAND (t, 0);
15930 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15931 t = TREE_OPERAND (t, 1);
15932 else
15933 return t;
15934 break;
15935
15936 case tcc_expression:
15937 switch (TREE_CODE (t))
15938 {
15939 case COMPOUND_EXPR:
15940 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15941 return t;
15942 t = TREE_OPERAND (t, 0);
15943 break;
15944
15945 case COND_EXPR:
15946 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15947 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15948 return t;
15949 t = TREE_OPERAND (t, 0);
15950 break;
15951
15952 default:
15953 return t;
15954 }
15955 break;
15956
15957 default:
15958 return t;
15959 }
15960 }
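
/* Editor's example (illustrative): if the value of the expression

     (f (), x + 1)

   is ignored, the COMPOUND_EXPR case above discards the pure
   arithmetic X + 1 and keeps only the call F (), which has side
   effects; an expression with no side effects at all collapses to
   integer_zero_node.  */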
15961
15962 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15963
15964 tree
15965 round_up_loc (location_t loc, tree value, unsigned int divisor)
15966 {
15967 tree div = NULL_TREE;
15968
15969 if (divisor == 1)
15970 return value;
15971
15972 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15973 have to do anything.  Only do this when VALUE is not a constant,
15974 because for a constant this check is more expensive than simply
15975 doing the rounding.  */
15976 if (TREE_CODE (value) != INTEGER_CST)
15977 {
15978 div = build_int_cst (TREE_TYPE (value), divisor);
15979
15980 if (multiple_of_p (TREE_TYPE (value), value, div))
15981 return value;
15982 }
15983
15984 /* If divisor is a power of two, simplify this to bit manipulation. */
15985 if (divisor == (divisor & -divisor))
15986 {
15987 if (TREE_CODE (value) == INTEGER_CST)
15988 {
15989 wide_int val = value;
15990 bool overflow_p;
15991
15992 if ((val & (divisor - 1)) == 0)
15993 return value;
15994
15995 overflow_p = TREE_OVERFLOW (value);
15996 val &= ~(divisor - 1);
15997 val += divisor;
15998 if (val == 0)
15999 overflow_p = true;
16000
16001 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16002 }
16003 else
16004 {
16005 tree t;
16006
16007 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16008 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16009 t = build_int_cst (TREE_TYPE (value), -divisor);
16010 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16011 }
16012 }
16013 else
16014 {
16015 if (!div)
16016 div = build_int_cst (TREE_TYPE (value), divisor);
16017 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16018 value = size_binop_loc (loc, MULT_EXPR, value, div);
16019 }
16020
16021 return value;
16022 }
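
/* Editor's sketch (hypothetical helper, not part of GCC) of the
   power-of-two fast path above, restated as host arithmetic:
   rounding up to a power-of-two divisor is an add and a mask, with
   no division.  */
static unsigned long long
round_up_example (unsigned long long value, unsigned long long divisor)
{
  /* DIVISOR must be a nonzero power of two.  */
  return (value + divisor - 1) & ~(divisor - 1);
}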
16023
16024 /* Likewise, but round down. */
16025
16026 tree
16027 round_down_loc (location_t loc, tree value, int divisor)
16028 {
16029 tree div = NULL_TREE;
16030
16031 gcc_assert (divisor > 0);
16032 if (divisor == 1)
16033 return value;
16034
16035 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16036 have to do anything.  Only do this when VALUE is not a constant,
16037 because for a constant this check is more expensive than simply
16038 doing the rounding.  */
16039 if (TREE_CODE (value) != INTEGER_CST)
16040 {
16041 div = build_int_cst (TREE_TYPE (value), divisor);
16042
16043 if (multiple_of_p (TREE_TYPE (value), value, div))
16044 return value;
16045 }
16046
16047 /* If divisor is a power of two, simplify this to bit manipulation. */
16048 if (divisor == (divisor & -divisor))
16049 {
16050 tree t;
16051
16052 t = build_int_cst (TREE_TYPE (value), -divisor);
16053 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16054 }
16055 else
16056 {
16057 if (!div)
16058 div = build_int_cst (TREE_TYPE (value), divisor);
16059 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16060 value = size_binop_loc (loc, MULT_EXPR, value, div);
16061 }
16062
16063 return value;
16064 }
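
/* Editor's note (illustrative): the power-of-two case of rounding
   down is a single mask; on two's-complement host integers,
   13 & -8 == 8.  */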
16065
16066 /* Returns a pointer to the base of the object addressed by EXP and
16067 extracts information about the offset of the access, storing it
16068 in *PBITPOS and *POFFSET.  */
16069
16070 static tree
16071 split_address_to_core_and_offset (tree exp,
16072 HOST_WIDE_INT *pbitpos, tree *poffset)
16073 {
16074 tree core;
16075 machine_mode mode;
16076 int unsignedp, volatilep;
16077 HOST_WIDE_INT bitsize;
16078 location_t loc = EXPR_LOCATION (exp);
16079
16080 if (TREE_CODE (exp) == ADDR_EXPR)
16081 {
16082 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16083 poffset, &mode, &unsignedp, &volatilep,
16084 false);
16085 core = build_fold_addr_expr_loc (loc, core);
16086 }
16087 else
16088 {
16089 core = exp;
16090 *pbitpos = 0;
16091 *poffset = NULL_TREE;
16092 }
16093
16094 return core;
16095 }
16096
16097 /* Returns true if addresses of E1 and E2 differ by a constant, false
16098 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16099
16100 bool
16101 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16102 {
16103 tree core1, core2;
16104 HOST_WIDE_INT bitpos1, bitpos2;
16105 tree toffset1, toffset2, tdiff, type;
16106
16107 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16108 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16109
16110 if (bitpos1 % BITS_PER_UNIT != 0
16111 || bitpos2 % BITS_PER_UNIT != 0
16112 || !operand_equal_p (core1, core2, 0))
16113 return false;
16114
16115 if (toffset1 && toffset2)
16116 {
16117 type = TREE_TYPE (toffset1);
16118 if (type != TREE_TYPE (toffset2))
16119 toffset2 = fold_convert (type, toffset2);
16120
16121 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16122 if (!cst_and_fits_in_hwi (tdiff))
16123 return false;
16124
16125 *diff = int_cst_value (tdiff);
16126 }
16127 else if (toffset1 || toffset2)
16128 {
16129 /* If only one of the offsets is non-constant, the difference cannot
16130 be a constant. */
16131 return false;
16132 }
16133 else
16134 *diff = 0;
16135
16136 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16137 return true;
16138 }
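
/* Editor's example (illustrative): for

     int a[10];

   the addresses &a[3] and &a[1] both split to the core &a with
   constant bit positions 96 and 32, so the routine above stores
   (96 - 32) / BITS_PER_UNIT == 8 in *DIFF and returns true.  If one
   address had a variable offset and the other did not, it would
   return false.  */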
16139
16140 /* Simplify the floating point expression EXP when the sign of the
16141 result is not significant. Return NULL_TREE if no simplification
16142 is possible. */
16143
16144 tree
16145 fold_strip_sign_ops (tree exp)
16146 {
16147 tree arg0, arg1;
16148 location_t loc = EXPR_LOCATION (exp);
16149
16150 switch (TREE_CODE (exp))
16151 {
16152 case ABS_EXPR:
16153 case NEGATE_EXPR:
16154 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16155 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16156
16157 case MULT_EXPR:
16158 case RDIV_EXPR:
16159 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
16160 return NULL_TREE;
16161 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16162 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16163 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16164 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16165 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16166 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16167 break;
16168
16169 case COMPOUND_EXPR:
16170 arg0 = TREE_OPERAND (exp, 0);
16171 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16172 if (arg1)
16173 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16174 break;
16175
16176 case COND_EXPR:
16177 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16178 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16179 if (arg0 || arg1)
16180 return fold_build3_loc (loc,
16181 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16182 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16183 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16184 break;
16185
16186 case CALL_EXPR:
16187 {
16188 const enum built_in_function fcode = builtin_mathfn_code (exp);
16189 switch (fcode)
16190 {
16191 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16192 /* Strip the copysign function call and return its first argument.  */
16193 arg0 = CALL_EXPR_ARG (exp, 0);
16194 arg1 = CALL_EXPR_ARG (exp, 1);
16195 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16196
16197 default:
16198 /* Strip sign ops from the argument of "odd" math functions. */
16199 if (negate_mathfn_p (fcode))
16200 {
16201 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16202 if (arg0)
16203 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16204 }
16205 break;
16206 }
16207 }
16208 break;
16209
16210 default:
16211 break;
16212 }
16213 return NULL_TREE;
16214 }
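
/* Editor's example (illustrative): when the sign of the result does
   not matter, e.g. when computing x * x for a comparison against
   zero, the folder above can rewrite

     -x * -y          =>  x * y
     copysign (x, y)  =>  x           (Y kept only for side effects)
     sin (-x)         =>  sin (x)     (SIN is an odd function)

   and returns NULL_TREE whenever nothing could be stripped.  */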