fold-const.c (fold_binary_loc): Don't fold if the result is undefined.
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

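/* Illustrative usage sketch (an addition for exposition, not part of the
   original sources): a caller computing the byte size of a header plus
   N fixed-size elements with the entry points documented above might
   write

     tree bytes = size_binop (PLUS_EXPR,
			      size_binop (MULT_EXPR, nelts,
					  size_int (elt_size)),
			      size_int (header_size));

   where `nelts', `elt_size' and `header_size' are hypothetical names
   used only for this example.  When every operand is an INTEGER_CST the
   result folds to a constant; otherwise a (possibly simplified) tree is
   built.  */
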
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    machine_mode *, int *, int *,
				    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
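
/* Illustrative sketch (an assumption for exposition, not in the original
   file): with int constants,

     div_if_zero_remainder (build_int_cst (integer_type_node, 12),
			    build_int_cst (integer_type_node, 4))

   yields the INTEGER_CST 3, while dividing 13 by 4 leaves a remainder
   and therefore yields NULL_TREE.  */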
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
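
/* Illustrative sketch (not part of the original file): a typical
   speculative caller brackets a fold with the defer/undefer pair and
   only lets the warning out when the folded result is actually used:

     fold_defer_overflow_warnings ();
     tree folded = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (folded != NULL_TREE, stmt, 0);

   where `type', `op0', `op1' and `stmt' stand for whatever the caller
   has at hand.  Passing 0 as CODE means the deferred warning level is
   used unchanged.  */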

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
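
/* For instance, sin is odd: -sin (x) == sin (-x) for every x, so a
   negation can be pushed into the call.  The rint family above is only
   odd when -frounding-math is not in effect, because under a directed
   rounding mode rint (-x) and -rint (x) can differ (e.g. x == 0.5 when
   rounding toward +infinity).  */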

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
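
/* Concretely: in a signed 32-bit type the only constant that cannot be
   negated is INT_MIN (0x80000000), whose negation is not representable;
   wi::only_sign_bit_p detects exactly that bit pattern.  All unsigned
   constants are rejected above, since negating them would wrap.  */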

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
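
/* Worked example (illustrative only): for a signed int expression
   T = a - b, negate_expr (T) goes through fold_negate_expr's
   MINUS_EXPR case (signed zeros are not honored for integers) and
   returns the tree for b - a; for T = ~x it returns x + 1 via the
   BIT_NOT_EXPR case; and when nothing above applies it falls back to
   wrapping T in a plain NEGATE_EXPR node.  */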
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
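
/* Decomposition example (illustrative only): splitting IN = x - 5 with
   CODE == PLUS_EXPR and NEGATE_P == 0 stores the literal 5 in
   *MINUS_LITP (it was subtracted), leaves *CONP and *LITP null, and
   returns x as the variable part.  Splitting IN = ~x the same way
   yields *MINUS_LITP = 1 and the variable part -x, undoing the
   -x - 1 == ~x folding mentioned above.  */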

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
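
/* For example (illustrative only): with CODE == PLUS_EXPR, T1 = a + b
   and T2 = -y, the guard above fires because T1 already has code
   PLUS_EXPR, and the NEGATE_EXPR special case builds (a + b) - y
   directly with build2_loc rather than re-folding, which could recurse
   forever.  If either argument is null, the other is returned
   unchanged.  */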
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
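
/* Usage sketch (illustrative only): combining two int constants,

     tree five = int_const_binop (PLUS_EXPR,
				  build_int_cst (integer_type_node, 2),
				  build_int_cst (integer_type_node, 3));

   yields the INTEGER_CST 5.  Had the signed addition overflowed,
   force_fit_type would have set TREE_OVERFLOW on the result, and for a
   code the switch cannot evaluate (or a division by zero) the function
   returns NULL_TREE instead.  */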

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fall through.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi
	      */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     div = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_RSHIFT_EXPR)
	{
	  if (!tree_fits_uhwi_p (arg2))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
	  unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
	  unsigned HOST_WIDE_INT innerc
	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;
	  /* The direction of VEC_RSHIFT_EXPR is endian dependent.
	     For reductions, if !BYTES_BIG_ENDIAN the compiler picks the first
	     vector element, but the last element if BYTES_BIG_ENDIAN.  */
	  if (BYTES_BIG_ENDIAN)
	    offset = -offset;
	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
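
/* The straight-line complex division above is the textbook identity;
   as a worked instance (illustrative only), for (3 + 4i) / (1 + 2i) we
   get t = 1*1 + 2*2 = 5, real = (3*1 + 4*2) / 5 = 11/5 and
   imag = (4*1 - 3*2) / 5 = -2/5, i.e. 2.2 - 0.4i.  The wide-range
   variant instead scales by the ratio of the divisor's parts, so the
   intermediate products are far less likely to overflow.  */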

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
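
/* Usage sketch (illustrative only): size_binop is the wrapper around
   size_binop_loc used throughout the middle end for size arithmetic,
   e.g.

     tree sz = size_binop (MULT_EXPR, size_int (4), len);

   for a hypothetical `len' of sizetype.  With len == size_int (8) the
   constant path applies and yields the sizetype constant 32, with
   overflow tracked even though sizetype is unsigned.  */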

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
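
/* For example (illustrative only): applying this to the sizetype
   constants 4 and 12 returns -8 as an ssizetype constant; the
   subtraction is performed as 12 - 4 in the unsigned type, converted
   to ssizetype, and then subtracted from zero, so no intermediate step
   overflows.  */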
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
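
/* Behavior sketch (illustrative only): converting the REAL_CST 3.9 to
   int with FIX_TRUNC_EXPR truncates toward zero and yields 3; 1e30
   saturates to the type's maximum with TREE_OVERFLOW set; and a NaN
   becomes 0, also with TREE_OVERFLOW set.  */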
1658
1659 /* A subroutine of fold_convert_const handling conversions of a
1660 FIXED_CST to an integer type. */
1661
1662 static tree
1663 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1664 {
1665 tree t;
1666 double_int temp, temp_trunc;
1667 unsigned int mode;
1668
1669 /* Right shift FIXED_CST to temp by fbit. */
1670 temp = TREE_FIXED_CST (arg1).data;
1671 mode = TREE_FIXED_CST (arg1).mode;
1672 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1673 {
1674 temp = temp.rshift (GET_MODE_FBIT (mode),
1675 HOST_BITS_PER_DOUBLE_INT,
1676 SIGNED_FIXED_POINT_MODE_P (mode));
1677
1678 /* Left shift temp to temp_trunc by fbit. */
1679 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1680 HOST_BITS_PER_DOUBLE_INT,
1681 SIGNED_FIXED_POINT_MODE_P (mode));
1682 }
1683 else
1684 {
1685 temp = double_int_zero;
1686 temp_trunc = double_int_zero;
1687 }
1688
1689 /* If FIXED_CST is negative, we need to round the value toward 0.
1690 By checking if the fractional bits are not zero to add 1 to temp. */
1691 if (SIGNED_FIXED_POINT_MODE_P (mode)
1692 && temp_trunc.is_negative ()
1693 && TREE_FIXED_CST (arg1).data != temp_trunc)
1694 temp += double_int_one;
1695
1696 /* Given a fixed-point constant, make new constant with new type,
1697 appropriately sign-extended or truncated. */
1698 t = force_fit_type (type, temp, -1,
1699 (temp.is_negative ()
1700 && (TYPE_UNSIGNED (type)
1701 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1702 | TREE_OVERFLOW (arg1));
1703
1704 return t;
1705 }
1706
1707 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1708 to another floating point type. */
1709
1710 static tree
1711 fold_convert_const_real_from_real (tree type, const_tree arg1)
1712 {
1713 REAL_VALUE_TYPE value;
1714 tree t;
1715
1716 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1717 t = build_real (type, value);
1718
1719 /* If converting an infinity or NAN to a representation that doesn't
1720 have one, set the overflow bit so that we can produce some kind of
1721 error message at the appropriate point if necessary. It's not the
1722 most user-friendly message, but it's better than nothing. */
1723 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1724 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1725 TREE_OVERFLOW (t) = 1;
1726 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1727 && !MODE_HAS_NANS (TYPE_MODE (type)))
1728 TREE_OVERFLOW (t) = 1;
1729 /* Regular overflow, conversion produced an infinity in a mode that
1730 can't represent them. */
1731 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1732 && REAL_VALUE_ISINF (value)
1733 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1734 TREE_OVERFLOW (t) = 1;
1735 else
1736 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1737 return t;
1738 }
1739
1740 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1741 to a floating point type. */
1742
1743 static tree
1744 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1745 {
1746 REAL_VALUE_TYPE value;
1747 tree t;
1748
1749 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1750 t = build_real (type, value);
1751
1752 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1753 return t;
1754 }
1755
1756 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1757 to another fixed-point type. */
1758
1759 static tree
1760 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1761 {
1762 FIXED_VALUE_TYPE value;
1763 tree t;
1764 bool overflow_p;
1765
1766 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1767 TYPE_SATURATING (type));
1768 t = build_fixed (type, value);
1769
1770 /* Propagate overflow flags. */
1771 if (overflow_p | TREE_OVERFLOW (arg1))
1772 TREE_OVERFLOW (t) = 1;
1773 return t;
1774 }
1775
1776 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1777 to a fixed-point type. */
1778
1779 static tree
1780 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1781 {
1782 FIXED_VALUE_TYPE value;
1783 tree t;
1784 bool overflow_p;
1785 double_int di;
1786
1787 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
1788
1789 di.low = TREE_INT_CST_ELT (arg1, 0);
1790 if (TREE_INT_CST_NUNITS (arg1) == 1)
1791 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
1792 else
1793 di.high = TREE_INT_CST_ELT (arg1, 1);
1794
1795 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
1796 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1797 TYPE_SATURATING (type));
1798 t = build_fixed (type, value);
1799
1800 /* Propagate overflow flags. */
1801 if (overflow_p | TREE_OVERFLOW (arg1))
1802 TREE_OVERFLOW (t) = 1;
1803 return t;
1804 }
1805
1806 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1807 to a fixed-point type. */
1808
1809 static tree
1810 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1811 {
1812 FIXED_VALUE_TYPE value;
1813 tree t;
1814 bool overflow_p;
1815
1816 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1817 &TREE_REAL_CST (arg1),
1818 TYPE_SATURATING (type));
1819 t = build_fixed (type, value);
1820
1821 /* Propagate overflow flags. */
1822 if (overflow_p | TREE_OVERFLOW (arg1))
1823 TREE_OVERFLOW (t) = 1;
1824 return t;
1825 }
1826
1827 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1828 type TYPE. If no simplification can be done return NULL_TREE. */
1829
1830 static tree
1831 fold_convert_const (enum tree_code code, tree type, tree arg1)
1832 {
1833 if (TREE_TYPE (arg1) == type)
1834 return arg1;
1835
1836 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1837 || TREE_CODE (type) == OFFSET_TYPE)
1838 {
1839 if (TREE_CODE (arg1) == INTEGER_CST)
1840 return fold_convert_const_int_from_int (type, arg1);
1841 else if (TREE_CODE (arg1) == REAL_CST)
1842 return fold_convert_const_int_from_real (code, type, arg1);
1843 else if (TREE_CODE (arg1) == FIXED_CST)
1844 return fold_convert_const_int_from_fixed (type, arg1);
1845 }
1846 else if (TREE_CODE (type) == REAL_TYPE)
1847 {
1848 if (TREE_CODE (arg1) == INTEGER_CST)
1849 return build_real_from_int_cst (type, arg1);
1850 else if (TREE_CODE (arg1) == REAL_CST)
1851 return fold_convert_const_real_from_real (type, arg1);
1852 else if (TREE_CODE (arg1) == FIXED_CST)
1853 return fold_convert_const_real_from_fixed (type, arg1);
1854 }
1855 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1856 {
1857 if (TREE_CODE (arg1) == FIXED_CST)
1858 return fold_convert_const_fixed_from_fixed (type, arg1);
1859 else if (TREE_CODE (arg1) == INTEGER_CST)
1860 return fold_convert_const_fixed_from_int (type, arg1);
1861 else if (TREE_CODE (arg1) == REAL_CST)
1862 return fold_convert_const_fixed_from_real (type, arg1);
1863 }
1864 return NULL_TREE;
1865 }
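
/* An illustrative sketch (hypothetical usage, not code from this file):
   folding the constant conversion (double) 42 at compile time.

     tree cst = build_int_cst (integer_type_node, 42);
     tree folded = fold_convert_const (FLOAT_EXPR, double_type_node, cst);

   On success FOLDED is a REAL_CST with value 42.0; when no
   simplification is possible the function returns NULL_TREE and the
   caller must build an explicit conversion instead.  */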
1866
1867 /* Construct a vector of zero elements of vector type TYPE. */
1868
1869 static tree
1870 build_zero_vector (tree type)
1871 {
1872 tree t;
1873
1874 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1875 return build_vector_from_val (type, t);
1876 }
1877
1878 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1879
1880 bool
1881 fold_convertible_p (const_tree type, const_tree arg)
1882 {
1883 tree orig = TREE_TYPE (arg);
1884
1885 if (type == orig)
1886 return true;
1887
1888 if (TREE_CODE (arg) == ERROR_MARK
1889 || TREE_CODE (type) == ERROR_MARK
1890 || TREE_CODE (orig) == ERROR_MARK)
1891 return false;
1892
1893 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1894 return true;
1895
1896 switch (TREE_CODE (type))
1897 {
1898 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1899 case POINTER_TYPE: case REFERENCE_TYPE:
1900 case OFFSET_TYPE:
1901 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1902 || TREE_CODE (orig) == OFFSET_TYPE)
1903 return true;
1904 return (TREE_CODE (orig) == VECTOR_TYPE
1905 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1906
1907 case REAL_TYPE:
1908 case FIXED_POINT_TYPE:
1909 case COMPLEX_TYPE:
1910 case VECTOR_TYPE:
1911 case VOID_TYPE:
1912 return TREE_CODE (type) == TREE_CODE (orig);
1913
1914 default:
1915 return false;
1916 }
1917 }
1918
1919 /* Convert expression ARG to type TYPE. Used by the middle-end for
1920 simple conversions in preference to calling the front-end's convert. */
1921
1922 tree
1923 fold_convert_loc (location_t loc, tree type, tree arg)
1924 {
1925 tree orig = TREE_TYPE (arg);
1926 tree tem;
1927
1928 if (type == orig)
1929 return arg;
1930
1931 if (TREE_CODE (arg) == ERROR_MARK
1932 || TREE_CODE (type) == ERROR_MARK
1933 || TREE_CODE (orig) == ERROR_MARK)
1934 return error_mark_node;
1935
1936 switch (TREE_CODE (type))
1937 {
1938 case POINTER_TYPE:
1939 case REFERENCE_TYPE:
1940 /* Handle conversions between pointers to different address spaces. */
1941 if (POINTER_TYPE_P (orig)
1942 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1943 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1944 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1945 /* fall through */
1946
1947 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1948 case OFFSET_TYPE:
1949 if (TREE_CODE (arg) == INTEGER_CST)
1950 {
1951 tem = fold_convert_const (NOP_EXPR, type, arg);
1952 if (tem != NULL_TREE)
1953 return tem;
1954 }
1955 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1956 || TREE_CODE (orig) == OFFSET_TYPE)
1957 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1958 if (TREE_CODE (orig) == COMPLEX_TYPE)
1959 return fold_convert_loc (loc, type,
1960 fold_build1_loc (loc, REALPART_EXPR,
1961 TREE_TYPE (orig), arg));
1962 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1963 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1964 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1965
1966 case REAL_TYPE:
1967 if (TREE_CODE (arg) == INTEGER_CST)
1968 {
1969 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1970 if (tem != NULL_TREE)
1971 return tem;
1972 }
1973 else if (TREE_CODE (arg) == REAL_CST)
1974 {
1975 tem = fold_convert_const (NOP_EXPR, type, arg);
1976 if (tem != NULL_TREE)
1977 return tem;
1978 }
1979 else if (TREE_CODE (arg) == FIXED_CST)
1980 {
1981 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1982 if (tem != NULL_TREE)
1983 return tem;
1984 }
1985
1986 switch (TREE_CODE (orig))
1987 {
1988 case INTEGER_TYPE:
1989 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1990 case POINTER_TYPE: case REFERENCE_TYPE:
1991 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1992
1993 case REAL_TYPE:
1994 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1995
1996 case FIXED_POINT_TYPE:
1997 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1998
1999 case COMPLEX_TYPE:
2000 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2001 return fold_convert_loc (loc, type, tem);
2002
2003 default:
2004 gcc_unreachable ();
2005 }
2006
2007 case FIXED_POINT_TYPE:
2008 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2009 || TREE_CODE (arg) == REAL_CST)
2010 {
2011 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2012 if (tem != NULL_TREE)
2013 goto fold_convert_exit;
2014 }
2015
2016 switch (TREE_CODE (orig))
2017 {
2018 case FIXED_POINT_TYPE:
2019 case INTEGER_TYPE:
2020 case ENUMERAL_TYPE:
2021 case BOOLEAN_TYPE:
2022 case REAL_TYPE:
2023 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2024
2025 case COMPLEX_TYPE:
2026 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2027 return fold_convert_loc (loc, type, tem);
2028
2029 default:
2030 gcc_unreachable ();
2031 }
2032
2033 case COMPLEX_TYPE:
2034 switch (TREE_CODE (orig))
2035 {
2036 case INTEGER_TYPE:
2037 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2038 case POINTER_TYPE: case REFERENCE_TYPE:
2039 case REAL_TYPE:
2040 case FIXED_POINT_TYPE:
2041 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2042 fold_convert_loc (loc, TREE_TYPE (type), arg),
2043 fold_convert_loc (loc, TREE_TYPE (type),
2044 integer_zero_node));
2045 case COMPLEX_TYPE:
2046 {
2047 tree rpart, ipart;
2048
2049 if (TREE_CODE (arg) == COMPLEX_EXPR)
2050 {
2051 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2052 TREE_OPERAND (arg, 0));
2053 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2054 TREE_OPERAND (arg, 1));
2055 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2056 }
2057
2058 arg = save_expr (arg);
2059 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2060 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2061 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2062 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2063 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2064 }
2065
2066 default:
2067 gcc_unreachable ();
2068 }
2069
2070 case VECTOR_TYPE:
2071 if (integer_zerop (arg))
2072 return build_zero_vector (type);
2073 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2074 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2075 || TREE_CODE (orig) == VECTOR_TYPE);
2076 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2077
2078 case VOID_TYPE:
2079 tem = fold_ignored_result (arg);
2080 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2081
2082 default:
2083 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2084 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2085 gcc_unreachable ();
2086 }
2087 fold_convert_exit:
2088 protected_set_expr_location_unshare (tem, loc);
2089 return tem;
2090 }
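
/* A hypothetical example of the above: converting a COMPLEX_TYPE value
   to a REAL_TYPE keeps only the real part, so for Z of type
   complex double,

     fold_convert_loc (loc, double_type_node, z)

   builds REALPART_EXPR <z> and converts that to double, rather than
   emitting a plain NOP_EXPR.  (Sketch only; Z stands for any tree of
   complex type.)  */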
2091 \f
2092 /* Return false if X can be assumed not to be an lvalue, true
2093 otherwise. */
2094
2095 static bool
2096 maybe_lvalue_p (const_tree x)
2097 {
2098 /* We only need to wrap lvalue tree codes. */
2099 switch (TREE_CODE (x))
2100 {
2101 case VAR_DECL:
2102 case PARM_DECL:
2103 case RESULT_DECL:
2104 case LABEL_DECL:
2105 case FUNCTION_DECL:
2106 case SSA_NAME:
2107
2108 case COMPONENT_REF:
2109 case MEM_REF:
2110 case INDIRECT_REF:
2111 case ARRAY_REF:
2112 case ARRAY_RANGE_REF:
2113 case BIT_FIELD_REF:
2114 case OBJ_TYPE_REF:
2115
2116 case REALPART_EXPR:
2117 case IMAGPART_EXPR:
2118 case PREINCREMENT_EXPR:
2119 case PREDECREMENT_EXPR:
2120 case SAVE_EXPR:
2121 case TRY_CATCH_EXPR:
2122 case WITH_CLEANUP_EXPR:
2123 case COMPOUND_EXPR:
2124 case MODIFY_EXPR:
2125 case TARGET_EXPR:
2126 case COND_EXPR:
2127 case BIND_EXPR:
2128 break;
2129
2130 default:
2131 /* Assume the worst for front-end tree codes. */
2132 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2133 break;
2134 return false;
2135 }
2136
2137 return true;
2138 }
2139
2140 /* Return an expr equal to X but certainly not valid as an lvalue. */
2141
2142 tree
2143 non_lvalue_loc (location_t loc, tree x)
2144 {
2145 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2146 us. */
2147 if (in_gimple_form)
2148 return x;
2149
2150 if (! maybe_lvalue_p (x))
2151 return x;
2152 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2153 }
2154
2155 /* When pedantic, return an expr equal to X but certainly not valid as a
2156 pedantic lvalue. Otherwise, return X. */
2157
2158 static tree
2159 pedantic_non_lvalue_loc (location_t loc, tree x)
2160 {
2161 return protected_set_expr_location_unshare (x, loc);
2162 }
2163 \f
2164 /* Given a tree comparison code, return the code that is the logical inverse.
2165 It is generally not safe to do this for floating-point comparisons, except
2166 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2167 ERROR_MARK in this case. */
2168
2169 enum tree_code
2170 invert_tree_comparison (enum tree_code code, bool honor_nans)
2171 {
2172 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2173 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2174 return ERROR_MARK;
2175
2176 switch (code)
2177 {
2178 case EQ_EXPR:
2179 return NE_EXPR;
2180 case NE_EXPR:
2181 return EQ_EXPR;
2182 case GT_EXPR:
2183 return honor_nans ? UNLE_EXPR : LE_EXPR;
2184 case GE_EXPR:
2185 return honor_nans ? UNLT_EXPR : LT_EXPR;
2186 case LT_EXPR:
2187 return honor_nans ? UNGE_EXPR : GE_EXPR;
2188 case LE_EXPR:
2189 return honor_nans ? UNGT_EXPR : GT_EXPR;
2190 case LTGT_EXPR:
2191 return UNEQ_EXPR;
2192 case UNEQ_EXPR:
2193 return LTGT_EXPR;
2194 case UNGT_EXPR:
2195 return LE_EXPR;
2196 case UNGE_EXPR:
2197 return LT_EXPR;
2198 case UNLT_EXPR:
2199 return GE_EXPR;
2200 case UNLE_EXPR:
2201 return GT_EXPR;
2202 case ORDERED_EXPR:
2203 return UNORDERED_EXPR;
2204 case UNORDERED_EXPR:
2205 return ORDERED_EXPR;
2206 default:
2207 gcc_unreachable ();
2208 }
2209 }
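
/* Informal examples for the table above, assuming -fno-trapping-math:

     invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR
     invert_tree_comparison (LT_EXPR, false) == GE_EXPR

   !(x < y) is true when x and y are unordered, hence the UN* result
   when NaNs are honored.  With NaNs honored and -ftrapping-math both
   in effect, the function returns ERROR_MARK for LT_EXPR instead,
   since the inverted comparison would not trap on NaN operands the
   way the original does.  */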
2210
2211 /* Similar, but return the comparison that results if the operands are
2212 swapped. This is safe for floating-point. */
2213
2214 enum tree_code
2215 swap_tree_comparison (enum tree_code code)
2216 {
2217 switch (code)
2218 {
2219 case EQ_EXPR:
2220 case NE_EXPR:
2221 case ORDERED_EXPR:
2222 case UNORDERED_EXPR:
2223 case LTGT_EXPR:
2224 case UNEQ_EXPR:
2225 return code;
2226 case GT_EXPR:
2227 return LT_EXPR;
2228 case GE_EXPR:
2229 return LE_EXPR;
2230 case LT_EXPR:
2231 return GT_EXPR;
2232 case LE_EXPR:
2233 return GE_EXPR;
2234 case UNGT_EXPR:
2235 return UNLT_EXPR;
2236 case UNGE_EXPR:
2237 return UNLE_EXPR;
2238 case UNLT_EXPR:
2239 return UNGT_EXPR;
2240 case UNLE_EXPR:
2241 return UNGE_EXPR;
2242 default:
2243 gcc_unreachable ();
2244 }
2245 }
2246
2248 /* Convert a comparison tree code from an enum tree_code representation
2249 into a compcode bit-based encoding. This function is the inverse of
2250 compcode_to_comparison. */
2251
2252 static enum comparison_code
2253 comparison_to_compcode (enum tree_code code)
2254 {
2255 switch (code)
2256 {
2257 case LT_EXPR:
2258 return COMPCODE_LT;
2259 case EQ_EXPR:
2260 return COMPCODE_EQ;
2261 case LE_EXPR:
2262 return COMPCODE_LE;
2263 case GT_EXPR:
2264 return COMPCODE_GT;
2265 case NE_EXPR:
2266 return COMPCODE_NE;
2267 case GE_EXPR:
2268 return COMPCODE_GE;
2269 case ORDERED_EXPR:
2270 return COMPCODE_ORD;
2271 case UNORDERED_EXPR:
2272 return COMPCODE_UNORD;
2273 case UNLT_EXPR:
2274 return COMPCODE_UNLT;
2275 case UNEQ_EXPR:
2276 return COMPCODE_UNEQ;
2277 case UNLE_EXPR:
2278 return COMPCODE_UNLE;
2279 case UNGT_EXPR:
2280 return COMPCODE_UNGT;
2281 case LTGT_EXPR:
2282 return COMPCODE_LTGT;
2283 case UNGE_EXPR:
2284 return COMPCODE_UNGE;
2285 default:
2286 gcc_unreachable ();
2287 }
2288 }
2289
2290 /* Convert a compcode bit-based encoding of a comparison operator back
2291 to GCC's enum tree_code representation. This function is the
2292 inverse of comparison_to_compcode. */
2293
2294 static enum tree_code
2295 compcode_to_comparison (enum comparison_code code)
2296 {
2297 switch (code)
2298 {
2299 case COMPCODE_LT:
2300 return LT_EXPR;
2301 case COMPCODE_EQ:
2302 return EQ_EXPR;
2303 case COMPCODE_LE:
2304 return LE_EXPR;
2305 case COMPCODE_GT:
2306 return GT_EXPR;
2307 case COMPCODE_NE:
2308 return NE_EXPR;
2309 case COMPCODE_GE:
2310 return GE_EXPR;
2311 case COMPCODE_ORD:
2312 return ORDERED_EXPR;
2313 case COMPCODE_UNORD:
2314 return UNORDERED_EXPR;
2315 case COMPCODE_UNLT:
2316 return UNLT_EXPR;
2317 case COMPCODE_UNEQ:
2318 return UNEQ_EXPR;
2319 case COMPCODE_UNLE:
2320 return UNLE_EXPR;
2321 case COMPCODE_UNGT:
2322 return UNGT_EXPR;
2323 case COMPCODE_LTGT:
2324 return LTGT_EXPR;
2325 case COMPCODE_UNGE:
2326 return UNGE_EXPR;
2327 default:
2328 gcc_unreachable ();
2329 }
2330 }
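
/* The bit-based encoding makes logical combination of comparisons a
   simple bitwise operation.  For example,

     comparison_to_compcode (LT_EXPR) | comparison_to_compcode (EQ_EXPR)
       == COMPCODE_LE

   so (x < y) || (x == y) collapses to x <= y just by OR-ing the codes,
   and AND-ing works the same way for conjunctions.  This is the
   property combine_comparisons below relies on.  */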
2331
2332 /* Return a tree for the comparison which is the combination of
2333 doing the AND or OR (depending on CODE) of the two operations LCODE
2334 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2335 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2336 if this makes the transformation invalid. */
2337
2338 tree
2339 combine_comparisons (location_t loc,
2340 enum tree_code code, enum tree_code lcode,
2341 enum tree_code rcode, tree truth_type,
2342 tree ll_arg, tree lr_arg)
2343 {
2344 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2345 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2346 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2347 int compcode;
2348
2349 switch (code)
2350 {
2351 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2352 compcode = lcompcode & rcompcode;
2353 break;
2354
2355 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2356 compcode = lcompcode | rcompcode;
2357 break;
2358
2359 default:
2360 return NULL_TREE;
2361 }
2362
2363 if (!honor_nans)
2364 {
2365 /* Eliminate unordered comparisons, as well as LTGT and ORD
2366 which are not used unless the mode has NaNs. */
2367 compcode &= ~COMPCODE_UNORD;
2368 if (compcode == COMPCODE_LTGT)
2369 compcode = COMPCODE_NE;
2370 else if (compcode == COMPCODE_ORD)
2371 compcode = COMPCODE_TRUE;
2372 }
2373 else if (flag_trapping_math)
2374 {
2375 /* Check that the original operation and the optimized ones will trap
2376 under the same condition. */
2377 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2378 && (lcompcode != COMPCODE_EQ)
2379 && (lcompcode != COMPCODE_ORD);
2380 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2381 && (rcompcode != COMPCODE_EQ)
2382 && (rcompcode != COMPCODE_ORD);
2383 bool trap = (compcode & COMPCODE_UNORD) == 0
2384 && (compcode != COMPCODE_EQ)
2385 && (compcode != COMPCODE_ORD);
2386
2387 /* In a short-circuited boolean expression the LHS might be
2388 such that the RHS, if evaluated, will never trap. For
2389 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2390 if neither x nor y is NaN. (This is a mixed blessing: for
2391 example, the expression above will never trap, hence
2392 optimizing it to x < y would be invalid). */
2393 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2394 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2395 rtrap = false;
2396
2397 /* If the comparison was short-circuited, and only the RHS
2398 trapped, we may now generate a spurious trap. */
2399 if (rtrap && !ltrap
2400 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2401 return NULL_TREE;
2402
2403 /* If we changed the conditions that cause a trap, we lose. */
2404 if ((ltrap || rtrap) != trap)
2405 return NULL_TREE;
2406 }
2407
2408 if (compcode == COMPCODE_TRUE)
2409 return constant_boolean_node (true, truth_type);
2410 else if (compcode == COMPCODE_FALSE)
2411 return constant_boolean_node (false, truth_type);
2412 else
2413 {
2414 enum tree_code tcode;
2415
2416 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2417 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2418 }
2419 }
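
/* A sketch of a typical call: folding (x < y) || (x == y) into x <= y.

     tree t = combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR,
                                   EQ_EXPR, boolean_type_node, x, y);

   Both the original left-hand comparison and the combined x <= y trap
   on unordered operands, so this folds even with -ftrapping-math.  By
   contrast, combining ORD (x, y) && x < y into x < y would be rejected
   with NULL_TREE, as explained above, because the original never traps
   while the combined form does.  (X and Y stand for arbitrary operands
   of the same floating-point type.)  */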
2420 \f
2421 /* Return nonzero if two operands (typically of the same tree node)
2422 are necessarily equal. If either argument has side-effects this
2423 function returns zero. FLAGS modifies behavior as follows:
2424
2425 If OEP_ONLY_CONST is set, only return nonzero for constants.
2426 This function tests whether the operands are indistinguishable;
2427 it does not test whether they are equal using C's == operation.
2428 The distinction is important for IEEE floating point, because
2429 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2430 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2431
2432 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2433 even though it may hold multiple values during a function.
2434 This is because a GCC tree node guarantees that nothing else is
2435 executed between the evaluation of its "operands" (which may often
2436 be evaluated in arbitrary order). Hence if the operands themselves
2437 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2438 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2439 unset means assuming isochronic (or instantaneous) tree equivalence.
2440 Unless comparing arbitrary expression trees, such as from different
2441 statements, this flag can usually be left unset.
2442
2443 If OEP_PURE_SAME is set, then pure functions with identical arguments
2444 are considered the same. It is used when the caller has other ways
2445 to ensure that global memory is unchanged in between. */
2446
2447 int
2448 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2449 {
2450 /* If either is ERROR_MARK, they aren't equal. */
2451 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2452 || TREE_TYPE (arg0) == error_mark_node
2453 || TREE_TYPE (arg1) == error_mark_node)
2454 return 0;
2455
2456 /* Similarly, if either does not have a type (like a released SSA name),
2457 they aren't equal. */
2458 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2459 return 0;
2460
2461 /* Check equality of integer constants before bailing out due to
2462 precision differences. */
2463 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2464 return tree_int_cst_equal (arg0, arg1);
2465
2466 /* If both types don't have the same signedness, then we can't consider
2467 them equal. We must check this before the STRIP_NOPS calls
2468 because they may change the signedness of the arguments. As pointers
2469 strictly don't have a signedness, require either two pointers or
2470 two non-pointers as well. */
2471 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2472 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2473 return 0;
2474
2475 /* We cannot consider pointers to different address spaces equal. */
2476 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2477 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2478 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2479 return 0;
2480
2481 /* If both types don't have the same precision, then it is not safe
2482 to strip NOPs. */
2483 if (element_precision (TREE_TYPE (arg0))
2484 != element_precision (TREE_TYPE (arg1)))
2485 return 0;
2486
2487 STRIP_NOPS (arg0);
2488 STRIP_NOPS (arg1);
2489
2490 /* In case both args are comparisons but with different comparison
2491 code, try to swap the comparison operands of one arg to produce
2492 a match and compare that variant. */
2493 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2494 && COMPARISON_CLASS_P (arg0)
2495 && COMPARISON_CLASS_P (arg1))
2496 {
2497 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2498
2499 if (TREE_CODE (arg0) == swap_code)
2500 return operand_equal_p (TREE_OPERAND (arg0, 0),
2501 TREE_OPERAND (arg1, 1), flags)
2502 && operand_equal_p (TREE_OPERAND (arg0, 1),
2503 TREE_OPERAND (arg1, 0), flags);
2504 }
2505
2506 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2507 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2508 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2509 return 0;
2510
2511 /* This is needed for conversions and for COMPONENT_REF.
2512 Might as well play it safe and always test this. */
2513 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2514 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2515 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2516 return 0;
2517
2518 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2519 We don't care about side effects in that case because the SAVE_EXPR
2520 takes care of that for us. In all other cases, two expressions are
2521 equal if they have no side effects. If we have two identical
2522 expressions with side effects that should be treated the same due
2523 to the only side effects being identical SAVE_EXPR's, that will
2524 be detected in the recursive calls below.
2525 If we are taking an invariant address of two identical objects
2526 they are necessarily equal as well. */
2527 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2528 && (TREE_CODE (arg0) == SAVE_EXPR
2529 || (flags & OEP_CONSTANT_ADDRESS_OF)
2530 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2531 return 1;
2532
2533 /* Next handle constant cases, those for which we can return 1 even
2534 if ONLY_CONST is set. */
2535 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2536 switch (TREE_CODE (arg0))
2537 {
2538 case INTEGER_CST:
2539 return tree_int_cst_equal (arg0, arg1);
2540
2541 case FIXED_CST:
2542 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2543 TREE_FIXED_CST (arg1));
2544
2545 case REAL_CST:
2546 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2547 TREE_REAL_CST (arg1)))
2548 return 1;
2549
2551 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2552 {
2553 /* If we do not distinguish between signed and unsigned zero,
2554 consider them equal. */
2555 if (real_zerop (arg0) && real_zerop (arg1))
2556 return 1;
2557 }
2558 return 0;
2559
2560 case VECTOR_CST:
2561 {
2562 unsigned i;
2563
2564 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2565 return 0;
2566
2567 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2568 {
2569 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2570 VECTOR_CST_ELT (arg1, i), flags))
2571 return 0;
2572 }
2573 return 1;
2574 }
2575
2576 case COMPLEX_CST:
2577 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2578 flags)
2579 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2580 flags));
2581
2582 case STRING_CST:
2583 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2584 && ! memcmp (TREE_STRING_POINTER (arg0),
2585 TREE_STRING_POINTER (arg1),
2586 TREE_STRING_LENGTH (arg0)));
2587
2588 case ADDR_EXPR:
2589 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2590 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2591 ? OEP_CONSTANT_ADDRESS_OF : 0);
2592 default:
2593 break;
2594 }
2595
2596 if (flags & OEP_ONLY_CONST)
2597 return 0;
2598
2599 /* Define macros to test an operand from arg0 and arg1 for equality and a
2600 variant that allows null and views null as being different from any
2601 non-null value. In the latter case, if either is null, then both
2602 must be; otherwise, do the normal comparison. */
2603 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2604 TREE_OPERAND (arg1, N), flags)
2605
2606 #define OP_SAME_WITH_NULL(N) \
2607 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2608 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2609
2610 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2611 {
2612 case tcc_unary:
2613 /* Two conversions are equal only if signedness and modes match. */
2614 switch (TREE_CODE (arg0))
2615 {
2616 CASE_CONVERT:
2617 case FIX_TRUNC_EXPR:
2618 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2619 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2620 return 0;
2621 break;
2622 default:
2623 break;
2624 }
2625
2626 return OP_SAME (0);
2627
2629 case tcc_comparison:
2630 case tcc_binary:
2631 if (OP_SAME (0) && OP_SAME (1))
2632 return 1;
2633
2634 /* For commutative ops, allow the other order. */
2635 return (commutative_tree_code (TREE_CODE (arg0))
2636 && operand_equal_p (TREE_OPERAND (arg0, 0),
2637 TREE_OPERAND (arg1, 1), flags)
2638 && operand_equal_p (TREE_OPERAND (arg0, 1),
2639 TREE_OPERAND (arg1, 0), flags));
2640
2641 case tcc_reference:
2642 /* If either of the pointer (or reference) expressions we are
2643 dereferencing contain a side effect, these cannot be equal,
2644 but their addresses can be. */
2645 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2646 && (TREE_SIDE_EFFECTS (arg0)
2647 || TREE_SIDE_EFFECTS (arg1)))
2648 return 0;
2649
2650 switch (TREE_CODE (arg0))
2651 {
2652 case INDIRECT_REF:
2653 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2654 return OP_SAME (0);
2655
2656 case REALPART_EXPR:
2657 case IMAGPART_EXPR:
2658 return OP_SAME (0);
2659
2660 case TARGET_MEM_REF:
2661 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2662 /* Require equal extra operands and then fall through to MEM_REF
2663 handling of the two common operands. */
2664 if (!OP_SAME_WITH_NULL (2)
2665 || !OP_SAME_WITH_NULL (3)
2666 || !OP_SAME_WITH_NULL (4))
2667 return 0;
2668 /* Fallthru. */
2669 case MEM_REF:
2670 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2671 /* Require equal access sizes, and similar pointer types.
2672 We can have incomplete types for array references of
2673 variable-sized arrays from the Fortran frontend
2674 though. Also verify the types are compatible. */
2675 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2676 || (TYPE_SIZE (TREE_TYPE (arg0))
2677 && TYPE_SIZE (TREE_TYPE (arg1))
2678 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2679 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2680 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2681 && alias_ptr_types_compatible_p
2682 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2683 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2684 && OP_SAME (0) && OP_SAME (1));
2685
2686 case ARRAY_REF:
2687 case ARRAY_RANGE_REF:
2688 /* Operands 2 and 3 may be null.
2689 Compare the array index by value if it is constant first as we
2690 may have different types but same value here. */
2691 if (!OP_SAME (0))
2692 return 0;
2693 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2694 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2695 TREE_OPERAND (arg1, 1))
2696 || OP_SAME (1))
2697 && OP_SAME_WITH_NULL (2)
2698 && OP_SAME_WITH_NULL (3));
2699
2700 case COMPONENT_REF:
2701 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2702 may be NULL when we're called to compare MEM_EXPRs. */
2703 if (!OP_SAME_WITH_NULL (0)
2704 || !OP_SAME (1))
2705 return 0;
2706 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2707 return OP_SAME_WITH_NULL (2);
2708
2709 case BIT_FIELD_REF:
2710 if (!OP_SAME (0))
2711 return 0;
2712 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2713 return OP_SAME (1) && OP_SAME (2);
2714
2715 default:
2716 return 0;
2717 }
2718
2719 case tcc_expression:
2720 switch (TREE_CODE (arg0))
2721 {
2722 case ADDR_EXPR:
2723 case TRUTH_NOT_EXPR:
2724 return OP_SAME (0);
2725
2726 case TRUTH_ANDIF_EXPR:
2727 case TRUTH_ORIF_EXPR:
2728 return OP_SAME (0) && OP_SAME (1);
2729
2730 case FMA_EXPR:
2731 case WIDEN_MULT_PLUS_EXPR:
2732 case WIDEN_MULT_MINUS_EXPR:
2733 if (!OP_SAME (2))
2734 return 0;
2735 /* The multiplication operands are commutative. */
2736 /* FALLTHRU */
2737
2738 case TRUTH_AND_EXPR:
2739 case TRUTH_OR_EXPR:
2740 case TRUTH_XOR_EXPR:
2741 if (OP_SAME (0) && OP_SAME (1))
2742 return 1;
2743
2744 /* Otherwise take into account this is a commutative operation. */
2745 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2746 TREE_OPERAND (arg1, 1), flags)
2747 && operand_equal_p (TREE_OPERAND (arg0, 1),
2748 TREE_OPERAND (arg1, 0), flags));
2749
2750 case COND_EXPR:
2751 case VEC_COND_EXPR:
2752 case DOT_PROD_EXPR:
2753 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2754
2755 default:
2756 return 0;
2757 }
2758
2759 case tcc_vl_exp:
2760 switch (TREE_CODE (arg0))
2761 {
2762 case CALL_EXPR:
2763 /* If the CALL_EXPRs call different functions, then they
2764 clearly cannot be equal. */
2765 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2766 flags))
2767 return 0;
2768
2769 {
2770 unsigned int cef = call_expr_flags (arg0);
2771 if (flags & OEP_PURE_SAME)
2772 cef &= ECF_CONST | ECF_PURE;
2773 else
2774 cef &= ECF_CONST;
2775 if (!cef)
2776 return 0;
2777 }
2778
2779 /* Now see if all the arguments are the same. */
2780 {
2781 const_call_expr_arg_iterator iter0, iter1;
2782 const_tree a0, a1;
2783 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2784 a1 = first_const_call_expr_arg (arg1, &iter1);
2785 a0 && a1;
2786 a0 = next_const_call_expr_arg (&iter0),
2787 a1 = next_const_call_expr_arg (&iter1))
2788 if (! operand_equal_p (a0, a1, flags))
2789 return 0;
2790
2791 /* If we get here and both argument lists are exhausted
2792 then the CALL_EXPRs are equal. */
2793 return ! (a0 || a1);
2794 }
2795 default:
2796 return 0;
2797 }
2798
2799 case tcc_declaration:
2800 /* Consider __builtin_sqrt equal to sqrt. */
2801 return (TREE_CODE (arg0) == FUNCTION_DECL
2802 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2803 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2804 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2805
2806 default:
2807 return 0;
2808 }
2809
2810 #undef OP_SAME
2811 #undef OP_SAME_WITH_NULL
2812 }
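
/* Informal examples of the above: operand_equal_p considers a + b and
   b + a equal because PLUS_EXPR is commutative, and treats two calls
   f () as equal only if F is declared const (or pure, when
   OEP_PURE_SAME is set).  When signed zeros are honored, 0.0 and -0.0
   compare unequal even though they are == at run time, since the
   function tests indistinguishability rather than numeric equality.  */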
2813 \f
2814 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2815 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2816
2817 When in doubt, return 0. */
2818
2819 static int
2820 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2821 {
2822 int unsignedp1, unsignedpo;
2823 tree primarg0, primarg1, primother;
2824 unsigned int correct_width;
2825
2826 if (operand_equal_p (arg0, arg1, 0))
2827 return 1;
2828
2829 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2830 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2831 return 0;
2832
2833 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2834 and see if the inner values are the same. This removes any
2835 signedness comparison, which doesn't matter here. */
2836 primarg0 = arg0, primarg1 = arg1;
2837 STRIP_NOPS (primarg0);
2838 STRIP_NOPS (primarg1);
2839 if (operand_equal_p (primarg0, primarg1, 0))
2840 return 1;
2841
2842 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2843 actual comparison operand, ARG0.
2844
2845 First throw away any conversions to wider types
2846 already present in the operands. */
2847
2848 primarg1 = get_narrower (arg1, &unsignedp1);
2849 primother = get_narrower (other, &unsignedpo);
2850
2851 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2852 if (unsignedp1 == unsignedpo
2853 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2854 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2855 {
2856 tree type = TREE_TYPE (arg0);
2857
2858 /* Make sure shorter operand is extended the right way
2859 to match the longer operand. */
2860 primarg1 = fold_convert (signed_or_unsigned_type_for
2861 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2862
2863 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2864 return 1;
2865 }
2866
2867 return 0;
2868 }
2869 \f
2870 /* See if ARG is an expression that is either a comparison or is performing
2871 arithmetic on comparisons. The comparisons must only be comparing
2872 two different values, which will be stored in *CVAL1 and *CVAL2; if
2873 they are nonzero it means that some operands have already been found.
2874 No variables may be used anywhere else in the expression except in the
2875 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2876 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2877
2878 If this is true, return 1. Otherwise, return zero. */
2879
2880 static int
2881 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2882 {
2883 enum tree_code code = TREE_CODE (arg);
2884 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2885
2886 /* We can handle some of the tcc_expression cases here. */
2887 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2888 tclass = tcc_unary;
2889 else if (tclass == tcc_expression
2890 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2891 || code == COMPOUND_EXPR))
2892 tclass = tcc_binary;
2893
2894 else if (tclass == tcc_expression && code == SAVE_EXPR
2895 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2896 {
2897 /* If we've already found a CVAL1 or CVAL2, this expression is
2898 too complex to handle. */
2899 if (*cval1 || *cval2)
2900 return 0;
2901
2902 tclass = tcc_unary;
2903 *save_p = 1;
2904 }
2905
2906 switch (tclass)
2907 {
2908 case tcc_unary:
2909 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2910
2911 case tcc_binary:
2912 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2913 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2914 cval1, cval2, save_p));
2915
2916 case tcc_constant:
2917 return 1;
2918
2919 case tcc_expression:
2920 if (code == COND_EXPR)
2921 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2922 cval1, cval2, save_p)
2923 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2924 cval1, cval2, save_p)
2925 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2926 cval1, cval2, save_p));
2927 return 0;
2928
2929 case tcc_comparison:
2930 /* First see if we can handle the first operand, then the second. For
2931 the second operand, we know *CVAL1 can't be zero. It must be that
2932 one side of the comparison is each of the values; test for the
2933 case where this isn't true by failing if the two operands
2934 are the same. */
2935
2936 if (operand_equal_p (TREE_OPERAND (arg, 0),
2937 TREE_OPERAND (arg, 1), 0))
2938 return 0;
2939
2940 if (*cval1 == 0)
2941 *cval1 = TREE_OPERAND (arg, 0);
2942 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2943 ;
2944 else if (*cval2 == 0)
2945 *cval2 = TREE_OPERAND (arg, 0);
2946 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2947 ;
2948 else
2949 return 0;
2950
2951 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2952 ;
2953 else if (*cval2 == 0)
2954 *cval2 = TREE_OPERAND (arg, 1);
2955 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2956 ;
2957 else
2958 return 0;
2959
2960 return 1;
2961
2962 default:
2963 return 0;
2964 }
2965 }
2966 \f
2967 /* ARG is a tree that is known to contain just arithmetic operations and
2968 comparisons. Evaluate the operations in the tree substituting NEW0 for
2969 any occurrence of OLD0 as an operand of a comparison and likewise for
2970 NEW1 and OLD1. */
2971
2972 static tree
2973 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2974 tree old1, tree new1)
2975 {
2976 tree type = TREE_TYPE (arg);
2977 enum tree_code code = TREE_CODE (arg);
2978 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2979
2980 /* We can handle some of the tcc_expression cases here. */
2981 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2982 tclass = tcc_unary;
2983 else if (tclass == tcc_expression
2984 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2985 tclass = tcc_binary;
2986
2987 switch (tclass)
2988 {
2989 case tcc_unary:
2990 return fold_build1_loc (loc, code, type,
2991 eval_subst (loc, TREE_OPERAND (arg, 0),
2992 old0, new0, old1, new1));
2993
2994 case tcc_binary:
2995 return fold_build2_loc (loc, code, type,
2996 eval_subst (loc, TREE_OPERAND (arg, 0),
2997 old0, new0, old1, new1),
2998 eval_subst (loc, TREE_OPERAND (arg, 1),
2999 old0, new0, old1, new1));
3000
3001 case tcc_expression:
3002 switch (code)
3003 {
3004 case SAVE_EXPR:
3005 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3006 old1, new1);
3007
3008 case COMPOUND_EXPR:
3009 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3010 old1, new1);
3011
3012 case COND_EXPR:
3013 return fold_build3_loc (loc, code, type,
3014 eval_subst (loc, TREE_OPERAND (arg, 0),
3015 old0, new0, old1, new1),
3016 eval_subst (loc, TREE_OPERAND (arg, 1),
3017 old0, new0, old1, new1),
3018 eval_subst (loc, TREE_OPERAND (arg, 2),
3019 old0, new0, old1, new1));
3020 default:
3021 break;
3022 }
3023 /* Fall through - ??? */
3024
3025 case tcc_comparison:
3026 {
3027 tree arg0 = TREE_OPERAND (arg, 0);
3028 tree arg1 = TREE_OPERAND (arg, 1);
3029
3030 /* We need to check both for exact equality and tree equality. The
3031 former will be true if the operand has a side-effect. In that
3032 case, we know the operand occurred exactly once. */
3033
3034 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3035 arg0 = new0;
3036 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3037 arg0 = new1;
3038
3039 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3040 arg1 = new0;
3041 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3042 arg1 = new1;
3043
3044 return fold_build2_loc (loc, code, type, arg0, arg1);
3045 }
3046
3047 default:
3048 return arg;
3049 }
3050 }
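
/* For illustration: given ARG = (a < b) && (b < c), the call

     eval_subst (loc, arg, a, x, b, y)

   rebuilds the tree as (x < y) && (y < c); the substitution happens
   only at the operands of the comparisons.  (A hypothetical sketch;
   A, B, C, X and Y stand for operand trees.)  */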
3051 \f
3052 /* Return a tree for the case when the result of an expression is RESULT
3053 converted to TYPE and OMITTED was previously an operand of the expression
3054 but is now not needed (e.g., we folded OMITTED * 0).
3055
3056 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3057 the conversion of RESULT to TYPE. */
3058
3059 tree
3060 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3061 {
3062 tree t = fold_convert_loc (loc, type, result);
3063
3064 /* If the resulting operand is an empty statement, just return the omitted
3065 statement cast to void. */
3066 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3067 return build1_loc (loc, NOP_EXPR, void_type_node,
3068 fold_ignored_result (omitted));
3069
3070 if (TREE_SIDE_EFFECTS (omitted))
3071 return build2_loc (loc, COMPOUND_EXPR, type,
3072 fold_ignored_result (omitted), t);
3073
3074 return non_lvalue_loc (loc, t);
3075 }
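
/* Example of the above: when x () * 0 is folded, the multiplication
   disappears but the call must still be evaluated, so

     omit_one_operand_loc (loc, type, integer_zero_node, call)

   yields the COMPOUND_EXPR (call, 0).  If the omitted operand had no
   side effects, it would simply return the converted constant.  */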
3076
3077 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3078
3079 static tree
3080 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3081 tree omitted)
3082 {
3083 tree t = fold_convert_loc (loc, type, result);
3084
3085 /* If the resulting operand is an empty statement, just return the omitted
3086 statement cast to void. */
3087 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3088 return build1_loc (loc, NOP_EXPR, void_type_node,
3089 fold_ignored_result (omitted));
3090
3091 if (TREE_SIDE_EFFECTS (omitted))
3092 return build2_loc (loc, COMPOUND_EXPR, type,
3093 fold_ignored_result (omitted), t);
3094
3095 return pedantic_non_lvalue_loc (loc, t);
3096 }
3097
3098 /* Return a tree for the case when the result of an expression is RESULT
3099 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3100 of the expression but are now not needed.
3101
3102 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3103 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3104 evaluated before OMITTED2. Otherwise, if neither has side effects,
3105 just do the conversion of RESULT to TYPE. */
3106
3107 tree
3108 omit_two_operands_loc (location_t loc, tree type, tree result,
3109 tree omitted1, tree omitted2)
3110 {
3111 tree t = fold_convert_loc (loc, type, result);
3112
3113 if (TREE_SIDE_EFFECTS (omitted2))
3114 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3115 if (TREE_SIDE_EFFECTS (omitted1))
3116 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3117
3118 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3119 }
3120
3121 \f
3122 /* Return a simplified tree node for the truth-negation of ARG. This
3123 never alters ARG itself. We assume that ARG is an operation that
3124 returns a truth value (0 or 1).
3125
3126 FIXME: one would think we would fold the result, but it causes
3127 problems with the dominator optimizer. */
3128
3129 static tree
3130 fold_truth_not_expr (location_t loc, tree arg)
3131 {
3132 tree type = TREE_TYPE (arg);
3133 enum tree_code code = TREE_CODE (arg);
3134 location_t loc1, loc2;
3135
3136 /* If this is a comparison, we can simply invert it, except for
3137 floating-point non-equality comparisons under -ftrapping-math, where
3138 we return NULL_TREE so the caller wraps a TRUTH_NOT_EXPR instead. */
3139
3140 if (TREE_CODE_CLASS (code) == tcc_comparison)
3141 {
3142 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3143 if (FLOAT_TYPE_P (op_type)
3144 && flag_trapping_math
3145 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3146 && code != NE_EXPR && code != EQ_EXPR)
3147 return NULL_TREE;
3148
3149 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3150 if (code == ERROR_MARK)
3151 return NULL_TREE;
3152
3153 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3154 TREE_OPERAND (arg, 1));
3155 }
3156
3157 switch (code)
3158 {
3159 case INTEGER_CST:
3160 return constant_boolean_node (integer_zerop (arg), type);
3161
3162 case TRUTH_AND_EXPR:
3163 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3164 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3165 return build2_loc (loc, TRUTH_OR_EXPR, type,
3166 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3167 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3168
3169 case TRUTH_OR_EXPR:
3170 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3171 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3172 return build2_loc (loc, TRUTH_AND_EXPR, type,
3173 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3174 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3175
3176 case TRUTH_XOR_EXPR:
3177 /* Here we can invert either operand. We invert the first operand
3178 unless the second operand is a TRUTH_NOT_EXPR in which case our
3179 result is the XOR of the first operand with the inside of the
3180 negation of the second operand. */
3181
3182 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3183 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3184 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3185 else
3186 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3187 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3188 TREE_OPERAND (arg, 1));
3189
3190 case TRUTH_ANDIF_EXPR:
3191 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3192 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3193 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3194 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3195 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3196
3197 case TRUTH_ORIF_EXPR:
3198 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3199 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3200 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3201 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3202 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3203
3204 case TRUTH_NOT_EXPR:
3205 return TREE_OPERAND (arg, 0);
3206
3207 case COND_EXPR:
3208 {
3209 tree arg1 = TREE_OPERAND (arg, 1);
3210 tree arg2 = TREE_OPERAND (arg, 2);
3211
3212 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3213 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3214
3215 /* A COND_EXPR may have a throw as one operand, which
3216 then has void type. Just leave void operands
3217 as they are. */
3218 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3219 VOID_TYPE_P (TREE_TYPE (arg1))
3220 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3221 VOID_TYPE_P (TREE_TYPE (arg2))
3222 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3223 }
3224
3225 case COMPOUND_EXPR:
3226 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3227 return build2_loc (loc, COMPOUND_EXPR, type,
3228 TREE_OPERAND (arg, 0),
3229 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3230
3231 case NON_LVALUE_EXPR:
3232 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3233 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3234
3235 CASE_CONVERT:
3236 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3237 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3238
3239 /* ... fall through ... */
3240
3241 case FLOAT_EXPR:
3242 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3243 return build1_loc (loc, TREE_CODE (arg), type,
3244 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3245
3246 case BIT_AND_EXPR:
3247 if (!integer_onep (TREE_OPERAND (arg, 1)))
3248 return NULL_TREE;
3249 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3250
3251 case SAVE_EXPR:
3252 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3253
3254 case CLEANUP_POINT_EXPR:
3255 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3256 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3257 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3258
3259 default:
3260 return NULL_TREE;
3261 }
3262 }
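
/* Informal examples of the above: the negation of a && b becomes
   !a || !b by De Morgan, a comparison such as x == y is inverted
   directly to x != y, and a floating-point x < y under
   -ftrapping-math is left alone (NULL_TREE is returned) because the
   inverted UNGE form would not trap on NaNs.  */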
3263
3264 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3265 assume that ARG is an operation that returns a truth value (0 or 1
3266 for scalars, 0 or -1 for vectors). Return the folded expression if
3267 folding is successful. Otherwise, return NULL_TREE. */
3268
3269 static tree
3270 fold_invert_truthvalue (location_t loc, tree arg)
3271 {
3272 tree type = TREE_TYPE (arg);
3273 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3274 ? BIT_NOT_EXPR
3275 : TRUTH_NOT_EXPR,
3276 type, arg);
3277 }
3278
3279 /* Return a simplified tree node for the truth-negation of ARG. This
3280 never alters ARG itself. We assume that ARG is an operation that
3281 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3282
3283 tree
3284 invert_truthvalue_loc (location_t loc, tree arg)
3285 {
3286 if (TREE_CODE (arg) == ERROR_MARK)
3287 return arg;
3288
3289 tree type = TREE_TYPE (arg);
3290 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3291 ? BIT_NOT_EXPR
3292 : TRUTH_NOT_EXPR,
3293 type, arg);
3294 }
3295
3296 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3297 operands are another bit-wise operation with a common input. If so,
3298 distribute the bit operations to save an operation and possibly two if
3299 constants are involved. For example, convert
3300 (A | B) & (A | C) into A | (B & C)
3301 Further simplification will occur if B and C are constants.
3302
3303 If this optimization cannot be done, 0 will be returned. */
3304
3305 static tree
3306 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3307 tree arg0, tree arg1)
3308 {
3309 tree common;
3310 tree left, right;
3311
3312 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3313 || TREE_CODE (arg0) == code
3314 || (TREE_CODE (arg0) != BIT_AND_EXPR
3315 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3316 return 0;
3317
3318 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3319 {
3320 common = TREE_OPERAND (arg0, 0);
3321 left = TREE_OPERAND (arg0, 1);
3322 right = TREE_OPERAND (arg1, 1);
3323 }
3324 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3325 {
3326 common = TREE_OPERAND (arg0, 0);
3327 left = TREE_OPERAND (arg0, 1);
3328 right = TREE_OPERAND (arg1, 0);
3329 }
3330 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3331 {
3332 common = TREE_OPERAND (arg0, 1);
3333 left = TREE_OPERAND (arg0, 0);
3334 right = TREE_OPERAND (arg1, 1);
3335 }
3336 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3337 {
3338 common = TREE_OPERAND (arg0, 1);
3339 left = TREE_OPERAND (arg0, 0);
3340 right = TREE_OPERAND (arg1, 0);
3341 }
3342 else
3343 return 0;
3344
3345 common = fold_convert_loc (loc, type, common);
3346 left = fold_convert_loc (loc, type, left);
3347 right = fold_convert_loc (loc, type, right);
3348 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3349 fold_build2_loc (loc, code, type, left, right));
3350 }
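
/* A concrete instance of the transformation above (sketch; A stands
   for any integral operand):

     (a | 0xf0) & (a | 0x0f)  -->  a | (0xf0 & 0x0f)  -->  a | 0

   so two IORs and an AND reduce to the single operand A once the
   constants fold.  */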
3351
3352 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs or MULT_EXPRs, simplify
3353 a binary operation with code CODE. This optimization is unsafe. */
3354 static tree
3355 distribute_real_division (location_t loc, enum tree_code code, tree type,
3356 tree arg0, tree arg1)
3357 {
3358 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3359 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3360
3361 /* (A / C) +- (B / C) -> (A +- B) / C. */
3362 if (mul0 == mul1
3363 && operand_equal_p (TREE_OPERAND (arg0, 1),
3364 TREE_OPERAND (arg1, 1), 0))
3365 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3366 fold_build2_loc (loc, code, type,
3367 TREE_OPERAND (arg0, 0),
3368 TREE_OPERAND (arg1, 0)),
3369 TREE_OPERAND (arg0, 1));
3370
3371 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3372 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3373 TREE_OPERAND (arg1, 0), 0)
3374 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3375 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3376 {
3377 REAL_VALUE_TYPE r0, r1;
3378 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3379 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3380 if (!mul0)
3381 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3382 if (!mul1)
3383 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3384 real_arithmetic (&r0, code, &r0, &r1);
3385 return fold_build2_loc (loc, MULT_EXPR, type,
3386 TREE_OPERAND (arg0, 0),
3387 build_real (type, r0));
3388 }
3389
3390 return NULL_TREE;
3391 }
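
/* Informal examples of the above: a/c + b/c becomes (a + b) / c, and
   a/2.0 + a/4.0 becomes a * 0.75 by folding 1/2 + 1/4 into a single
   constant.  Both rewrites can change rounding and exception behavior,
   which is why the comment above marks this optimization unsafe;
   callers are expected to enable it only under unsafe-math flags.  */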
3392 \f
3393 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3394 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3395
3396 static tree
3397 make_bit_field_ref (location_t loc, tree inner, tree type,
3398 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3399 {
3400 tree result, bftype;
3401
3402 if (bitpos == 0)
3403 {
3404 tree size = TYPE_SIZE (TREE_TYPE (inner));
3405 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3406 || POINTER_TYPE_P (TREE_TYPE (inner)))
3407 && tree_fits_shwi_p (size)
3408 && tree_to_shwi (size) == bitsize)
3409 return fold_convert_loc (loc, type, inner);
3410 }
3411
3412 bftype = type;
3413 if (TYPE_PRECISION (bftype) != bitsize
3414 || TYPE_UNSIGNED (bftype) == !unsignedp)
3415 bftype = build_nonstandard_integer_type (bitsize, 0);
3416
3417 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3418 size_int (bitsize), bitsize_int (bitpos));
3419
3420 if (bftype != type)
3421 result = fold_convert_loc (loc, type, result);
3422
3423 return result;
3424 }
3425
3426 /* Optimize a bit-field compare.
3427
3428 There are two cases: the first is a compare against a constant and the
3429 second is a comparison of two items where the fields are at the same
3430 bit position relative to the start of a chunk (byte, halfword, word)
3431 large enough to contain it. In these cases we can avoid the shift
3432 implicit in bitfield extractions.
3433
3434 For constants, we emit a compare of the shifted constant with the
3435 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3436 compared. For two fields at the same position, we do the ANDs with the
3437 similar mask and compare the result of the ANDs.
3438
3439 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3440 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3441 are the left and right operands of the comparison, respectively.
3442
3443 If the optimization described above can be done, we return the resulting
3444 tree. Otherwise we return zero. */
3445
3446 static tree
3447 optimize_bit_field_compare (location_t loc, enum tree_code code,
3448 tree compare_type, tree lhs, tree rhs)
3449 {
3450 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3451 tree type = TREE_TYPE (lhs);
3452 tree unsigned_type;
3453 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3454 machine_mode lmode, rmode, nmode;
3455 int lunsignedp, runsignedp;
3456 int lvolatilep = 0, rvolatilep = 0;
3457 tree linner, rinner = NULL_TREE;
3458 tree mask;
3459 tree offset;
3460
3461 /* Get all the information about the extractions being done. If the bit size
3462 is the same as the size of the underlying object, we aren't doing an
3463 extraction at all and so can do nothing. We also don't want to
3464 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3465 then will no longer be able to replace it. */
3466 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3467 &lunsignedp, &lvolatilep, false);
3468 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3469 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3470 return 0;
3471
3472 if (!const_p)
3473 {
3474 /* If this is not a constant, we can only do something if bit positions,
3475 sizes, and signedness are the same. */
3476 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3477 &runsignedp, &rvolatilep, false);
3478
3479 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3480 || lunsignedp != runsignedp || offset != 0
3481 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3482 return 0;
3483 }
3484
3485 /* See if we can find a mode to refer to this field. We should be able to,
3486 but fail if we can't. */
3487 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3488 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3489 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3490 TYPE_ALIGN (TREE_TYPE (rinner))),
3491 word_mode, false);
3492 if (nmode == VOIDmode)
3493 return 0;
3494
3495 /* Set signed and unsigned types of the precision of this mode for the
3496 shifts below. */
3497 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3498
3499 /* Compute the bit position and size for the new reference and our offset
3500 within it. If the new reference is the same size as the original, we
3501 won't optimize anything, so return zero. */
3502 nbitsize = GET_MODE_BITSIZE (nmode);
3503 nbitpos = lbitpos & ~ (nbitsize - 1);
3504 lbitpos -= nbitpos;
3505 if (nbitsize == lbitsize)
3506 return 0;
3507
3508 if (BYTES_BIG_ENDIAN)
3509 lbitpos = nbitsize - lbitsize - lbitpos;
3510
3511 /* Make the mask to be used against the extracted field. */
3512 mask = build_int_cst_type (unsigned_type, -1);
3513 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3514 mask = const_binop (RSHIFT_EXPR, mask,
3515 size_int (nbitsize - lbitsize - lbitpos));
3516
3517 if (! const_p)
3518 /* If not comparing with a constant, just rework the comparison
3519 and return. */
3520 return fold_build2_loc (loc, code, compare_type,
3521 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3522 make_bit_field_ref (loc, linner,
3523 unsigned_type,
3524 nbitsize, nbitpos,
3525 1),
3526 mask),
3527 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3528 make_bit_field_ref (loc, rinner,
3529 unsigned_type,
3530 nbitsize, nbitpos,
3531 1),
3532 mask));
3533
3534 /* Otherwise, we are handling the constant case. See if the constant is too
3535 big for the field. Warn and return a tree for 0 (false) if so. We do
3536 this not only for its own sake, but to avoid having to test for this
3537 error case below. If we didn't, we might generate wrong code.
3538
3539 For unsigned fields, the constant shifted right by the field length should
3540 be all zero. For signed fields, the high-order bits should agree with
3541 the sign bit. */
3542
3543 if (lunsignedp)
3544 {
3545 if (wi::lrshift (rhs, lbitsize) != 0)
3546 {
3547 warning (0, "comparison is always %d due to width of bit-field",
3548 code == NE_EXPR);
3549 return constant_boolean_node (code == NE_EXPR, compare_type);
3550 }
3551 }
3552 else
3553 {
3554 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3555 if (tem != 0 && tem != -1)
3556 {
3557 warning (0, "comparison is always %d due to width of bit-field",
3558 code == NE_EXPR);
3559 return constant_boolean_node (code == NE_EXPR, compare_type);
3560 }
3561 }
3562
3563 /* Single-bit compares should always be against zero. */
3564 if (lbitsize == 1 && ! integer_zerop (rhs))
3565 {
3566 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3567 rhs = build_int_cst (type, 0);
3568 }
3569
3570 /* Make a new bit-field reference, shift the constant over the
3571 appropriate number of bits, and mask it with the computed mask
3572 (in case this was a signed field). If it changed, a new constant is built. */
3573 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3574
3575 rhs = const_binop (BIT_AND_EXPR,
3576 const_binop (LSHIFT_EXPR,
3577 fold_convert_loc (loc, unsigned_type, rhs),
3578 size_int (lbitpos)),
3579 mask);
3580
3581 lhs = build2_loc (loc, code, compare_type,
3582 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3583 return lhs;
3584 }
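
/* A sketch of the constant case above: for

     struct { unsigned a : 3; unsigned b : 5; } s;  ...  s.b == 7

   the shift implicit in extracting S.B is avoided by masking a whole
   byte (or larger unit) and comparing against the constant pre-shifted
   into the field's position, roughly

     (word & mask) == (7 << shift)

   where MASK and SHIFT follow from the field's offset within the
   smallest mode-sized unit that covers it.  */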
3585 \f
3586 /* Subroutine for fold_truth_andor_1: decode a field reference.
3587
3588 If EXP is a comparison reference, we return the innermost reference.
3589
3590 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3591 set to the starting bit number.
3592
3593 If the innermost field can be completely contained in a mode-sized
3594 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3595
3596 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3597 otherwise it is not changed.
3598
3599 *PUNSIGNEDP is set to the signedness of the field.
3600
3601 *PMASK is set to the mask used. This is either contained in a
3602 BIT_AND_EXPR or derived from the width of the field.
3603
3604 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3605
3606 Return 0 if this is not a component reference or is one that we can't
3607 do anything with. */
3608
3609 static tree
3610 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3611 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3612 int *punsignedp, int *pvolatilep,
3613 tree *pmask, tree *pand_mask)
3614 {
3615 tree outer_type = 0;
3616 tree and_mask = 0;
3617 tree mask, inner, offset;
3618 tree unsigned_type;
3619 unsigned int precision;
3620
3621 /* All the optimizations using this function assume integer fields.
3622 There are problems with FP fields since the type_for_size call
3623 below can fail for, e.g., XFmode. */
3624 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3625 return 0;
3626
3627 /* We are interested in the bare arrangement of bits, so strip everything
3628 that doesn't affect the machine mode. However, record the type of the
3629 outermost expression if it may matter below. */
3630 if (CONVERT_EXPR_P (exp)
3631 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3632 outer_type = TREE_TYPE (exp);
3633 STRIP_NOPS (exp);
3634
3635 if (TREE_CODE (exp) == BIT_AND_EXPR)
3636 {
3637 and_mask = TREE_OPERAND (exp, 1);
3638 exp = TREE_OPERAND (exp, 0);
3639 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3640 if (TREE_CODE (and_mask) != INTEGER_CST)
3641 return 0;
3642 }
3643
3644 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3645 punsignedp, pvolatilep, false);
3646 if ((inner == exp && and_mask == 0)
3647 || *pbitsize < 0 || offset != 0
3648 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3649 return 0;
3650
3651 /* If the number of bits in the reference is the same as the bitsize of
3652 the outer type, then the outer type gives the signedness. Otherwise
3653 (in case of a small bitfield) the signedness is unchanged. */
3654 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3655 *punsignedp = TYPE_UNSIGNED (outer_type);
3656
3657 /* Compute the mask to access the bitfield. */
3658 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3659 precision = TYPE_PRECISION (unsigned_type);
3660
3661 mask = build_int_cst_type (unsigned_type, -1);
3662
3663 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3664 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3665
3666 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3667 if (and_mask != 0)
3668 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3669 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3670
3671 *pmask = mask;
3672 *pand_mask = and_mask;
3673 return inner;
3674 }
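
/* Illustrative sketch, not part of GCC: the double shift above leaves
   exactly *PBITSIZE low-order ones in the mask. A hypothetical check
   for precision 8 and bitsize 3, in plain C arithmetic: */
#if 0
static int
field_mask_example (void)
{
  unsigned char mask = (unsigned char) ~0u;  /* 0xff */
  mask = (unsigned char) (mask << (8 - 3));  /* 0xe0 */
  mask = (unsigned char) (mask >> (8 - 3));  /* 0x07: low 3 bits. */
  return mask == 0x07;
}
#endif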
3675
3676 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3677 bit positions and the type of MASK is signed.
3678
3679 static int
3680 all_ones_mask_p (const_tree mask, unsigned int size)
3681 {
3682 tree type = TREE_TYPE (mask);
3683 unsigned int precision = TYPE_PRECISION (type);
3684
3685 /* If this function returns true when the type of the mask is
3686 UNSIGNED, then there will be errors. In particular see
3687 gcc.c-torture/execute/990326-1.c. There does not appear to be
3688 any documentation paper trail as to why this is so. But the pre
3689 wide-int worked with that restriction and it has been preserved
3690 here. */
3691 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3692 return false;
3693
3694 return wi::mask (size, false, precision) == mask;
3695 }
3696
3697 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3698 represents the sign bit of EXP's type. If EXP represents a sign
3699 or zero extension, also test VAL against the unextended type.
3700 The return value is the (sub)expression whose sign bit is VAL,
3701 or NULL_TREE otherwise. */
3702
3703 static tree
3704 sign_bit_p (tree exp, const_tree val)
3705 {
3706 int width;
3707 tree t;
3708
3709 /* Tree EXP must have an integral type. */
3710 t = TREE_TYPE (exp);
3711 if (! INTEGRAL_TYPE_P (t))
3712 return NULL_TREE;
3713
3714 /* Tree VAL must be an integer constant. */
3715 if (TREE_CODE (val) != INTEGER_CST
3716 || TREE_OVERFLOW (val))
3717 return NULL_TREE;
3718
3719 width = TYPE_PRECISION (t);
3720 if (wi::only_sign_bit_p (val, width))
3721 return exp;
3722
3723 /* Handle extension from a narrower type. */
3724 if (TREE_CODE (exp) == NOP_EXPR
3725 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3726 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3727
3728 return NULL_TREE;
3729 }
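
/* Illustrative sketch, not part of GCC: folds that use sign_bit_p
   rely on the equivalence below, shown for a hypothetical 16-bit
   two's-complement short whose sign-bit constant is 0x8000. */
#if 0
static int
sign_bit_example (short x)
{
  int by_mask = (((unsigned short) x & 0x8000u) != 0);
  int by_sign = (x < 0);
  return by_mask == by_sign;  /* Always 1 under the assumptions. */
}
#endif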
3730
3731 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3732 to be evaluated unconditionally. */
3733
3734 static int
3735 simple_operand_p (const_tree exp)
3736 {
3737 /* Strip any conversions that don't change the machine mode. */
3738 STRIP_NOPS (exp);
3739
3740 return (CONSTANT_CLASS_P (exp)
3741 || TREE_CODE (exp) == SSA_NAME
3742 || (DECL_P (exp)
3743 && ! TREE_ADDRESSABLE (exp)
3744 && ! TREE_THIS_VOLATILE (exp)
3745 && ! DECL_NONLOCAL (exp)
3746 /* Don't regard global variables as simple. They may be
3747 allocated in ways unknown to the compiler (shared memory,
3748 #pragma weak, etc). */
3749 && ! TREE_PUBLIC (exp)
3750 && ! DECL_EXTERNAL (exp)
3751 /* Weakrefs are not safe to be read, since they can be NULL.
3752 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3753 have DECL_WEAK flag set. */
3754 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3755 /* Loading a static variable is unduly expensive, but global
3756 registers aren't expensive. */
3757 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3758 }
3759
3760 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3761 to be evaluated unconditionally.
3762 In addition to simple_operand_p, we assume that comparisons, conversions,
3763 and logic-not operations are simple, if their operands are simple, too. */
3764
3765 static bool
3766 simple_operand_p_2 (tree exp)
3767 {
3768 enum tree_code code;
3769
3770 if (TREE_SIDE_EFFECTS (exp)
3771 || tree_could_trap_p (exp))
3772 return false;
3773
3774 while (CONVERT_EXPR_P (exp))
3775 exp = TREE_OPERAND (exp, 0);
3776
3777 code = TREE_CODE (exp);
3778
3779 if (TREE_CODE_CLASS (code) == tcc_comparison)
3780 return (simple_operand_p (TREE_OPERAND (exp, 0))
3781 && simple_operand_p (TREE_OPERAND (exp, 1)));
3782
3783 if (code == TRUTH_NOT_EXPR)
3784 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3785
3786 return simple_operand_p (exp);
3787 }
3788
3789 \f
3790 /* The following functions are subroutines to fold_range_test and allow it to
3791 try to change a logical combination of comparisons into a range test.
3792
3793 For example, both
3794 X == 2 || X == 3 || X == 4 || X == 5
3795 and
3796 X >= 2 && X <= 5
3797 are converted to
3798 (unsigned) (X - 2) <= 3
3799
3800 We describe each set of comparisons as being either inside or outside
3801 a range, using a variable named like IN_P, and then describe the
3802 range with a lower and upper bound. If one of the bounds is omitted,
3803 it represents either the highest or lowest value of the type.
3804
3805 In the comments below, we represent a range by two numbers in brackets
3806 preceded by a "+" to designate being inside that range, or a "-" to
3807 designate being outside that range, so the condition can be inverted by
3808 flipping the prefix. An omitted bound is represented by a "-". For
3809 example, "- [-, 10]" means being outside the range starting at the lowest
3810 possible value and ending at 10, in other words, being greater than 10.
3811 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3812 always false.
3813
3814 We set up things so that the missing bounds are handled in a consistent
3815 manner so neither a missing bound nor "true" and "false" need to be
3816 handled using a special case. */
3817
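/* Illustrative sketch, not part of GCC: a hypothetical source-level
   view of the conversion described above. All three forms denote the
   range + [2, 5]; the last is the shape that build_range_check below
   produces. */
#if 0
static int
range_test_example (int x)
{
  int chained = (x == 2 || x == 3 || x == 4 || x == 5);
  int anded = (x >= 2 && x <= 5);
  int ranged = ((unsigned) (x - 2) <= 3);
  return chained == ranged && anded == ranged;  /* Always 1. */
}
#endif
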
3818 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3819 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3820 and UPPER1_P are nonzero if the respective argument is an upper bound
3821 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3822 must be specified for a comparison. ARG1 will be converted to ARG0's
3823 type if both are specified. */
3824
3825 static tree
3826 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3827 tree arg1, int upper1_p)
3828 {
3829 tree tem;
3830 int result;
3831 int sgn0, sgn1;
3832
3833 /* If neither arg represents infinity, do the normal operation.
3834 Else, if not a comparison, return infinity. Else handle the special
3835 comparison rules. Note that most of the cases below won't occur, but
3836 are handled for consistency. */
3837
3838 if (arg0 != 0 && arg1 != 0)
3839 {
3840 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3841 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3842 STRIP_NOPS (tem);
3843 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3844 }
3845
3846 if (TREE_CODE_CLASS (code) != tcc_comparison)
3847 return 0;
3848
3849 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3850 for neither. In real mathematics we could not assume that open-ended
3851 ranges are the same. But this is computer arithmetic, where numbers are
3852 finite. We can therefore substitute for a missing bound a value Z lying
3853 beyond every representable number. This permits us to treat unbounded
3854 ranges as equal.
3855 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3856 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3857 switch (code)
3858 {
3859 case EQ_EXPR:
3860 result = sgn0 == sgn1;
3861 break;
3862 case NE_EXPR:
3863 result = sgn0 != sgn1;
3864 break;
3865 case LT_EXPR:
3866 result = sgn0 < sgn1;
3867 break;
3868 case LE_EXPR:
3869 result = sgn0 <= sgn1;
3870 break;
3871 case GT_EXPR:
3872 result = sgn0 > sgn1;
3873 break;
3874 case GE_EXPR:
3875 result = sgn0 >= sgn1;
3876 break;
3877 default:
3878 gcc_unreachable ();
3879 }
3880
3881 return constant_boolean_node (result, type);
3882 }
3883 \f
3884 /* Helper routine for make_range. Perform one step for it, return
3885 new expression if the loop should continue or NULL_TREE if it should
3886 stop. */
3887
3888 tree
3889 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3890 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3891 bool *strict_overflow_p)
3892 {
3893 tree arg0_type = TREE_TYPE (arg0);
3894 tree n_low, n_high, low = *p_low, high = *p_high;
3895 int in_p = *p_in_p, n_in_p;
3896
3897 switch (code)
3898 {
3899 case TRUTH_NOT_EXPR:
3900 /* We can only do something if the range is testing for zero. */
3901 if (low == NULL_TREE || high == NULL_TREE
3902 || ! integer_zerop (low) || ! integer_zerop (high))
3903 return NULL_TREE;
3904 *p_in_p = ! in_p;
3905 return arg0;
3906
3907 case EQ_EXPR: case NE_EXPR:
3908 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3909 /* We can only do something if the range is testing for zero
3910 and if the second operand is an integer constant. Note that
3911 saying something is "in" the range we make is done by
3912 complementing IN_P, since IN_P initially encodes being not equal
3913 to zero; "out" is leaving it alone.
3914 if (low == NULL_TREE || high == NULL_TREE
3915 || ! integer_zerop (low) || ! integer_zerop (high)
3916 || TREE_CODE (arg1) != INTEGER_CST)
3917 return NULL_TREE;
3918
3919 switch (code)
3920 {
3921 case NE_EXPR: /* - [c, c] */
3922 low = high = arg1;
3923 break;
3924 case EQ_EXPR: /* + [c, c] */
3925 in_p = ! in_p, low = high = arg1;
3926 break;
3927 case GT_EXPR: /* - [-, c] */
3928 low = 0, high = arg1;
3929 break;
3930 case GE_EXPR: /* + [c, -] */
3931 in_p = ! in_p, low = arg1, high = 0;
3932 break;
3933 case LT_EXPR: /* - [c, -] */
3934 low = arg1, high = 0;
3935 break;
3936 case LE_EXPR: /* + [-, c] */
3937 in_p = ! in_p, low = 0, high = arg1;
3938 break;
3939 default:
3940 gcc_unreachable ();
3941 }
3942
3943 /* If this is an unsigned comparison, we also know that EXP is
3944 greater than or equal to zero. We base the range tests we make
3945 on that fact, so we record it here so we can parse existing
3946 range tests. We test arg0_type since often the return type
3947 of, e.g. EQ_EXPR, is boolean. */
3948 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3949 {
3950 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3951 in_p, low, high, 1,
3952 build_int_cst (arg0_type, 0),
3953 NULL_TREE))
3954 return NULL_TREE;
3955
3956 in_p = n_in_p, low = n_low, high = n_high;
3957
3958 /* If the high bound is missing, but we have a nonzero low
3959 bound, reverse the range so it goes from zero to the low bound
3960 minus 1. */
3961 if (high == 0 && low && ! integer_zerop (low))
3962 {
3963 in_p = ! in_p;
3964 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3965 build_int_cst (TREE_TYPE (low), 1), 0);
3966 low = build_int_cst (arg0_type, 0);
3967 }
3968 }
3969
3970 *p_low = low;
3971 *p_high = high;
3972 *p_in_p = in_p;
3973 return arg0;
3974
3975 case NEGATE_EXPR:
3976 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3977 low and high are non-NULL, then normalize will DTRT. */
3978 if (!TYPE_UNSIGNED (arg0_type)
3979 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3980 {
3981 if (low == NULL_TREE)
3982 low = TYPE_MIN_VALUE (arg0_type);
3983 if (high == NULL_TREE)
3984 high = TYPE_MAX_VALUE (arg0_type);
3985 }
3986
3987 /* (-x) IN [a,b] -> x in [-b, -a] */
3988 n_low = range_binop (MINUS_EXPR, exp_type,
3989 build_int_cst (exp_type, 0),
3990 0, high, 1);
3991 n_high = range_binop (MINUS_EXPR, exp_type,
3992 build_int_cst (exp_type, 0),
3993 0, low, 0);
3994 if (n_high != 0 && TREE_OVERFLOW (n_high))
3995 return NULL_TREE;
3996 goto normalize;
3997
3998 case BIT_NOT_EXPR:
3999 /* ~ X -> -X - 1 */
4000 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4001 build_int_cst (exp_type, 1));
4002
4003 case PLUS_EXPR:
4004 case MINUS_EXPR:
4005 if (TREE_CODE (arg1) != INTEGER_CST)
4006 return NULL_TREE;
4007
4008 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4009 move a constant to the other side. */
4010 if (!TYPE_UNSIGNED (arg0_type)
4011 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4012 return NULL_TREE;
4013
4014 /* If EXP is signed, any overflow in the computation is undefined,
4015 so we don't worry about it so long as our computations on
4016 the bounds don't overflow. For unsigned, overflow is defined
4017 and this is exactly the right thing. */
4018 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4019 arg0_type, low, 0, arg1, 0);
4020 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4021 arg0_type, high, 1, arg1, 0);
4022 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4023 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4024 return NULL_TREE;
4025
4026 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4027 *strict_overflow_p = true;
4028
4029 normalize:
4030 /* Check for an unsigned range which has wrapped around the maximum
4031 value thus making n_high < n_low, and normalize it. */
4032 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4033 {
4034 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4035 build_int_cst (TREE_TYPE (n_high), 1), 0);
4036 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4037 build_int_cst (TREE_TYPE (n_low), 1), 0);
4038
4039 /* If the range is of the form +/- [ x+1, x ], we won't
4040 be able to normalize it. But then, it represents the
4041 whole range or the empty set, so make it
4042 +/- [ -, - ]. */
4043 if (tree_int_cst_equal (n_low, low)
4044 && tree_int_cst_equal (n_high, high))
4045 low = high = 0;
4046 else
4047 in_p = ! in_p;
4048 }
4049 else
4050 low = n_low, high = n_high;
4051
4052 *p_low = low;
4053 *p_high = high;
4054 *p_in_p = in_p;
4055 return arg0;
4056
4057 CASE_CONVERT:
4058 case NON_LVALUE_EXPR:
4059 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4060 return NULL_TREE;
4061
4062 if (! INTEGRAL_TYPE_P (arg0_type)
4063 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4064 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4065 return NULL_TREE;
4066
4067 n_low = low, n_high = high;
4068
4069 if (n_low != 0)
4070 n_low = fold_convert_loc (loc, arg0_type, n_low);
4071
4072 if (n_high != 0)
4073 n_high = fold_convert_loc (loc, arg0_type, n_high);
4074
4075 /* If we're converting arg0 from an unsigned type to exp's
4076 signed type, we will be doing the comparison as unsigned.
4077 The tests above have already verified that LOW and HIGH
4078 are both positive.
4079
4080 So we have to ensure that we will handle large unsigned
4081 values the same way that the current signed bounds treat
4082 negative values. */
4083
4084 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4085 {
4086 tree high_positive;
4087 tree equiv_type;
4088 /* For fixed-point modes, we need to pass the saturating flag
4089 as the 2nd parameter. */
4090 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4091 equiv_type
4092 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4093 TYPE_SATURATING (arg0_type));
4094 else
4095 equiv_type
4096 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4097
4098 /* A range without an upper bound is, naturally, unbounded.
4099 Since convert would have cropped a very large value, use
4100 the max value for the destination type. */
4101 high_positive
4102 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4103 : TYPE_MAX_VALUE (arg0_type);
4104
4105 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4106 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4107 fold_convert_loc (loc, arg0_type,
4108 high_positive),
4109 build_int_cst (arg0_type, 1));
4110
4111 /* If the low bound is specified, "and" the range with the
4112 range for which the original unsigned value will be
4113 positive. */
4114 if (low != 0)
4115 {
4116 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4117 1, fold_convert_loc (loc, arg0_type,
4118 integer_zero_node),
4119 high_positive))
4120 return NULL_TREE;
4121
4122 in_p = (n_in_p == in_p);
4123 }
4124 else
4125 {
4126 /* Otherwise, "or" the range with the range of the input
4127 that will be interpreted as negative. */
4128 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4129 1, fold_convert_loc (loc, arg0_type,
4130 integer_zero_node),
4131 high_positive))
4132 return NULL_TREE;
4133
4134 in_p = (in_p != n_in_p);
4135 }
4136 }
4137
4138 *p_low = n_low;
4139 *p_high = n_high;
4140 *p_in_p = in_p;
4141 return arg0;
4142
4143 default:
4144 return NULL_TREE;
4145 }
4146 }
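
/* Illustrative sketch, not part of GCC: a hypothetical view of the
   PLUS_EXPR step above for an 8-bit unsigned type. x + 10 in
   + [0, 250] moves both bounds by -10 modulo 256, giving
   n_low == 246 > n_high == 240, so the 'normalize' code inverts the
   range to - [241, 245]. */
#if 0
static int
plus_range_example (unsigned char x)
{
  int original = ((unsigned char) (x + 10) <= 250);
  int rewritten = ! (x >= 241 && x <= 245);
  return original == rewritten;  /* Always 1. */
}
#endif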
4147
4148 /* Given EXP, a logical expression, set the range it is testing into
4149 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4150 actually being tested. *PLOW and *PHIGH will be made of the same
4151 type as the returned expression. If EXP is not a comparison, we
4152 will most likely not be returning a useful value and range. Set
4153 *STRICT_OVERFLOW_P to true if the return value is only valid
4154 because signed overflow is undefined; otherwise, do not change
4155 *STRICT_OVERFLOW_P. */
4156
4157 tree
4158 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4159 bool *strict_overflow_p)
4160 {
4161 enum tree_code code;
4162 tree arg0, arg1 = NULL_TREE;
4163 tree exp_type, nexp;
4164 int in_p;
4165 tree low, high;
4166 location_t loc = EXPR_LOCATION (exp);
4167
4168 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4169 and see if we can refine the range. Some of the cases below may not
4170 happen, but it doesn't seem worth worrying about this. We keep
4171 looping as long as make_range_step can refine the range, and stop
4172 as soon as it returns NULL_TREE.
4173
4174 in_p = 0;
4175 low = high = build_int_cst (TREE_TYPE (exp), 0);
4176
4177 while (1)
4178 {
4179 code = TREE_CODE (exp);
4180 exp_type = TREE_TYPE (exp);
4181 arg0 = NULL_TREE;
4182
4183 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4184 {
4185 if (TREE_OPERAND_LENGTH (exp) > 0)
4186 arg0 = TREE_OPERAND (exp, 0);
4187 if (TREE_CODE_CLASS (code) == tcc_binary
4188 || TREE_CODE_CLASS (code) == tcc_comparison
4189 || (TREE_CODE_CLASS (code) == tcc_expression
4190 && TREE_OPERAND_LENGTH (exp) > 1))
4191 arg1 = TREE_OPERAND (exp, 1);
4192 }
4193 if (arg0 == NULL_TREE)
4194 break;
4195
4196 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4197 &high, &in_p, strict_overflow_p);
4198 if (nexp == NULL_TREE)
4199 break;
4200 exp = nexp;
4201 }
4202
4203 /* If EXP is a constant, we can evaluate whether this is true or false. */
4204 if (TREE_CODE (exp) == INTEGER_CST)
4205 {
4206 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4207 exp, 0, low, 0))
4208 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4209 exp, 1, high, 1)));
4210 low = high = 0;
4211 exp = 0;
4212 }
4213
4214 *pin_p = in_p, *plow = low, *phigh = high;
4215 return exp;
4216 }
4217 \f
4218 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4219 type, TYPE, return an expression to test if EXP is in (or out of, depending
4220 on IN_P) the range. Return 0 if the test couldn't be created. */
4221
4222 tree
4223 build_range_check (location_t loc, tree type, tree exp, int in_p,
4224 tree low, tree high)
4225 {
4226 tree etype = TREE_TYPE (exp), value;
4227
4228 #ifdef HAVE_canonicalize_funcptr_for_compare
4229 /* Disable this optimization for function pointer expressions
4230 on targets that require function pointer canonicalization. */
4231 if (HAVE_canonicalize_funcptr_for_compare
4232 && TREE_CODE (etype) == POINTER_TYPE
4233 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4234 return NULL_TREE;
4235 #endif
4236
4237 if (! in_p)
4238 {
4239 value = build_range_check (loc, type, exp, 1, low, high);
4240 if (value != 0)
4241 return invert_truthvalue_loc (loc, value);
4242
4243 return 0;
4244 }
4245
4246 if (low == 0 && high == 0)
4247 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4248
4249 if (low == 0)
4250 return fold_build2_loc (loc, LE_EXPR, type, exp,
4251 fold_convert_loc (loc, etype, high));
4252
4253 if (high == 0)
4254 return fold_build2_loc (loc, GE_EXPR, type, exp,
4255 fold_convert_loc (loc, etype, low));
4256
4257 if (operand_equal_p (low, high, 0))
4258 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4259 fold_convert_loc (loc, etype, low));
4260
4261 if (integer_zerop (low))
4262 {
4263 if (! TYPE_UNSIGNED (etype))
4264 {
4265 etype = unsigned_type_for (etype);
4266 high = fold_convert_loc (loc, etype, high);
4267 exp = fold_convert_loc (loc, etype, exp);
4268 }
4269 return build_range_check (loc, type, exp, 1, 0, high);
4270 }
4271
4272 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4273 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4274 {
4275 int prec = TYPE_PRECISION (etype);
4276
4277 if (wi::mask (prec - 1, false, prec) == high)
4278 {
4279 if (TYPE_UNSIGNED (etype))
4280 {
4281 tree signed_etype = signed_type_for (etype);
4282 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4283 etype
4284 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4285 else
4286 etype = signed_etype;
4287 exp = fold_convert_loc (loc, etype, exp);
4288 }
4289 return fold_build2_loc (loc, GT_EXPR, type, exp,
4290 build_int_cst (etype, 0));
4291 }
4292 }
4293
4294 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4295 This requires wrap-around arithmetic for the type of the expression.
4296 First make sure that arithmetic in this type is valid, then make sure
4297 that it wraps around. */
4298 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4299 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4300 TYPE_UNSIGNED (etype));
4301
4302 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4303 {
4304 tree utype, minv, maxv;
4305
4306 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4307 for the type in question, as we rely on this here. */
4308 utype = unsigned_type_for (etype);
4309 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4310 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4311 build_int_cst (TREE_TYPE (maxv), 1), 1);
4312 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4313
4314 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4315 minv, 1, maxv, 1)))
4316 etype = utype;
4317 else
4318 return 0;
4319 }
4320
4321 high = fold_convert_loc (loc, etype, high);
4322 low = fold_convert_loc (loc, etype, low);
4323 exp = fold_convert_loc (loc, etype, exp);
4324
4325 value = const_binop (MINUS_EXPR, high, low);
4326
4327
4328 if (POINTER_TYPE_P (etype))
4329 {
4330 if (value != 0 && !TREE_OVERFLOW (value))
4331 {
4332 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4333 return build_range_check (loc, type,
4334 fold_build_pointer_plus_loc (loc, exp, low),
4335 1, build_int_cst (etype, 0), value);
4336 }
4337 return 0;
4338 }
4339
4340 if (value != 0 && !TREE_OVERFLOW (value))
4341 return build_range_check (loc, type,
4342 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4343 1, build_int_cst (etype, 0), value);
4344
4345 return 0;
4346 }
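
/* Illustrative sketch, not part of GCC: hypothetical source-level
   views of two of the rewrites above, assuming 8-bit two's-complement
   chars. */
#if 0
static int
range_check_example (unsigned char c)
{
  /* (c>=1) && (c<=127) becomes a sign test in the signed type. */
  int a1 = (c >= 1 && c <= 127);
  int a2 = ((signed char) c > 0);
  /* General [low, high]: one unsigned subtract and compare. */
  int b1 = (c >= 'a' && c <= 'z');
  int b2 = ((unsigned char) (c - 'a') <= 'z' - 'a');
  return (a1 == a2) && (b1 == b2);  /* Always 1 under the assumptions. */
}
#endif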
4347 \f
4348 /* Return the predecessor of VAL in its type, handling the infinite case. */
4349
4350 static tree
4351 range_predecessor (tree val)
4352 {
4353 tree type = TREE_TYPE (val);
4354
4355 if (INTEGRAL_TYPE_P (type)
4356 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4357 return 0;
4358 else
4359 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4360 build_int_cst (TREE_TYPE (val), 1), 0);
4361 }
4362
4363 /* Return the successor of VAL in its type, handling the infinite case. */
4364
4365 static tree
4366 range_successor (tree val)
4367 {
4368 tree type = TREE_TYPE (val);
4369
4370 if (INTEGRAL_TYPE_P (type)
4371 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4372 return 0;
4373 else
4374 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4375 build_int_cst (TREE_TYPE (val), 1), 0);
4376 }
4377
4378 /* Given two ranges, see if we can merge them into one. Return 1 if we
4379 can, 0 if we can't. Set the output range into the specified parameters. */
4380
4381 bool
4382 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4383 tree high0, int in1_p, tree low1, tree high1)
4384 {
4385 int no_overlap;
4386 int subset;
4387 int temp;
4388 tree tem;
4389 int in_p;
4390 tree low, high;
4391 int lowequal = ((low0 == 0 && low1 == 0)
4392 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4393 low0, 0, low1, 0)));
4394 int highequal = ((high0 == 0 && high1 == 0)
4395 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4396 high0, 1, high1, 1)));
4397
4398 /* Make range 0 be the range that starts first, or ends last if they
4399 start at the same value. Swap them if it isn't. */
4400 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4401 low0, 0, low1, 0))
4402 || (lowequal
4403 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4404 high1, 1, high0, 1))))
4405 {
4406 temp = in0_p, in0_p = in1_p, in1_p = temp;
4407 tem = low0, low0 = low1, low1 = tem;
4408 tem = high0, high0 = high1, high1 = tem;
4409 }
4410
4411 /* Now flag two cases, whether the ranges are disjoint or whether the
4412 second range is totally subsumed in the first. Note that the tests
4413 below are simplified by the ones above. */
4414 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4415 high0, 1, low1, 0));
4416 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4417 high1, 1, high0, 1));
4418
4419 /* We now have four cases, depending on whether we are including or
4420 excluding the two ranges. */
4421 if (in0_p && in1_p)
4422 {
4423 /* If they don't overlap, the result is false. If the second range
4424 is a subset it is the result. Otherwise, the range is from the start
4425 of the second to the end of the first. */
4426 if (no_overlap)
4427 in_p = 0, low = high = 0;
4428 else if (subset)
4429 in_p = 1, low = low1, high = high1;
4430 else
4431 in_p = 1, low = low1, high = high0;
4432 }
4433
4434 else if (in0_p && ! in1_p)
4435 {
4436 /* If they don't overlap, the result is the first range. If they are
4437 equal, the result is false. If the second range is a subset of the
4438 first, and the ranges begin at the same place, we go from just after
4439 the end of the second range to the end of the first. If the second
4440 range is not a subset of the first, or if it is a subset and both
4441 ranges end at the same place, the range starts at the start of the
4442 first range and ends just before the second range.
4443 Otherwise, we can't describe this as a single range. */
4444 if (no_overlap)
4445 in_p = 1, low = low0, high = high0;
4446 else if (lowequal && highequal)
4447 in_p = 0, low = high = 0;
4448 else if (subset && lowequal)
4449 {
4450 low = range_successor (high1);
4451 high = high0;
4452 in_p = 1;
4453 if (low == 0)
4454 {
4455 /* We are in the weird situation where high0 > high1 but
4456 high1 has no successor. Punt. */
4457 return 0;
4458 }
4459 }
4460 else if (! subset || highequal)
4461 {
4462 low = low0;
4463 high = range_predecessor (low1);
4464 in_p = 1;
4465 if (high == 0)
4466 {
4467 /* low0 < low1 but low1 has no predecessor. Punt. */
4468 return 0;
4469 }
4470 }
4471 else
4472 return 0;
4473 }
4474
4475 else if (! in0_p && in1_p)
4476 {
4477 /* If they don't overlap, the result is the second range. If the second
4478 is a subset of the first, the result is false. Otherwise,
4479 the range starts just after the first range and ends at the
4480 end of the second. */
4481 if (no_overlap)
4482 in_p = 1, low = low1, high = high1;
4483 else if (subset || highequal)
4484 in_p = 0, low = high = 0;
4485 else
4486 {
4487 low = range_successor (high0);
4488 high = high1;
4489 in_p = 1;
4490 if (low == 0)
4491 {
4492 /* high1 > high0 but high0 has no successor. Punt. */
4493 return 0;
4494 }
4495 }
4496 }
4497
4498 else
4499 {
4500 /* The case where we are excluding both ranges. Here the complex case
4501 is if they don't overlap. In that case, the only time we have a
4502 range is if they are adjacent. If the second is a subset of the
4503 first, the result is the first. Otherwise, the range to exclude
4504 starts at the beginning of the first range and ends at the end of the
4505 second. */
4506 if (no_overlap)
4507 {
4508 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4509 range_successor (high0),
4510 1, low1, 0)))
4511 in_p = 0, low = low0, high = high1;
4512 else
4513 {
4514 /* Canonicalize - [min, x] into - [-, x]. */
4515 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4516 switch (TREE_CODE (TREE_TYPE (low0)))
4517 {
4518 case ENUMERAL_TYPE:
4519 if (TYPE_PRECISION (TREE_TYPE (low0))
4520 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4521 break;
4522 /* FALLTHROUGH */
4523 case INTEGER_TYPE:
4524 if (tree_int_cst_equal (low0,
4525 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4526 low0 = 0;
4527 break;
4528 case POINTER_TYPE:
4529 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4530 && integer_zerop (low0))
4531 low0 = 0;
4532 break;
4533 default:
4534 break;
4535 }
4536
4537 /* Canonicalize - [x, max] into - [x, -]. */
4538 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4539 switch (TREE_CODE (TREE_TYPE (high1)))
4540 {
4541 case ENUMERAL_TYPE:
4542 if (TYPE_PRECISION (TREE_TYPE (high1))
4543 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4544 break;
4545 /* FALLTHROUGH */
4546 case INTEGER_TYPE:
4547 if (tree_int_cst_equal (high1,
4548 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4549 high1 = 0;
4550 break;
4551 case POINTER_TYPE:
4552 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4553 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4554 high1, 1,
4555 build_int_cst (TREE_TYPE (high1), 1),
4556 1)))
4557 high1 = 0;
4558 break;
4559 default:
4560 break;
4561 }
4562
4563 /* The ranges might be also adjacent between the maximum and
4564 minimum values of the given type. For
4565 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4566 return + [x + 1, y - 1]. */
4567 if (low0 == 0 && high1 == 0)
4568 {
4569 low = range_successor (high0);
4570 high = range_predecessor (low1);
4571 if (low == 0 || high == 0)
4572 return 0;
4573
4574 in_p = 1;
4575 }
4576 else
4577 return 0;
4578 }
4579 }
4580 else if (subset)
4581 in_p = 0, low = low0, high = high0;
4582 else
4583 in_p = 0, low = low0, high = high1;
4584 }
4585
4586 *pin_p = in_p, *plow = low, *phigh = high;
4587 return 1;
4588 }
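
/* Illustrative sketch, not part of GCC: a hypothetical instance of
   the in0_p && in1_p case above, where the ranges overlap but neither
   subsumes the other, so the result runs from the start of the second
   range to the end of the first. */
#if 0
static int
merge_ranges_example (int x)
{
  int original = (x >= 2 && x <= 10) && (x >= 5 && x <= 15);
  int merged = (x >= 5 && x <= 10);
  return original == merged;  /* Always 1. */
}
#endif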
4589 \f
4590
4591 /* Subroutine of fold, looking inside expressions of the form
4592 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4593 of the COND_EXPR. This function is being used also to optimize
4594 A op B ? C : A, by reversing the comparison first.
4595
4596 Return a folded expression whose code is not a COND_EXPR
4597 anymore, or NULL_TREE if no folding opportunity is found. */
4598
4599 static tree
4600 fold_cond_expr_with_comparison (location_t loc, tree type,
4601 tree arg0, tree arg1, tree arg2)
4602 {
4603 enum tree_code comp_code = TREE_CODE (arg0);
4604 tree arg00 = TREE_OPERAND (arg0, 0);
4605 tree arg01 = TREE_OPERAND (arg0, 1);
4606 tree arg1_type = TREE_TYPE (arg1);
4607 tree tem;
4608
4609 STRIP_NOPS (arg1);
4610 STRIP_NOPS (arg2);
4611
4612 /* If we have A op 0 ? A : -A, consider applying the following
4613 transformations:
4614
4615 A == 0? A : -A same as -A
4616 A != 0? A : -A same as A
4617 A >= 0? A : -A same as abs (A)
4618 A > 0? A : -A same as abs (A)
4619 A <= 0? A : -A same as -abs (A)
4620 A < 0? A : -A same as -abs (A)
4621
4622 None of these transformations work for modes with signed
4623 zeros. If A is +/-0, the first two transformations will
4624 change the sign of the result (from +0 to -0, or vice
4625 versa). The last four will fix the sign of the result,
4626 even though the original expressions could be positive or
4627 negative, depending on the sign of A.
4628
4629 Note that all these transformations are correct if A is
4630 NaN, since the two alternatives (A and -A) are also NaNs. */
4631 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4632 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4633 ? real_zerop (arg01)
4634 : integer_zerop (arg01))
4635 && ((TREE_CODE (arg2) == NEGATE_EXPR
4636 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4637 /* In the case that A is of the form X-Y, '-A' (arg2) may
4638 have already been folded to Y-X, check for that. */
4639 || (TREE_CODE (arg1) == MINUS_EXPR
4640 && TREE_CODE (arg2) == MINUS_EXPR
4641 && operand_equal_p (TREE_OPERAND (arg1, 0),
4642 TREE_OPERAND (arg2, 1), 0)
4643 && operand_equal_p (TREE_OPERAND (arg1, 1),
4644 TREE_OPERAND (arg2, 0), 0))))
4645 switch (comp_code)
4646 {
4647 case EQ_EXPR:
4648 case UNEQ_EXPR:
4649 tem = fold_convert_loc (loc, arg1_type, arg1);
4650 return pedantic_non_lvalue_loc (loc,
4651 fold_convert_loc (loc, type,
4652 negate_expr (tem)));
4653 case NE_EXPR:
4654 case LTGT_EXPR:
4655 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4656 case UNGE_EXPR:
4657 case UNGT_EXPR:
4658 if (flag_trapping_math)
4659 break;
4660 /* Fall through. */
4661 case GE_EXPR:
4662 case GT_EXPR:
4663 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4664 arg1 = fold_convert_loc (loc, signed_type_for
4665 (TREE_TYPE (arg1)), arg1);
4666 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4667 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4668 case UNLE_EXPR:
4669 case UNLT_EXPR:
4670 if (flag_trapping_math)
4671 break;
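/* Fall through. */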
4672 case LE_EXPR:
4673 case LT_EXPR:
4674 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4675 arg1 = fold_convert_loc (loc, signed_type_for
4676 (TREE_TYPE (arg1)), arg1);
4677 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4678 return negate_expr (fold_convert_loc (loc, type, tem));
4679 default:
4680 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4681 break;
4682 }
4683
4684 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4685 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4686 both transformations are correct when A is NaN: A != 0
4687 is then true, and A == 0 is false. */
4688
4689 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4690 && integer_zerop (arg01) && integer_zerop (arg2))
4691 {
4692 if (comp_code == NE_EXPR)
4693 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4694 else if (comp_code == EQ_EXPR)
4695 return build_zero_cst (type);
4696 }
4697
4698 /* Try some transformations of A op B ? A : B.
4699
4700 A == B? A : B same as B
4701 A != B? A : B same as A
4702 A >= B? A : B same as max (A, B)
4703 A > B? A : B same as max (B, A)
4704 A <= B? A : B same as min (A, B)
4705 A < B? A : B same as min (B, A)
4706
4707 As above, these transformations don't work in the presence
4708 of signed zeros. For example, if A and B are zeros of
4709 opposite sign, the first two transformations will change
4710 the sign of the result. In the last four, the original
4711 expressions give different results for (A=+0, B=-0) and
4712 (A=-0, B=+0), but the transformed expressions do not.
4713
4714 The first two transformations are correct if either A or B
4715 is a NaN. In the first transformation, the condition will
4716 be false, and B will indeed be chosen. In the case of the
4717 second transformation, the condition A != B will be true,
4718 and A will be chosen.
4719
4720 The conversions to max() and min() are not correct if B is
4721 a number and A is not. The conditions in the original
4722 expressions will be false, so all four give B. The min()
4723 and max() versions would give a NaN instead. */
4724 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4725 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4726 /* Avoid these transformations if the COND_EXPR may be used
4727 as an lvalue in the C++ front-end. PR c++/19199. */
4728 && (in_gimple_form
4729 || VECTOR_TYPE_P (type)
4730 || (strcmp (lang_hooks.name, "GNU C++") != 0
4731 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4732 || ! maybe_lvalue_p (arg1)
4733 || ! maybe_lvalue_p (arg2)))
4734 {
4735 tree comp_op0 = arg00;
4736 tree comp_op1 = arg01;
4737 tree comp_type = TREE_TYPE (comp_op0);
4738
4739 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4740 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4741 {
4742 comp_type = type;
4743 comp_op0 = arg1;
4744 comp_op1 = arg2;
4745 }
4746
4747 switch (comp_code)
4748 {
4749 case EQ_EXPR:
4750 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4751 case NE_EXPR:
4752 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4753 case LE_EXPR:
4754 case LT_EXPR:
4755 case UNLE_EXPR:
4756 case UNLT_EXPR:
4757 /* In C++ a ?: expression can be an lvalue, so put the
4758 operand which will be used if they are equal first
4759 so that we can convert this back to the
4760 corresponding COND_EXPR. */
4761 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4762 {
4763 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4764 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4765 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4766 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4767 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4768 comp_op1, comp_op0);
4769 return pedantic_non_lvalue_loc (loc,
4770 fold_convert_loc (loc, type, tem));
4771 }
4772 break;
4773 case GE_EXPR:
4774 case GT_EXPR:
4775 case UNGE_EXPR:
4776 case UNGT_EXPR:
4777 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4778 {
4779 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4780 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4781 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4782 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4783 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4784 comp_op1, comp_op0);
4785 return pedantic_non_lvalue_loc (loc,
4786 fold_convert_loc (loc, type, tem));
4787 }
4788 break;
4789 case UNEQ_EXPR:
4790 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4791 return pedantic_non_lvalue_loc (loc,
4792 fold_convert_loc (loc, type, arg2));
4793 break;
4794 case LTGT_EXPR:
4795 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4796 return pedantic_non_lvalue_loc (loc,
4797 fold_convert_loc (loc, type, arg1));
4798 break;
4799 default:
4800 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4801 break;
4802 }
4803 }
4804
4805 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4806 we might still be able to simplify this. For example,
4807 if C1 is one less or one more than C2, this might have started
4808 out as a MIN or MAX and been transformed by this function.
4809 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4810
4811 if (INTEGRAL_TYPE_P (type)
4812 && TREE_CODE (arg01) == INTEGER_CST
4813 && TREE_CODE (arg2) == INTEGER_CST)
4814 switch (comp_code)
4815 {
4816 case EQ_EXPR:
4817 if (TREE_CODE (arg1) == INTEGER_CST)
4818 break;
4819 /* We can replace A with C1 in this case. */
4820 arg1 = fold_convert_loc (loc, type, arg01);
4821 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4822
4823 case LT_EXPR:
4824 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4825 MIN_EXPR, to preserve the signedness of the comparison. */
4826 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4827 OEP_ONLY_CONST)
4828 && operand_equal_p (arg01,
4829 const_binop (PLUS_EXPR, arg2,
4830 build_int_cst (type, 1)),
4831 OEP_ONLY_CONST))
4832 {
4833 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4834 fold_convert_loc (loc, TREE_TYPE (arg00),
4835 arg2));
4836 return pedantic_non_lvalue_loc (loc,
4837 fold_convert_loc (loc, type, tem));
4838 }
4839 break;
4840
4841 case LE_EXPR:
4842 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4843 as above. */
4844 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4845 OEP_ONLY_CONST)
4846 && operand_equal_p (arg01,
4847 const_binop (MINUS_EXPR, arg2,
4848 build_int_cst (type, 1)),
4849 OEP_ONLY_CONST))
4850 {
4851 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4852 fold_convert_loc (loc, TREE_TYPE (arg00),
4853 arg2));
4854 return pedantic_non_lvalue_loc (loc,
4855 fold_convert_loc (loc, type, tem));
4856 }
4857 break;
4858
4859 case GT_EXPR:
4860 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4861 MAX_EXPR, to preserve the signedness of the comparison. */
4862 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4863 OEP_ONLY_CONST)
4864 && operand_equal_p (arg01,
4865 const_binop (MINUS_EXPR, arg2,
4866 build_int_cst (type, 1)),
4867 OEP_ONLY_CONST))
4868 {
4869 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4870 fold_convert_loc (loc, TREE_TYPE (arg00),
4871 arg2));
4872 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4873 }
4874 break;
4875
4876 case GE_EXPR:
4877 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4878 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4879 OEP_ONLY_CONST)
4880 && operand_equal_p (arg01,
4881 const_binop (PLUS_EXPR, arg2,
4882 build_int_cst (type, 1)),
4883 OEP_ONLY_CONST))
4884 {
4885 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4886 fold_convert_loc (loc, TREE_TYPE (arg00),
4887 arg2));
4888 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4889 }
4890 break;
4891 case NE_EXPR:
4892 break;
4893 default:
4894 gcc_unreachable ();
4895 }
4896
4897 return NULL_TREE;
4898 }
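
/* Illustrative sketch, not part of GCC: hypothetical source-level
   views of two of the COND_EXPR rewrites above. Both are valid for
   int because it has neither signed zeros nor NaNs. */
#if 0
static int
cond_expr_example (int a, int b)
{
  int abs_form = (a > 0 ? a : -a);  /* A > 0 ? A : -A, i.e. abs (A). */
  int max_form = (a >= b ? a : b);  /* A >= B ? A : B, i.e. max (A, B). */
  return abs_form + max_form;
}
#endif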
4899
4900
4901 \f
4902 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4903 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4904 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4905 false) >= 2)
4906 #endif
4907
4908 /* EXP is some logical combination of boolean tests. See if we can
4909 merge it into some range test. Return the new tree if so. */
4910
4911 static tree
4912 fold_range_test (location_t loc, enum tree_code code, tree type,
4913 tree op0, tree op1)
4914 {
4915 int or_op = (code == TRUTH_ORIF_EXPR
4916 || code == TRUTH_OR_EXPR);
4917 int in0_p, in1_p, in_p;
4918 tree low0, low1, low, high0, high1, high;
4919 bool strict_overflow_p = false;
4920 tree tem, lhs, rhs;
4921 const char * const warnmsg = G_("assuming signed overflow does not occur "
4922 "when simplifying range test");
4923
4924 if (!INTEGRAL_TYPE_P (type))
4925 return 0;
4926
4927 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4928 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4929
4930 /* If this is an OR operation, invert both sides; we will invert
4931 again at the end. */
4932 if (or_op)
4933 in0_p = ! in0_p, in1_p = ! in1_p;
4934
4935 /* If both expressions are the same, if we can merge the ranges, and we
4936 can build the range test, return it or it inverted. If one of the
4937 ranges is always true or always false, consider it to be the same
4938 expression as the other. */
4939 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4940 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4941 in1_p, low1, high1)
4942 && 0 != (tem = (build_range_check (loc, type,
4943 lhs != 0 ? lhs
4944 : rhs != 0 ? rhs : integer_zero_node,
4945 in_p, low, high))))
4946 {
4947 if (strict_overflow_p)
4948 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4949 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4950 }
4951
4952 /* On machines where the branch cost is expensive, if this is a
4953 short-circuited branch and the underlying object on both sides
4954 is the same, make a non-short-circuit operation. */
4955 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4956 && lhs != 0 && rhs != 0
4957 && (code == TRUTH_ANDIF_EXPR
4958 || code == TRUTH_ORIF_EXPR)
4959 && operand_equal_p (lhs, rhs, 0))
4960 {
4961 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4962 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4963 which cases we can't do this. */
4964 if (simple_operand_p (lhs))
4965 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4966 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4967 type, op0, op1);
4968
4969 else if (!lang_hooks.decls.global_bindings_p ()
4970 && !CONTAINS_PLACEHOLDER_P (lhs))
4971 {
4972 tree common = save_expr (lhs);
4973
4974 if (0 != (lhs = build_range_check (loc, type, common,
4975 or_op ? ! in0_p : in0_p,
4976 low0, high0))
4977 && (0 != (rhs = build_range_check (loc, type, common,
4978 or_op ? ! in1_p : in1_p,
4979 low1, high1))))
4980 {
4981 if (strict_overflow_p)
4982 fold_overflow_warning (warnmsg,
4983 WARN_STRICT_OVERFLOW_COMPARISON);
4984 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4985 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4986 type, lhs, rhs);
4987 }
4988 }
4989 }
4990
4991 return 0;
4992 }
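
/* Illustrative sketch, not part of GCC: in the hypothetical function
   below, both operands of the "or" test the same object, so the two
   half-ranges merge into a single unsigned comparison. */
#if 0
static int
fold_range_test_example (int x)
{
  int original = (x < 'a' || x > 'z');
  int merged = ((unsigned) (x - 'a') > 'z' - 'a');
  return original == merged;  /* Always 1 (INT_MIN corner aside). */
}
#endif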
4993 \f
4994 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4995 bit value. Arrange things so the extra bits will be set to zero if and
4996 only if C is sign-extended to its full width. If MASK is nonzero,
4997 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4998
4999 static tree
5000 unextend (tree c, int p, int unsignedp, tree mask)
5001 {
5002 tree type = TREE_TYPE (c);
5003 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5004 tree temp;
5005
5006 if (p == modesize || unsignedp)
5007 return c;
5008
5009 /* We work by getting just the sign bit into the low-order bit, then
5010 into the high-order bit, then sign-extend. We then XOR that value
5011 with C. */
5012 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5013
5014 /* We must use a signed type in order to get an arithmetic right shift.
5015 However, we must also avoid introducing accidental overflows, so that
5016 a subsequent call to integer_zerop will work. Hence we must
5017 do the type conversion here. At this point, the constant is either
5018 zero or one, and the conversion to a signed type can never overflow.
5019 We could get an overflow if this conversion is done anywhere else. */
5020 if (TYPE_UNSIGNED (type))
5021 temp = fold_convert (signed_type_for (type), temp);
5022
5023 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5024 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5025 if (mask != 0)
5026 temp = const_binop (BIT_AND_EXPR, temp,
5027 fold_convert (TREE_TYPE (c), mask));
5028 /* If necessary, convert the type back to match the type of C. */
5029 if (TYPE_UNSIGNED (type))
5030 temp = fold_convert (type, temp);
5031
5032 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5033 }
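
/* Illustrative sketch, not part of GCC: a hypothetical numeric check
   of the procedure above for P == 3 and C == 5 (binary 101) in an
   8-bit mode, assuming two's complement. The two shifts turn the
   field's sign bit into the byte 0xf8, and the XOR produces the
   sign-extension of the 3-bit value. */
#if 0
static int
unextend_example (void)
{
  unsigned char c = 5;        /* 3-bit field value, binary 101. */
  unsigned char temp = 0xf8;  /* Sign bit 1 moved to bit 7, then shifted
                                 arithmetically right by 8 - 3 - 1. */
  return (signed char) (c ^ temp) == -3;  /* 101 sign-extended. */
}
#endif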
5034 \f
5035 /* For an expression that has the form
5036 (A && B) || ~B
5037 or
5038 (A || B) && ~B,
5039 we can drop one of the inner expressions and simplify to
5040 A || ~B
5041 or
5042 A && ~B
5043 LOC is the location of the resulting expression. OP is the inner
5044 logical operation, the left-hand side in the examples above, while CMPOP
5045 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5046 removing a condition that guards another, as in
5047 (A != NULL && A->...) || A == NULL
5048 which we must not transform. If RHS_ONLY is true, only eliminate the
5049 right-most operand of the inner logical operation. */
5050
5051 static tree
5052 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5053 bool rhs_only)
5054 {
5055 tree type = TREE_TYPE (cmpop);
5056 enum tree_code code = TREE_CODE (cmpop);
5057 enum tree_code truthop_code = TREE_CODE (op);
5058 tree lhs = TREE_OPERAND (op, 0);
5059 tree rhs = TREE_OPERAND (op, 1);
5060 tree orig_lhs = lhs, orig_rhs = rhs;
5061 enum tree_code rhs_code = TREE_CODE (rhs);
5062 enum tree_code lhs_code = TREE_CODE (lhs);
5063 enum tree_code inv_code;
5064
5065 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5066 return NULL_TREE;
5067
5068 if (TREE_CODE_CLASS (code) != tcc_comparison)
5069 return NULL_TREE;
5070
5071 if (rhs_code == truthop_code)
5072 {
5073 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5074 if (newrhs != NULL_TREE)
5075 {
5076 rhs = newrhs;
5077 rhs_code = TREE_CODE (rhs);
5078 }
5079 }
5080 if (lhs_code == truthop_code && !rhs_only)
5081 {
5082 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5083 if (newlhs != NULL_TREE)
5084 {
5085 lhs = newlhs;
5086 lhs_code = TREE_CODE (lhs);
5087 }
5088 }
5089
5090 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5091 if (inv_code == rhs_code
5092 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5093 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5094 return lhs;
5095 if (!rhs_only && inv_code == lhs_code
5096 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5097 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5098 return rhs;
5099 if (rhs != orig_rhs || lhs != orig_lhs)
5100 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5101 lhs, rhs);
5102 return NULL_TREE;
5103 }
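
/* Illustrative sketch, not part of GCC: in the hypothetical function
   below, y <= 0 is the inverse of the inner comparison y > 0, so the
   guarded copy of the comparison can be dropped, as described above. */
#if 0
static int
opposite_arm_example (int x, int y)
{
  int original = ((x > 0 && y > 0) || y <= 0);
  int simplified = ((x > 0) || y <= 0);
  return original == simplified;  /* Always 1. */
}
#endif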
5104
5105 /* Find ways of folding logical expressions of LHS and RHS:
5106 Try to merge two comparisons to the same innermost item.
5107 Look for range tests like "ch >= '0' && ch <= '9'".
5108 Look for combinations of simple terms on machines with expensive branches
5109 and evaluate the RHS unconditionally.
5110
5111 For example, if we have p->a == 2 && p->b == 4 and we can make an
5112 object large enough to span both A and B, we can do this with a comparison
5113 against the object ANDed with the a mask.
5114
5115 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5116 operations to do this with one comparison.
5117
5118 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5119 function and the one above.
5120
5121 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5122 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5123
5124 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5125 two operands.
5126
5127 We return the simplified tree or 0 if no optimization is possible. */
5128
5129 static tree
5130 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5131 tree lhs, tree rhs)
5132 {
5133 /* If this is the "or" of two comparisons, we can do something if
5134 the comparisons are NE_EXPR. If this is the "and", we can do something
5135 if the comparisons are EQ_EXPR. I.e.,
5136 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5137
5138 WANTED_CODE is this operation code. For single bit fields, we can
5139 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5140 comparison for one-bit fields. */
5141
5142 enum tree_code wanted_code;
5143 enum tree_code lcode, rcode;
5144 tree ll_arg, lr_arg, rl_arg, rr_arg;
5145 tree ll_inner, lr_inner, rl_inner, rr_inner;
5146 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5147 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5148 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5149 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5150 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5151 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5152 machine_mode lnmode, rnmode;
5153 tree ll_mask, lr_mask, rl_mask, rr_mask;
5154 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5155 tree l_const, r_const;
5156 tree lntype, rntype, result;
5157 HOST_WIDE_INT first_bit, end_bit;
5158 int volatilep;
5159
5160 /* Start by getting the comparison codes. Fail if anything is volatile.
5161 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5162 it were surrounded with a NE_EXPR. */
5163
5164 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5165 return 0;
5166
5167 lcode = TREE_CODE (lhs);
5168 rcode = TREE_CODE (rhs);
5169
5170 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5171 {
5172 lhs = build2 (NE_EXPR, truth_type, lhs,
5173 build_int_cst (TREE_TYPE (lhs), 0));
5174 lcode = NE_EXPR;
5175 }
5176
5177 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5178 {
5179 rhs = build2 (NE_EXPR, truth_type, rhs,
5180 build_int_cst (TREE_TYPE (rhs), 0));
5181 rcode = NE_EXPR;
5182 }
5183
5184 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5185 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5186 return 0;
5187
5188 ll_arg = TREE_OPERAND (lhs, 0);
5189 lr_arg = TREE_OPERAND (lhs, 1);
5190 rl_arg = TREE_OPERAND (rhs, 0);
5191 rr_arg = TREE_OPERAND (rhs, 1);
5192
5193 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5194 if (simple_operand_p (ll_arg)
5195 && simple_operand_p (lr_arg))
5196 {
5197 if (operand_equal_p (ll_arg, rl_arg, 0)
5198 && operand_equal_p (lr_arg, rr_arg, 0))
5199 {
5200 result = combine_comparisons (loc, code, lcode, rcode,
5201 truth_type, ll_arg, lr_arg);
5202 if (result)
5203 return result;
5204 }
5205 else if (operand_equal_p (ll_arg, rr_arg, 0)
5206 && operand_equal_p (lr_arg, rl_arg, 0))
5207 {
5208 result = combine_comparisons (loc, code, lcode,
5209 swap_tree_comparison (rcode),
5210 truth_type, ll_arg, lr_arg);
5211 if (result)
5212 return result;
5213 }
5214 }
5215
5216 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5217 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5218
5219 /* If the RHS can be evaluated unconditionally and its operands are
5220 simple, it wins to evaluate the RHS unconditionally on machines
5221 with expensive branches. In this case, this isn't a comparison
5222 that can be merged. */
5223
5224 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5225 false) >= 2
5226 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5227 && simple_operand_p (rl_arg)
5228 && simple_operand_p (rr_arg))
5229 {
5230 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5231 if (code == TRUTH_OR_EXPR
5232 && lcode == NE_EXPR && integer_zerop (lr_arg)
5233 && rcode == NE_EXPR && integer_zerop (rr_arg)
5234 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5235 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5236 return build2_loc (loc, NE_EXPR, truth_type,
5237 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5238 ll_arg, rl_arg),
5239 build_int_cst (TREE_TYPE (ll_arg), 0));
5240
5241 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5242 if (code == TRUTH_AND_EXPR
5243 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5244 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5245 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5246 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5247 return build2_loc (loc, EQ_EXPR, truth_type,
5248 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5249 ll_arg, rl_arg),
5250 build_int_cst (TREE_TYPE (ll_arg), 0));
5251 }
5252
5253 /* See if the comparisons can be merged. Then get all the parameters for
5254 each side. */
5255
5256 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5257 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5258 return 0;
5259
5260 volatilep = 0;
5261 ll_inner = decode_field_reference (loc, ll_arg,
5262 &ll_bitsize, &ll_bitpos, &ll_mode,
5263 &ll_unsignedp, &volatilep, &ll_mask,
5264 &ll_and_mask);
5265 lr_inner = decode_field_reference (loc, lr_arg,
5266 &lr_bitsize, &lr_bitpos, &lr_mode,
5267 &lr_unsignedp, &volatilep, &lr_mask,
5268 &lr_and_mask);
5269 rl_inner = decode_field_reference (loc, rl_arg,
5270 &rl_bitsize, &rl_bitpos, &rl_mode,
5271 &rl_unsignedp, &volatilep, &rl_mask,
5272 &rl_and_mask);
5273 rr_inner = decode_field_reference (loc, rr_arg,
5274 &rr_bitsize, &rr_bitpos, &rr_mode,
5275 &rr_unsignedp, &volatilep, &rr_mask,
5276 &rr_and_mask);
5277
5278 /* The inner operation on the lhs of each comparison must be the
5279 same if we are to be able to do anything. Then see if we
5280 have constants. If not, the same must be true for
5281 the rhs's. */
5282 if (volatilep || ll_inner == 0 || rl_inner == 0
5283 || ! operand_equal_p (ll_inner, rl_inner, 0))
5284 return 0;
5285
5286 if (TREE_CODE (lr_arg) == INTEGER_CST
5287 && TREE_CODE (rr_arg) == INTEGER_CST)
5288 l_const = lr_arg, r_const = rr_arg;
5289 else if (lr_inner == 0 || rr_inner == 0
5290 || ! operand_equal_p (lr_inner, rr_inner, 0))
5291 return 0;
5292 else
5293 l_const = r_const = 0;
5294
5295 /* If either comparison code is not correct for our logical operation,
5296 fail. However, we can convert a one-bit comparison against zero into
5297 the opposite comparison against that bit being set in the field. */
5298
5299 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5300 if (lcode != wanted_code)
5301 {
5302 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5303 {
5304 /* Make the left operand unsigned, since we are only interested
5305 in the value of one bit. Otherwise we are doing the wrong
5306 thing below. */
5307 ll_unsignedp = 1;
5308 l_const = ll_mask;
5309 }
5310 else
5311 return 0;
5312 }
5313
5314 /* This is analogous to the code for l_const above. */
5315 if (rcode != wanted_code)
5316 {
5317 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5318 {
5319 rl_unsignedp = 1;
5320 r_const = rl_mask;
5321 }
5322 else
5323 return 0;
5324 }
5325
5326 /* See if we can find a mode that contains both fields being compared on
5327 the left. If we can't, fail. Otherwise, update all constants and masks
5328 to be relative to a field of that size. */
5329 first_bit = MIN (ll_bitpos, rl_bitpos);
5330 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5331 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5332 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5333 volatilep);
5334 if (lnmode == VOIDmode)
5335 return 0;
5336
5337 lnbitsize = GET_MODE_BITSIZE (lnmode);
5338 lnbitpos = first_bit & ~ (lnbitsize - 1);
5339 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5340 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5341
5342 if (BYTES_BIG_ENDIAN)
5343 {
5344 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5345 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5346 }
5347
5348 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5349 size_int (xll_bitpos));
5350 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5351 size_int (xrl_bitpos));
5352
5353 if (l_const)
5354 {
5355 l_const = fold_convert_loc (loc, lntype, l_const);
5356 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5357 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5358 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5359 fold_build1_loc (loc, BIT_NOT_EXPR,
5360 lntype, ll_mask))))
5361 {
5362 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5363
5364 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5365 }
5366 }
5367 if (r_const)
5368 {
5369 r_const = fold_convert_loc (loc, lntype, r_const);
5370 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5371 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5372 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5373 fold_build1_loc (loc, BIT_NOT_EXPR,
5374 lntype, rl_mask))))
5375 {
5376 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5377
5378 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5379 }
5380 }
5381
5382 /* If the right sides are not constant, do the same for them. Also,
5383 disallow this optimization if a size or signedness mismatch occurs
5384 between the left and right sides. */
5385 if (l_const == 0)
5386 {
5387 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5388 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5389 /* Make sure the two fields on the right
5390 correspond to the left without being swapped. */
5391 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5392 return 0;
5393
5394 first_bit = MIN (lr_bitpos, rr_bitpos);
5395 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5396 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5397 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5398 volatilep);
5399 if (rnmode == VOIDmode)
5400 return 0;
5401
5402 rnbitsize = GET_MODE_BITSIZE (rnmode);
5403 rnbitpos = first_bit & ~ (rnbitsize - 1);
5404 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5405 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5406
5407 if (BYTES_BIG_ENDIAN)
5408 {
5409 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5410 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5411 }
5412
5413 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5414 rntype, lr_mask),
5415 size_int (xlr_bitpos));
5416 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5417 rntype, rr_mask),
5418 size_int (xrr_bitpos));
5419
5420 /* Make a mask that corresponds to both fields being compared.
5421 Do this for both items being compared. If the operands are the
5422 same size and the bits being compared are in the same position
5423 then we can do this by masking both and comparing the masked
5424 results. */
5425 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5426 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5427 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5428 {
5429 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5430 ll_unsignedp || rl_unsignedp);
5431 if (! all_ones_mask_p (ll_mask, lnbitsize))
5432 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5433
5434 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5435 lr_unsignedp || rr_unsignedp);
5436 if (! all_ones_mask_p (lr_mask, rnbitsize))
5437 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5438
5439 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5440 }
5441
5442 /* There is still another way we can do something: If both pairs of
5443 fields being compared are adjacent, we may be able to make a wider
5444 field containing them both.
5445
5446 Note that we still must mask the lhs/rhs expressions. Furthermore,
5447 the mask must be shifted to account for the shift done by
5448 make_bit_field_ref. */
5449 if ((ll_bitsize + ll_bitpos == rl_bitpos
5450 && lr_bitsize + lr_bitpos == rr_bitpos)
5451 || (ll_bitpos == rl_bitpos + rl_bitsize
5452 && lr_bitpos == rr_bitpos + rr_bitsize))
5453 {
5454 tree type;
5455
5456 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5457 ll_bitsize + rl_bitsize,
5458 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5459 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5460 lr_bitsize + rr_bitsize,
5461 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5462
5463 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5464 size_int (MIN (xll_bitpos, xrl_bitpos)));
5465 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5466 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5467
5468 /* Convert to the smaller type before masking out unwanted bits. */
5469 type = lntype;
5470 if (lntype != rntype)
5471 {
5472 if (lnbitsize > rnbitsize)
5473 {
5474 lhs = fold_convert_loc (loc, rntype, lhs);
5475 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5476 type = rntype;
5477 }
5478 else if (lnbitsize < rnbitsize)
5479 {
5480 rhs = fold_convert_loc (loc, lntype, rhs);
5481 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5482 type = lntype;
5483 }
5484 }
5485
5486 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5487 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5488
5489 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5490 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5491
5492 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5493 }
5494
5495 return 0;
5496 }
5497
5498 /* Handle the case of comparisons with constants. If there is something in
5499 common between the masks, those bits of the constants must be the same.
5500 If not, the condition is always false. Test for this to avoid generating
5501 incorrect code below. */
5502 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5503 if (! integer_zerop (result)
5504 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5505 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5506 {
5507 if (wanted_code == NE_EXPR)
5508 {
5509 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5510 return constant_boolean_node (true, truth_type);
5511 }
5512 else
5513 {
5514 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5515 return constant_boolean_node (false, truth_type);
5516 }
5517 }
5518
5519 /* Construct the expression we will return. First get the component
5520 reference we will make. Unless the mask is all ones the width of
5521 that field, perform the mask operation. Then compare with the
5522 merged constant. */
5523 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5524 ll_unsignedp || rl_unsignedp);
5525
5526 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5527 if (! all_ones_mask_p (ll_mask, lnbitsize))
5528 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5529
5530 return build2_loc (loc, wanted_code, truth_type, result,
5531 const_binop (BIT_IOR_EXPR, l_const, r_const));
5532 }
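
/* Editorial example: the net effect of fold_truth_andor_1 on adjacent
   bit-field tests, with hypothetical types and names:

     struct s { unsigned a : 4; unsigned b : 4; };

     int f (struct s *p) { return p->a == 2 && p->b == 3; }

   can be evaluated as one load of the underlying storage unit, one
   mask covering both fields, and one comparison against the merged
   constant (2 | 3 << 4, i.e. 0x32, on a target that allocates A in
   the low-order bits).  */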
5533 \f
5534 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5535 constant. */
5536
5537 static tree
5538 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5539 tree op0, tree op1)
5540 {
5541 tree arg0 = op0;
5542 enum tree_code op_code;
5543 tree comp_const;
5544 tree minmax_const;
5545 int consts_equal, consts_lt;
5546 tree inner;
5547
5548 STRIP_SIGN_NOPS (arg0);
5549
5550 op_code = TREE_CODE (arg0);
5551 minmax_const = TREE_OPERAND (arg0, 1);
5552 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5553 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5554 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5555 inner = TREE_OPERAND (arg0, 0);
5556
5557 /* If something does not permit us to optimize, return NULL_TREE. */
5558 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5559 || TREE_CODE (comp_const) != INTEGER_CST
5560 || TREE_OVERFLOW (comp_const)
5561 || TREE_CODE (minmax_const) != INTEGER_CST
5562 || TREE_OVERFLOW (minmax_const))
5563 return NULL_TREE;
5564
5565 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5566 and GT_EXPR, doing the rest with recursive calls using logical
5567 simplifications. */
5568 switch (code)
5569 {
5570 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5571 {
5572 tree tem
5573 = optimize_minmax_comparison (loc,
5574 invert_tree_comparison (code, false),
5575 type, op0, op1);
5576 if (tem)
5577 return invert_truthvalue_loc (loc, tem);
5578 return NULL_TREE;
5579 }
5580
5581 case GE_EXPR:
5582 return
5583 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5584 optimize_minmax_comparison
5585 (loc, EQ_EXPR, type, arg0, comp_const),
5586 optimize_minmax_comparison
5587 (loc, GT_EXPR, type, arg0, comp_const));
5588
5589 case EQ_EXPR:
5590 if (op_code == MAX_EXPR && consts_equal)
5591 /* MAX (X, 0) == 0 -> X <= 0 */
5592 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5593
5594 else if (op_code == MAX_EXPR && consts_lt)
5595 /* MAX (X, 0) == 5 -> X == 5 */
5596 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5597
5598 else if (op_code == MAX_EXPR)
5599 /* MAX (X, 0) == -1 -> false */
5600 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5601
5602 else if (consts_equal)
5603 /* MIN (X, 0) == 0 -> X >= 0 */
5604 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5605
5606 else if (consts_lt)
5607 /* MIN (X, 0) == 5 -> false */
5608 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5609
5610 else
5611 /* MIN (X, 0) == -1 -> X == -1 */
5612 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5613
5614 case GT_EXPR:
5615 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5616 /* MAX (X, 0) > 0 -> X > 0
5617 MAX (X, 0) > 5 -> X > 5 */
5618 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5619
5620 else if (op_code == MAX_EXPR)
5621 /* MAX (X, 0) > -1 -> true */
5622 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5623
5624 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5625 /* MIN (X, 0) > 0 -> false
5626 MIN (X, 0) > 5 -> false */
5627 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5628
5629 else
5630 /* MIN (X, 0) > -1 -> X > -1 */
5631 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5632
5633 default:
5634 return NULL_TREE;
5635 }
5636 }
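
/* Editorial examples of the min/max folds above, for signed X
   (the wrapper function f is hypothetical):

     MAX (x, 0) == 0   ->  x <= 0
     MAX (x, 0) >  5   ->  x >  5
     MIN (x, 0) >  -1  ->  x >  -1

   e.g.  int f (int x) { int t = x > 0 ? x : 0; return t > 5; }
   simplifies to  return x > 5;  once the ternary is recognized
   as a MAX_EXPR.  */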
5637 \f
5638 /* T is an integer expression that is being multiplied, divided, or taken a
5639 modulus (CODE says which and what kind of divide or modulus) by a
5640 constant C. See if we can eliminate that operation by folding it with
5641 other operations already in T. WIDE_TYPE, if non-null, is a type that
5642 should be used for the computation if wider than our type.
5643
5644 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5645 (X * 2) + (Y * 4). We must, however, be assured that either the original
5646 expression would not overflow or that overflow is undefined for the type
5647 in the language in question.
5648
5649 If we return a non-null expression, it is an equivalent form of the
5650 original computation, but need not be in the original type.
5651
5652 We set *STRICT_OVERFLOW_P to true if the return value depends on
5653 signed overflow being undefined. Otherwise we do not change
5654 *STRICT_OVERFLOW_P. */
5655
5656 static tree
5657 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5658 bool *strict_overflow_p)
5659 {
5660 /* To avoid exponential search depth, refuse to allow recursion past
5661 three levels. Beyond that (1) it's highly unlikely that we'll find
5662 something interesting and (2) we've probably processed it before
5663 when we built the inner expression. */
5664
5665 static int depth;
5666 tree ret;
5667
5668 if (depth > 3)
5669 return NULL;
5670
5671 depth++;
5672 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5673 depth--;
5674
5675 return ret;
5676 }
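
/* Editorial sketch of what extract_muldiv achieves, using the example
   from the comment above; f, x and y are hypothetical, and the fold
   relies on signed overflow being undefined:

     int f (int x, int y) { return (x * 8 + y * 16) / 4; }

   becomes

     int f (int x, int y) { return x * 2 + y * 4; }

   With unsigned (wrapping) arithmetic the two forms can differ, which
   is why several cases below refuse to widen unsigned operations.  */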
5677
5678 static tree
5679 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5680 bool *strict_overflow_p)
5681 {
5682 tree type = TREE_TYPE (t);
5683 enum tree_code tcode = TREE_CODE (t);
5684 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5685 > GET_MODE_SIZE (TYPE_MODE (type)))
5686 ? wide_type : type);
5687 tree t1, t2;
5688 int same_p = tcode == code;
5689 tree op0 = NULL_TREE, op1 = NULL_TREE;
5690 bool sub_strict_overflow_p;
5691
5692 /* Don't deal with constants of zero here; they confuse the code below. */
5693 if (integer_zerop (c))
5694 return NULL_TREE;
5695
5696 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5697 op0 = TREE_OPERAND (t, 0);
5698
5699 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5700 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5701
5702 /* Note that we need not handle conditional operations here since fold
5703 already handles those cases. So just do arithmetic here. */
5704 switch (tcode)
5705 {
5706 case INTEGER_CST:
5707 /* For a constant, we can always simplify if we are a multiply
5708 or (for divide and modulus) if it is a multiple of our constant. */
5709 if (code == MULT_EXPR
5710 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5711 return const_binop (code, fold_convert (ctype, t),
5712 fold_convert (ctype, c));
5713 break;
5714
5715 CASE_CONVERT: case NON_LVALUE_EXPR:
5716 /* If op0 is an expression ... */
5717 if ((COMPARISON_CLASS_P (op0)
5718 || UNARY_CLASS_P (op0)
5719 || BINARY_CLASS_P (op0)
5720 || VL_EXP_CLASS_P (op0)
5721 || EXPRESSION_CLASS_P (op0))
5722 /* ... and has wrapping overflow, and its type is smaller
5723 than ctype, then we cannot pass through as widening. */
5724 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5725 && (TYPE_PRECISION (ctype)
5726 > TYPE_PRECISION (TREE_TYPE (op0))))
5727 /* ... or this is a truncation (t is narrower than op0),
5728 then we cannot pass through this narrowing. */
5729 || (TYPE_PRECISION (type)
5730 < TYPE_PRECISION (TREE_TYPE (op0)))
5731 /* ... or signedness changes for division or modulus,
5732 then we cannot pass through this conversion. */
5733 || (code != MULT_EXPR
5734 && (TYPE_UNSIGNED (ctype)
5735 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5736 /* ... or has undefined overflow while the converted to
5737 type has not, we cannot do the operation in the inner type
5738 as that would introduce undefined overflow. */
5739 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5740 && !TYPE_OVERFLOW_UNDEFINED (type))))
5741 break;
5742
5743 /* Pass the constant down and see if we can make a simplification. If
5744 we can, replace this expression with the inner simplification for
5745 possible later conversion to our or some other type. */
5746 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5747 && TREE_CODE (t2) == INTEGER_CST
5748 && !TREE_OVERFLOW (t2)
5749 && (0 != (t1 = extract_muldiv (op0, t2, code,
5750 code == MULT_EXPR
5751 ? ctype : NULL_TREE,
5752 strict_overflow_p))))
5753 return t1;
5754 break;
5755
5756 case ABS_EXPR:
5757 /* If widening the type changes it from signed to unsigned, then we
5758 must avoid building ABS_EXPR itself as unsigned. */
5759 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5760 {
5761 tree cstype = (*signed_type_for) (ctype);
5762 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5763 != 0)
5764 {
5765 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5766 return fold_convert (ctype, t1);
5767 }
5768 break;
5769 }
5770 /* If the constant is negative, we cannot simplify this. */
5771 if (tree_int_cst_sgn (c) == -1)
5772 break;
5773 /* FALLTHROUGH */
5774 case NEGATE_EXPR:
5775 /* For division and modulus, type can't be unsigned, as e.g.
5776 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5777 For signed types, even with wrapping overflow, this is fine. */
5778 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5779 break;
5780 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5781 != 0)
5782 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5783 break;
5784
5785 case MIN_EXPR: case MAX_EXPR:
5786 /* If widening the type changes the signedness, then we can't perform
5787 this optimization as that changes the result. */
5788 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5789 break;
5790
5791 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5792 sub_strict_overflow_p = false;
5793 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5794 &sub_strict_overflow_p)) != 0
5795 && (t2 = extract_muldiv (op1, c, code, wide_type,
5796 &sub_strict_overflow_p)) != 0)
5797 {
5798 if (tree_int_cst_sgn (c) < 0)
5799 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5800 if (sub_strict_overflow_p)
5801 *strict_overflow_p = true;
5802 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5803 fold_convert (ctype, t2));
5804 }
5805 break;
5806
5807 case LSHIFT_EXPR: case RSHIFT_EXPR:
5808 /* If the second operand is constant, this is a multiplication
5809 or floor division, by a power of two, so we can treat it that
5810 way unless the multiplier or divisor overflows. Signed
5811 left-shift overflow is implementation-defined rather than
5812 undefined in C90, so do not convert signed left shift into
5813 multiplication. */
5814 if (TREE_CODE (op1) == INTEGER_CST
5815 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5816 /* const_binop may not detect overflow correctly,
5817 so check for it explicitly here. */
5818 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5819 && 0 != (t1 = fold_convert (ctype,
5820 const_binop (LSHIFT_EXPR,
5821 size_one_node,
5822 op1)))
5823 && !TREE_OVERFLOW (t1))
5824 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5825 ? MULT_EXPR : FLOOR_DIV_EXPR,
5826 ctype,
5827 fold_convert (ctype, op0),
5828 t1),
5829 c, code, wide_type, strict_overflow_p);
5830 break;
5831
5832 case PLUS_EXPR: case MINUS_EXPR:
5833 /* See if we can eliminate the operation on both sides. If we can, we
5834 can return a new PLUS or MINUS. If we can't, the only remaining
5835 cases where we can do anything are if the second operand is a
5836 constant. */
5837 sub_strict_overflow_p = false;
5838 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5839 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5840 if (t1 != 0 && t2 != 0
5841 && (code == MULT_EXPR
5842 /* If not multiplication, we can only do this if both operands
5843 are divisible by c. */
5844 || (multiple_of_p (ctype, op0, c)
5845 && multiple_of_p (ctype, op1, c))))
5846 {
5847 if (sub_strict_overflow_p)
5848 *strict_overflow_p = true;
5849 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5850 fold_convert (ctype, t2));
5851 }
5852
5853 /* If this was a subtraction, negate OP1 and set it to be an addition.
5854 This simplifies the logic below. */
5855 if (tcode == MINUS_EXPR)
5856 {
5857 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5858 /* If OP1 was not easily negatable, the constant may be OP0. */
5859 if (TREE_CODE (op0) == INTEGER_CST)
5860 {
5861 tree tem = op0;
5862 op0 = op1;
5863 op1 = tem;
5864 tem = t1;
5865 t1 = t2;
5866 t2 = tem;
5867 }
5868 }
5869
5870 if (TREE_CODE (op1) != INTEGER_CST)
5871 break;
5872
5873 /* If either OP1 or C is negative, this optimization is not safe for
5874 some of the division and remainder types while for others we need
5875 to change the code. */
5876 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5877 {
5878 if (code == CEIL_DIV_EXPR)
5879 code = FLOOR_DIV_EXPR;
5880 else if (code == FLOOR_DIV_EXPR)
5881 code = CEIL_DIV_EXPR;
5882 else if (code != MULT_EXPR
5883 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5884 break;
5885 }
5886
5887 /* If it's a multiply or a division/modulus operation of a multiple
5888 of our constant, do the operation and verify it doesn't overflow. */
5889 if (code == MULT_EXPR
5890 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5891 {
5892 op1 = const_binop (code, fold_convert (ctype, op1),
5893 fold_convert (ctype, c));
5894 /* We allow the constant to overflow with wrapping semantics. */
5895 if (op1 == 0
5896 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5897 break;
5898 }
5899 else
5900 break;
5901
5902 /* If we have an unsigned type, we cannot widen the operation since it
5903 will change the result if the original computation overflowed. */
5904 if (TYPE_UNSIGNED (ctype) && ctype != type)
5905 break;
5906
5907 /* If we were able to eliminate our operation from the first side,
5908 apply our operation to the second side and reform the PLUS. */
5909 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5910 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5911
5912 /* The last case is if we are a multiply. In that case, we can
5913 apply the distributive law to commute the multiply and addition
5914 if the multiplication of the constants doesn't overflow
5915 and overflow is defined. With undefined overflow
5916 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5917 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5918 return fold_build2 (tcode, ctype,
5919 fold_build2 (code, ctype,
5920 fold_convert (ctype, op0),
5921 fold_convert (ctype, c)),
5922 op1);
5923
5924 break;
5925
5926 case MULT_EXPR:
5927 /* We have a special case here if we are doing something like
5928 (C * 8) % 4 since we know that's zero. */
5929 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5930 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5931 /* If the multiplication can overflow we cannot optimize this. */
5932 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5933 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5934 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5935 {
5936 *strict_overflow_p = true;
5937 return omit_one_operand (type, integer_zero_node, op0);
5938 }
5939
5940 /* ... fall through ... */
5941
5942 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5943 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5944 /* If we can extract our operation from the LHS, do so and return a
5945 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5946 do something only if the second operand is a constant. */
5947 if (same_p
5948 && (t1 = extract_muldiv (op0, c, code, wide_type,
5949 strict_overflow_p)) != 0)
5950 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5951 fold_convert (ctype, op1));
5952 else if (tcode == MULT_EXPR && code == MULT_EXPR
5953 && (t1 = extract_muldiv (op1, c, code, wide_type,
5954 strict_overflow_p)) != 0)
5955 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5956 fold_convert (ctype, t1));
5957 else if (TREE_CODE (op1) != INTEGER_CST)
5958 return 0;
5959
5960 /* If these are the same operation types, we can associate them
5961 assuming no overflow. */
5962 if (tcode == code)
5963 {
5964 bool overflow_p = false;
5965 bool overflow_mul_p;
5966 signop sign = TYPE_SIGN (ctype);
5967 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5968 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5969 if (overflow_mul_p
5970 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5971 overflow_p = true;
5972 if (!overflow_p)
5973 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5974 wide_int_to_tree (ctype, mul));
5975 }
5976
5977 /* If these operations "cancel" each other, we have the main
5978 optimizations of this pass, which occur when either constant is a
5979 multiple of the other, in which case we replace this with an
5980 operation of either CODE or TCODE.
5981
5982 If we have an unsigned type, we cannot do this since it will change
5983 the result if the original computation overflowed. */
5984 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5985 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5986 || (tcode == MULT_EXPR
5987 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5988 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5989 && code != MULT_EXPR)))
5990 {
5991 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5992 {
5993 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5994 *strict_overflow_p = true;
5995 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5996 fold_convert (ctype,
5997 const_binop (TRUNC_DIV_EXPR,
5998 op1, c)));
5999 }
6000 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6001 {
6002 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6003 *strict_overflow_p = true;
6004 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6005 fold_convert (ctype,
6006 const_binop (TRUNC_DIV_EXPR,
6007 c, op1)));
6008 }
6009 }
6010 break;
6011
6012 default:
6013 break;
6014 }
6015
6016 return 0;
6017 }
6018 \f
6019 /* Return a node which has the indicated constant VALUE (either 0 or
6020 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6021 and is of the indicated TYPE. */
6022
6023 tree
6024 constant_boolean_node (bool value, tree type)
6025 {
6026 if (type == integer_type_node)
6027 return value ? integer_one_node : integer_zero_node;
6028 else if (type == boolean_type_node)
6029 return value ? boolean_true_node : boolean_false_node;
6030 else if (TREE_CODE (type) == VECTOR_TYPE)
6031 return build_vector_from_val (type,
6032 build_int_cst (TREE_TYPE (type),
6033 value ? -1 : 0));
6034 else
6035 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6036 }
6037
6038
6039 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6040 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6041 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6042 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6043 COND is the first argument to CODE; otherwise (as in the example
6044 given here), it is the second argument. TYPE is the type of the
6045 original expression. Return NULL_TREE if no simplification is
6046 possible. */
6047
6048 static tree
6049 fold_binary_op_with_conditional_arg (location_t loc,
6050 enum tree_code code,
6051 tree type, tree op0, tree op1,
6052 tree cond, tree arg, int cond_first_p)
6053 {
6054 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6055 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6056 tree test, true_value, false_value;
6057 tree lhs = NULL_TREE;
6058 tree rhs = NULL_TREE;
6059 enum tree_code cond_code = COND_EXPR;
6060
6061 if (TREE_CODE (cond) == COND_EXPR
6062 || TREE_CODE (cond) == VEC_COND_EXPR)
6063 {
6064 test = TREE_OPERAND (cond, 0);
6065 true_value = TREE_OPERAND (cond, 1);
6066 false_value = TREE_OPERAND (cond, 2);
6067 /* If this operand is a throw-expression (its type is void), it
6068 does not make sense to try to perform a logical or arithmetic
6069 operation involving it. */
6070 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6071 lhs = true_value;
6072 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6073 rhs = false_value;
6074 }
6075 else
6076 {
6077 tree testtype = TREE_TYPE (cond);
6078 test = cond;
6079 true_value = constant_boolean_node (true, testtype);
6080 false_value = constant_boolean_node (false, testtype);
6081 }
6082
6083 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6084 cond_code = VEC_COND_EXPR;
6085
6086 /* This transformation is only worthwhile if we don't have to wrap ARG
6087 in a SAVE_EXPR and the operation can be simplified without recursing
6088 on at least one of the branches once it's pushed inside the COND_EXPR. */
6089 if (!TREE_CONSTANT (arg)
6090 && (TREE_SIDE_EFFECTS (arg)
6091 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6092 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6093 return NULL_TREE;
6094
6095 arg = fold_convert_loc (loc, arg_type, arg);
6096 if (lhs == 0)
6097 {
6098 true_value = fold_convert_loc (loc, cond_type, true_value);
6099 if (cond_first_p)
6100 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6101 else
6102 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6103 }
6104 if (rhs == 0)
6105 {
6106 false_value = fold_convert_loc (loc, cond_type, false_value);
6107 if (cond_first_p)
6108 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6109 else
6110 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6111 }
6112
6113 /* Check that we have simplified at least one of the branches. */
6114 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6115 return NULL_TREE;
6116
6117 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6118 }
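
/* Editorial example of the transformation (f, a, b, x, y hypothetical):

     int f (int a, int b, int x, int y) { return a + (b ? x : y); }

   becomes

     int f (int a, int b, int x, int y) { return b ? a + x : a + y; }

   which pays off when at least one arm then folds further, e.g. when
   X or Y is a constant.  */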
6119
6120 \f
6121 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6122
6123 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6124 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6125 ADDEND is the same as X.
6126
6127 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6128 and finite. The problematic cases are when X is zero, and its mode
6129 has signed zeros. In the case of rounding towards -infinity,
6130 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6131 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6132
6133 bool
6134 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6135 {
6136 if (!real_zerop (addend))
6137 return false;
6138
6139 /* Don't allow the fold with -fsignaling-nans. */
6140 if (HONOR_SNANS (TYPE_MODE (type)))
6141 return false;
6142
6143 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6144 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6145 return true;
6146
6147 /* In a vector or complex, we would need to check the sign of all zeros. */
6148 if (TREE_CODE (addend) != REAL_CST)
6149 return false;
6150
6151 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6152 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6153 negate = !negate;
6154
6155 /* The mode has signed zeros, and we have to honor their sign.
6156 In this situation, there is only one case we can return true for.
6157 X - 0 is the same as X unless rounding towards -infinity is
6158 supported. */
6159 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6160 }
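
/* Editorial illustration of the signed-zero constraint (f and g are
   hypothetical):

     double f (double x) { return x - 0.0; }
     double g (double x) { return x + 0.0; }

   f can be folded to x (except under rounding towards -infinity,
   where 0.0 - 0.0 yields -0.0), but g cannot: g (-0.0) returns +0.0
   under the default rounding mode, so the addition is folded only
   when the sign of zero need not be honored.  */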
6161
6162 /* Subroutine of fold() that checks comparisons of built-in math
6163 functions against real constants.
6164
6165 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6166 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6167 is the type of the result and ARG0 and ARG1 are the operands of the
6168 comparison. ARG1 must be a TREE_REAL_CST.
6169
6170 The function returns the constant folded tree if a simplification
6171 can be made, and NULL_TREE otherwise. */
6172
6173 static tree
6174 fold_mathfn_compare (location_t loc,
6175 enum built_in_function fcode, enum tree_code code,
6176 tree type, tree arg0, tree arg1)
6177 {
6178 REAL_VALUE_TYPE c;
6179
6180 if (BUILTIN_SQRT_P (fcode))
6181 {
6182 tree arg = CALL_EXPR_ARG (arg0, 0);
6183 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6184
6185 c = TREE_REAL_CST (arg1);
6186 if (REAL_VALUE_NEGATIVE (c))
6187 {
6188 /* sqrt(x) < y is always false, if y is negative. */
6189 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6190 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6191
6192 /* sqrt(x) > y is always true, if y is negative and we
6193 don't care about NaNs, i.e. negative values of x. */
6194 if (code == NE_EXPR || !HONOR_NANS (mode))
6195 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6196
6197 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6198 return fold_build2_loc (loc, GE_EXPR, type, arg,
6199 build_real (TREE_TYPE (arg), dconst0));
6200 }
6201 else if (code == GT_EXPR || code == GE_EXPR)
6202 {
6203 REAL_VALUE_TYPE c2;
6204
6205 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6206 real_convert (&c2, mode, &c2);
6207
6208 if (REAL_VALUE_ISINF (c2))
6209 {
6210 /* sqrt(x) > y is x == +Inf, when y is very large. */
6211 if (HONOR_INFINITIES (mode))
6212 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6213 build_real (TREE_TYPE (arg), c2));
6214
6215 /* sqrt(x) > y is always false, when y is very large
6216 and we don't care about infinities. */
6217 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6218 }
6219
6220 /* sqrt(x) > c is the same as x > c*c. */
6221 return fold_build2_loc (loc, code, type, arg,
6222 build_real (TREE_TYPE (arg), c2));
6223 }
6224 else if (code == LT_EXPR || code == LE_EXPR)
6225 {
6226 REAL_VALUE_TYPE c2;
6227
6228 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6229 real_convert (&c2, mode, &c2);
6230
6231 if (REAL_VALUE_ISINF (c2))
6232 {
6233 /* sqrt(x) < y is always true, when y is a very large
6234 value and we don't care about NaNs or Infinities. */
6235 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6236 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6237
6238 /* sqrt(x) < y is x != +Inf when y is very large and we
6239 don't care about NaNs. */
6240 if (! HONOR_NANS (mode))
6241 return fold_build2_loc (loc, NE_EXPR, type, arg,
6242 build_real (TREE_TYPE (arg), c2));
6243
6244 /* sqrt(x) < y is x >= 0 when y is very large and we
6245 don't care about Infinities. */
6246 if (! HONOR_INFINITIES (mode))
6247 return fold_build2_loc (loc, GE_EXPR, type, arg,
6248 build_real (TREE_TYPE (arg), dconst0));
6249
6250 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6251 arg = save_expr (arg);
6252 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6253 fold_build2_loc (loc, GE_EXPR, type, arg,
6254 build_real (TREE_TYPE (arg),
6255 dconst0)),
6256 fold_build2_loc (loc, NE_EXPR, type, arg,
6257 build_real (TREE_TYPE (arg),
6258 c2)));
6259 }
6260
6261 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6262 if (! HONOR_NANS (mode))
6263 return fold_build2_loc (loc, code, type, arg,
6264 build_real (TREE_TYPE (arg), c2));
6265
6266 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6267 arg = save_expr (arg);
6268 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6269 fold_build2_loc (loc, GE_EXPR, type, arg,
6270 build_real (TREE_TYPE (arg),
6271 dconst0)),
6272 fold_build2_loc (loc, code, type, arg,
6273 build_real (TREE_TYPE (arg),
6274 c2)));
6275 }
6276 }
6277
6278 return NULL_TREE;
6279 }
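
/* Editorial examples for the sqrt comparisons above (f and g are
   hypothetical; assume c*c stays finite in the working mode):

     #include <math.h>
     int f (double x) { return sqrt (x) > 2.0; }    folds to  x > 4.0
     int g (double x) { return sqrt (x) >= -1.0; }  folds to  x >= 0.0

   The second fold works because sqrt (x) is NaN exactly when x < 0,
   and any ordered comparison against a NaN is false.  */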
6280
6281 /* Subroutine of fold() that optimizes comparisons against Infinities,
6282 either +Inf or -Inf.
6283
6284 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6285 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6286 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6287
6288 The function returns the constant folded tree if a simplification
6289 can be made, and NULL_TREE otherwise. */
6290
6291 static tree
6292 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6293 tree arg0, tree arg1)
6294 {
6295 machine_mode mode;
6296 REAL_VALUE_TYPE max;
6297 tree temp;
6298 bool neg;
6299
6300 mode = TYPE_MODE (TREE_TYPE (arg0));
6301
6302 /* For negative infinity swap the sense of the comparison. */
6303 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6304 if (neg)
6305 code = swap_tree_comparison (code);
6306
6307 switch (code)
6308 {
6309 case GT_EXPR:
6310 /* x > +Inf is always false, if we don't care about sNaNs. */
6311 if (HONOR_SNANS (mode))
6312 return NULL_TREE;
6313 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6314
6315 case LE_EXPR:
6316 /* x <= +Inf is always true, if we don't care about NaNs. */
6317 if (! HONOR_NANS (mode))
6318 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6319
6320 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6321 arg0 = save_expr (arg0);
6322 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6323
6324 case EQ_EXPR:
6325 case GE_EXPR:
6326 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6327 real_maxval (&max, neg, mode);
6328 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6329 arg0, build_real (TREE_TYPE (arg0), max));
6330
6331 case LT_EXPR:
6332 /* x < +Inf is always equal to x <= DBL_MAX. */
6333 real_maxval (&max, neg, mode);
6334 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6335 arg0, build_real (TREE_TYPE (arg0), max));
6336
6337 case NE_EXPR:
6338 /* x != +Inf is always equal to !(x > DBL_MAX). */
6339 real_maxval (&max, neg, mode);
6340 if (! HONOR_NANS (mode))
6341 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6342 arg0, build_real (TREE_TYPE (arg0), max));
6343
6344 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6345 arg0, build_real (TREE_TYPE (arg0), max));
6346 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6347
6348 default:
6349 break;
6350 }
6351
6352 return NULL_TREE;
6353 }
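
/* Editorial examples of the infinity folds above (f and g are
   hypothetical; DBL_MAX is from <float.h>):

     #include <math.h>
     int f (double x) { return x < INFINITY; }    folds to  x <= DBL_MAX
     int g (double x) { return x >= INFINITY; }   folds to  x >  DBL_MAX

   Both rewrites avoid materializing an infinity and remain correct for
   NaN operands, since every ordered comparison with a NaN is false on
   both sides of the rewrite.  */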
6354
6355 /* Subroutine of fold() that optimizes comparisons of a division by
6356 a nonzero integer constant against an integer constant, i.e.
6357 X/C1 op C2.
6358
6359 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6360 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6361 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6362
6363 The function returns the constant folded tree if a simplification
6364 can be made, and NULL_TREE otherwise. */
6365
6366 static tree
6367 fold_div_compare (location_t loc,
6368 enum tree_code code, tree type, tree arg0, tree arg1)
6369 {
6370 tree prod, tmp, hi, lo;
6371 tree arg00 = TREE_OPERAND (arg0, 0);
6372 tree arg01 = TREE_OPERAND (arg0, 1);
6373 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6374 bool neg_overflow = false;
6375 bool overflow;
6376
6377 /* We have to do this the hard way to detect unsigned overflow.
6378 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6379 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6380 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6381 neg_overflow = false;
6382
6383 if (sign == UNSIGNED)
6384 {
6385 tmp = int_const_binop (MINUS_EXPR, arg01,
6386 build_int_cst (TREE_TYPE (arg01), 1));
6387 lo = prod;
6388
6389 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6390 val = wi::add (prod, tmp, sign, &overflow);
6391 hi = force_fit_type (TREE_TYPE (arg00), val,
6392 -1, overflow | TREE_OVERFLOW (prod));
6393 }
6394 else if (tree_int_cst_sgn (arg01) >= 0)
6395 {
6396 tmp = int_const_binop (MINUS_EXPR, arg01,
6397 build_int_cst (TREE_TYPE (arg01), 1));
6398 switch (tree_int_cst_sgn (arg1))
6399 {
6400 case -1:
6401 neg_overflow = true;
6402 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6403 hi = prod;
6404 break;
6405
6406 case 0:
6407 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6408 hi = tmp;
6409 break;
6410
6411 case 1:
6412 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6413 lo = prod;
6414 break;
6415
6416 default:
6417 gcc_unreachable ();
6418 }
6419 }
6420 else
6421 {
6422 /* A negative divisor reverses the relational operators. */
6423 code = swap_tree_comparison (code);
6424
6425 tmp = int_const_binop (PLUS_EXPR, arg01,
6426 build_int_cst (TREE_TYPE (arg01), 1));
6427 switch (tree_int_cst_sgn (arg1))
6428 {
6429 case -1:
6430 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6431 lo = prod;
6432 break;
6433
6434 case 0:
6435 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6436 lo = tmp;
6437 break;
6438
6439 case 1:
6440 neg_overflow = true;
6441 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6442 hi = prod;
6443 break;
6444
6445 default:
6446 gcc_unreachable ();
6447 }
6448 }
6449
6450 switch (code)
6451 {
6452 case EQ_EXPR:
6453 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6454 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6455 if (TREE_OVERFLOW (hi))
6456 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6457 if (TREE_OVERFLOW (lo))
6458 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6459 return build_range_check (loc, type, arg00, 1, lo, hi);
6460
6461 case NE_EXPR:
6462 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6463 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6464 if (TREE_OVERFLOW (hi))
6465 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6466 if (TREE_OVERFLOW (lo))
6467 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6468 return build_range_check (loc, type, arg00, 0, lo, hi);
6469
6470 case LT_EXPR:
6471 if (TREE_OVERFLOW (lo))
6472 {
6473 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6474 return omit_one_operand_loc (loc, type, tmp, arg00);
6475 }
6476 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6477
6478 case LE_EXPR:
6479 if (TREE_OVERFLOW (hi))
6480 {
6481 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6482 return omit_one_operand_loc (loc, type, tmp, arg00);
6483 }
6484 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6485
6486 case GT_EXPR:
6487 if (TREE_OVERFLOW (hi))
6488 {
6489 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6490 return omit_one_operand_loc (loc, type, tmp, arg00);
6491 }
6492 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6493
6494 case GE_EXPR:
6495 if (TREE_OVERFLOW (lo))
6496 {
6497 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6498 return omit_one_operand_loc (loc, type, tmp, arg00);
6499 }
6500 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6501
6502 default:
6503 break;
6504 }
6505
6506 return NULL_TREE;
6507 }
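
/* Editorial example: with C's truncating division (f hypothetical),

     int f (int x) { return x / 3 == 2; }

   holds exactly for x in [6, 8], so the comparison folds into the
   range check  6 <= x && x <= 8,  which build_range_check typically
   emits as the single unsigned test  (unsigned) x - 6 <= 2.  */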
6508
6509
6510 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6511 equality/inequality test, then return a simplified form of the test
6512 using a sign test. Otherwise return NULL. TYPE is the desired
6513 result type. */
6514
6515 static tree
6516 fold_single_bit_test_into_sign_test (location_t loc,
6517 enum tree_code code, tree arg0, tree arg1,
6518 tree result_type)
6519 {
6520 /* If this is testing a single bit, we can optimize the test. */
6521 if ((code == NE_EXPR || code == EQ_EXPR)
6522 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6523 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6524 {
6525 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6526 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6527 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6528
6529 if (arg00 != NULL_TREE
6530 /* This is only a win if casting to a signed type is cheap,
6531 i.e. when arg00's type is not a partial mode. */
6532 && TYPE_PRECISION (TREE_TYPE (arg00))
6533 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6534 {
6535 tree stype = signed_type_for (TREE_TYPE (arg00));
6536 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6537 result_type,
6538 fold_convert_loc (loc, stype, arg00),
6539 build_int_cst (stype, 0));
6540 }
6541 }
6542
6543 return NULL_TREE;
6544 }
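
/* Editorial example: when the single bit tested is the sign bit
   (f hypothetical, assuming 32-bit int):

     int f (unsigned a) { return (a & 0x80000000u) != 0; }

   is folded into a sign test on the signed view of A:

     int f (unsigned a) { return (int) a < 0; }  */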
6545
6546 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6547 equality/inequality test, then return a simplified form of
6548 the test using shifts and logical operations. Otherwise return
6549 NULL. TYPE is the desired result type. */
6550
6551 tree
6552 fold_single_bit_test (location_t loc, enum tree_code code,
6553 tree arg0, tree arg1, tree result_type)
6554 {
6555 /* If this is testing a single bit, we can optimize the test. */
6556 if ((code == NE_EXPR || code == EQ_EXPR)
6557 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6558 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6559 {
6560 tree inner = TREE_OPERAND (arg0, 0);
6561 tree type = TREE_TYPE (arg0);
6562 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6563 machine_mode operand_mode = TYPE_MODE (type);
6564 int ops_unsigned;
6565 tree signed_type, unsigned_type, intermediate_type;
6566 tree tem, one;
6567
6568 /* First, see if we can fold the single bit test into a sign-bit
6569 test. */
6570 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6571 result_type);
6572 if (tem)
6573 return tem;
6574
6575 /* Otherwise we have (A & C) != 0 where C is a single bit,
6576 convert that into ((A >> C2) & 1), where C2 = log2(C).
6577 Similarly for (A & C) == 0. */
6578
6579 /* If INNER is a right shift by a constant and the shift count plus
6580 BITNUM stays below the type precision, adjust BITNUM and INNER. */
6581 if (TREE_CODE (inner) == RSHIFT_EXPR
6582 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6583 && bitnum < TYPE_PRECISION (type)
6584 && wi::ltu_p (TREE_OPERAND (inner, 1),
6585 TYPE_PRECISION (type) - bitnum))
6586 {
6587 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6588 inner = TREE_OPERAND (inner, 0);
6589 }
6590
6591 /* If we are going to be able to omit the AND below, we must do our
6592 operations as unsigned. If we must use the AND, we have a choice.
6593 Normally unsigned is faster, but for some machines signed is. */
6594 #ifdef LOAD_EXTEND_OP
6595 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6596 && !flag_syntax_only) ? 0 : 1;
6597 #else
6598 ops_unsigned = 1;
6599 #endif
6600
6601 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6602 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6603 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6604 inner = fold_convert_loc (loc, intermediate_type, inner);
6605
6606 if (bitnum != 0)
6607 inner = build2 (RSHIFT_EXPR, intermediate_type,
6608 inner, size_int (bitnum));
6609
6610 one = build_int_cst (intermediate_type, 1);
6611
6612 if (code == EQ_EXPR)
6613 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6614
6615 /* Put the AND last so it can combine with more things. */
6616 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6617
6618 /* Make sure to return the proper type. */
6619 inner = fold_convert_loc (loc, result_type, inner);
6620
6621 return inner;
6622 }
6623 return NULL_TREE;
6624 }
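
/* Editorial example of the shift form built above (f and g are
   hypothetical; the tested bit is bit 3):

     int f (unsigned a) { return (a & 8) != 0; }
   becomes  return (a >> 3) & 1;

     int g (unsigned a) { return (a & 8) == 0; }
   becomes  return ((a >> 3) ^ 1) & 1;

   the XOR with one implementing the EQ_EXPR case before the final
   AND is applied.  */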
6625
6626 /* Check whether we are allowed to reorder operands arg0 and arg1,
6627 such that the evaluation of arg1 occurs before arg0. */
6628
6629 static bool
6630 reorder_operands_p (const_tree arg0, const_tree arg1)
6631 {
6632 if (! flag_evaluation_order)
6633 return true;
6634 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6635 return true;
6636 return ! TREE_SIDE_EFFECTS (arg0)
6637 && ! TREE_SIDE_EFFECTS (arg1);
6638 }
6639
6640 /* Test whether it is preferable to swap two operands, ARG0 and
6641 ARG1, for example because ARG0 is an integer constant and ARG1
6642 isn't. If REORDER is true, only recommend swapping if we can
6643 evaluate the operands in reverse order. */
6644
6645 bool
6646 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6647 {
6648 if (CONSTANT_CLASS_P (arg1))
6649 return 0;
6650 if (CONSTANT_CLASS_P (arg0))
6651 return 1;
6652
6653 STRIP_SIGN_NOPS (arg0);
6654 STRIP_SIGN_NOPS (arg1);
6655
6656 if (TREE_CONSTANT (arg1))
6657 return 0;
6658 if (TREE_CONSTANT (arg0))
6659 return 1;
6660
6661 if (reorder && flag_evaluation_order
6662 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6663 return 0;
6664
6665 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6666 for commutative and comparison operators. Ensuring a canonical
6667 form allows the optimizers to find additional redundancies without
6668 having to explicitly check for both orderings. */
6669 if (TREE_CODE (arg0) == SSA_NAME
6670 && TREE_CODE (arg1) == SSA_NAME
6671 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6672 return 1;
6673
6674 /* Put SSA_NAMEs last. */
6675 if (TREE_CODE (arg1) == SSA_NAME)
6676 return 0;
6677 if (TREE_CODE (arg0) == SSA_NAME)
6678 return 1;
6679
6680 /* Put variables last. */
6681 if (DECL_P (arg1))
6682 return 0;
6683 if (DECL_P (arg0))
6684 return 1;
6685
6686 return 0;
6687 }
6688
6689 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6690 ARG0 is extended to a wider type. */
6691
6692 static tree
6693 fold_widened_comparison (location_t loc, enum tree_code code,
6694 tree type, tree arg0, tree arg1)
6695 {
6696 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6697 tree arg1_unw;
6698 tree shorter_type, outer_type;
6699 tree min, max;
6700 bool above, below;
6701
6702 if (arg0_unw == arg0)
6703 return NULL_TREE;
6704 shorter_type = TREE_TYPE (arg0_unw);
6705
6706 #ifdef HAVE_canonicalize_funcptr_for_compare
6707 /* Disable this optimization if we're casting a function pointer
6708 type on targets that require function pointer canonicalization. */
6709 if (HAVE_canonicalize_funcptr_for_compare
6710 && TREE_CODE (shorter_type) == POINTER_TYPE
6711 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6712 return NULL_TREE;
6713 #endif
6714
6715 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6716 return NULL_TREE;
6717
6718 arg1_unw = get_unwidened (arg1, NULL_TREE);
6719
6720 /* If possible, express the comparison in the shorter mode. */
6721 if ((code == EQ_EXPR || code == NE_EXPR
6722 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6723 && (TREE_TYPE (arg1_unw) == shorter_type
6724 || ((TYPE_PRECISION (shorter_type)
6725 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6726 && (TYPE_UNSIGNED (shorter_type)
6727 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6728 || (TREE_CODE (arg1_unw) == INTEGER_CST
6729 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6730 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6731 && int_fits_type_p (arg1_unw, shorter_type))))
6732 return fold_build2_loc (loc, code, type, arg0_unw,
6733 fold_convert_loc (loc, shorter_type, arg1_unw));
6734
6735 if (TREE_CODE (arg1_unw) != INTEGER_CST
6736 || TREE_CODE (shorter_type) != INTEGER_TYPE
6737 || !int_fits_type_p (arg1_unw, shorter_type))
6738 return NULL_TREE;
6739
6740 /* If we are comparing with an integer that does not fit into the range
6741 of the shorter type, the result is known. */
6742 outer_type = TREE_TYPE (arg1_unw);
6743 min = lower_bound_in_type (outer_type, shorter_type);
6744 max = upper_bound_in_type (outer_type, shorter_type);
6745
6746 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6747 max, arg1_unw));
6748 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6749 arg1_unw, min));
6750
6751 switch (code)
6752 {
6753 case EQ_EXPR:
6754 if (above || below)
6755 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6756 break;
6757
6758 case NE_EXPR:
6759 if (above || below)
6760 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6761 break;
6762
6763 case LT_EXPR:
6764 case LE_EXPR:
6765 if (above)
6766 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6767 else if (below)
6768 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6769 /* FALLTHROUGH */
6770 case GT_EXPR:
6771 case GE_EXPR:
6772 if (above)
6773 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6774 else if (below)
6775 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6776
6777 default:
6778 break;
6779 }
6780
6781 return NULL_TREE;
6782 }
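
/* Editorial example (f hypothetical): comparing a widened value with a
   constant outside the narrow type's range has a known result,

     int f (unsigned char c) { return (int) c == 300; }

   folds to 0, and  (int) c < 300  folds to 1, since no unsigned char
   widens to a value above 255.  */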
6783
6784 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6785 ARG0 just the signedness is changed. */
6786
6787 static tree
6788 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6789 tree arg0, tree arg1)
6790 {
6791 tree arg0_inner;
6792 tree inner_type, outer_type;
6793
6794 if (!CONVERT_EXPR_P (arg0))
6795 return NULL_TREE;
6796
6797 outer_type = TREE_TYPE (arg0);
6798 arg0_inner = TREE_OPERAND (arg0, 0);
6799 inner_type = TREE_TYPE (arg0_inner);
6800
6801 #ifdef HAVE_canonicalize_funcptr_for_compare
6802 /* Disable this optimization if we're casting a function pointer
6803 type on targets that require function pointer canonicalization. */
6804 if (HAVE_canonicalize_funcptr_for_compare
6805 && TREE_CODE (inner_type) == POINTER_TYPE
6806 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6807 return NULL_TREE;
6808 #endif
6809
6810 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6811 return NULL_TREE;
6812
6813 if (TREE_CODE (arg1) != INTEGER_CST
6814 && !(CONVERT_EXPR_P (arg1)
6815 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6816 return NULL_TREE;
6817
6818 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6819 && code != NE_EXPR
6820 && code != EQ_EXPR)
6821 return NULL_TREE;
6822
6823 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6824 return NULL_TREE;
6825
6826 if (TREE_CODE (arg1) == INTEGER_CST)
6827 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6828 TREE_OVERFLOW (arg1));
6829 else
6830 arg1 = fold_convert_loc (loc, inner_type, arg1);
6831
6832 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6833 }
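
/* Editorial example (f hypothetical): a conversion that changes only
   the signedness is dropped for equality comparisons,

     int f (unsigned u) { return (int) u == 5; }

   folding to the unsigned comparison  u == 5u,  which is valid because
   the conversion does not change the compared bit pattern.  */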
6834
6835
6836 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6837 means A >= Y && A != MAX, but in this case we know that
6838 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6839
6840 static tree
6841 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6842 {
6843 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6844
6845 if (TREE_CODE (bound) == LT_EXPR)
6846 a = TREE_OPERAND (bound, 0);
6847 else if (TREE_CODE (bound) == GT_EXPR)
6848 a = TREE_OPERAND (bound, 1);
6849 else
6850 return NULL_TREE;
6851
6852 typea = TREE_TYPE (a);
6853 if (!INTEGRAL_TYPE_P (typea)
6854 && !POINTER_TYPE_P (typea))
6855 return NULL_TREE;
6856
6857 if (TREE_CODE (ineq) == LT_EXPR)
6858 {
6859 a1 = TREE_OPERAND (ineq, 1);
6860 y = TREE_OPERAND (ineq, 0);
6861 }
6862 else if (TREE_CODE (ineq) == GT_EXPR)
6863 {
6864 a1 = TREE_OPERAND (ineq, 0);
6865 y = TREE_OPERAND (ineq, 1);
6866 }
6867 else
6868 return NULL_TREE;
6869
6870 if (TREE_TYPE (a1) != typea)
6871 return NULL_TREE;
6872
6873 if (POINTER_TYPE_P (typea))
6874 {
6875 /* Convert the pointer types into integers before taking the difference. */
6876 tree ta = fold_convert_loc (loc, ssizetype, a);
6877 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6878 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6879 }
6880 else
6881 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6882
6883 if (!diff || !integer_onep (diff))
6884 return NULL_TREE;
6885
6886 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6887 }
6888
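/* Editorial note (not part of GCC): a worked case of the fold above.
   With A = i, X = n and Y = m, "i < n && i + 1 > m" becomes
   "i < n && i >= m".  In general i + 1 > m means i >= m && i != MAX
   (because i + 1 wraps at MAX), but the bound i < n <= MAX already
   excludes i == MAX, so the sharper form is safe.  */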
6889 /* Fold a sum or difference of at least one multiplication.
6890 Returns the folded tree or NULL if no simplification could be made. */
6891
6892 static tree
6893 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6894 tree arg0, tree arg1)
6895 {
6896 tree arg00, arg01, arg10, arg11;
6897 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6898
6899 /* (A * C) +- (B * C) -> (A+-B) * C.
6900 (A * C) +- A -> A * (C+-1).
6901 We are most concerned about the case where C is a constant,
6902 but other combinations show up during loop reduction. Since
6903 it is not difficult, try all four possibilities. */
6904
6905 if (TREE_CODE (arg0) == MULT_EXPR)
6906 {
6907 arg00 = TREE_OPERAND (arg0, 0);
6908 arg01 = TREE_OPERAND (arg0, 1);
6909 }
6910 else if (TREE_CODE (arg0) == INTEGER_CST)
6911 {
6912 arg00 = build_one_cst (type);
6913 arg01 = arg0;
6914 }
6915 else
6916 {
6917 /* We cannot generate constant 1 for fract. */
6918 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6919 return NULL_TREE;
6920 arg00 = arg0;
6921 arg01 = build_one_cst (type);
6922 }
6923 if (TREE_CODE (arg1) == MULT_EXPR)
6924 {
6925 arg10 = TREE_OPERAND (arg1, 0);
6926 arg11 = TREE_OPERAND (arg1, 1);
6927 }
6928 else if (TREE_CODE (arg1) == INTEGER_CST)
6929 {
6930 arg10 = build_one_cst (type);
6931 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6932 the purpose of this canonicalization. */
6933 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6934 && negate_expr_p (arg1)
6935 && code == PLUS_EXPR)
6936 {
6937 arg11 = negate_expr (arg1);
6938 code = MINUS_EXPR;
6939 }
6940 else
6941 arg11 = arg1;
6942 }
6943 else
6944 {
6945 /* We cannot generate constant 1 for fract. */
6946 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6947 return NULL_TREE;
6948 arg10 = arg1;
6949 arg11 = build_one_cst (type);
6950 }
6951 same = NULL_TREE;
6952
6953 if (operand_equal_p (arg01, arg11, 0))
6954 same = arg01, alt0 = arg00, alt1 = arg10;
6955 else if (operand_equal_p (arg00, arg10, 0))
6956 same = arg00, alt0 = arg01, alt1 = arg11;
6957 else if (operand_equal_p (arg00, arg11, 0))
6958 same = arg00, alt0 = arg01, alt1 = arg10;
6959 else if (operand_equal_p (arg01, arg10, 0))
6960 same = arg01, alt0 = arg00, alt1 = arg11;
6961
6962 /* No identical multiplicands; see if we can find a common
6963 power-of-two factor in non-power-of-two multiplies. This
6964 can help in multi-dimensional array access. */
6965 else if (tree_fits_shwi_p (arg01)
6966 && tree_fits_shwi_p (arg11))
6967 {
6968 HOST_WIDE_INT int01, int11, tmp;
6969 bool swap = false;
6970 tree maybe_same;
6971 int01 = tree_to_shwi (arg01);
6972 int11 = tree_to_shwi (arg11);
6973
6974 /* Move min of absolute values to int11. */
6975 if (absu_hwi (int01) < absu_hwi (int11))
6976 {
6977 tmp = int01, int01 = int11, int11 = tmp;
6978 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6979 maybe_same = arg01;
6980 swap = true;
6981 }
6982 else
6983 maybe_same = arg11;
6984
6985 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6986 /* The remainder should not be a constant, otherwise we
6987 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6988 increased the number of multiplications necessary. */
6989 && TREE_CODE (arg10) != INTEGER_CST)
6990 {
6991 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6992 build_int_cst (TREE_TYPE (arg00),
6993 int01 / int11));
6994 alt1 = arg10;
6995 same = maybe_same;
6996 if (swap)
6997 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6998 }
6999 }
7000
7001 if (same)
7002 return fold_build2_loc (loc, MULT_EXPR, type,
7003 fold_build2_loc (loc, code, type,
7004 fold_convert_loc (loc, type, alt0),
7005 fold_convert_loc (loc, type, alt1)),
7006 fold_convert_loc (loc, type, same));
7007
7008 return NULL_TREE;
7009 }
7010
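/* Editorial note (not part of GCC): the canonicalizations above on
   concrete source, all names illustrative:

     a * 4 + b * 4  ->  (a + b) * 4      identical multiplicands
     a * 4 + a      ->  a * (4 + 1)      the (A * C) +- A case
     i * 6 + j * 2  ->  (i * 3 + j) * 2  common power-of-two factor,
                                         detected via exact_log2.  */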
7011 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7012 specified by EXPR into the buffer PTR of length LEN bytes.
7013 Return the number of bytes placed in the buffer, or zero
7014 upon failure. */
7015
7016 static int
7017 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7018 {
7019 tree type = TREE_TYPE (expr);
7020 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7021 int byte, offset, word, words;
7022 unsigned char value;
7023
7024 if ((off == -1 && total_bytes > len)
7025 || off >= total_bytes)
7026 return 0;
7027 if (off == -1)
7028 off = 0;
7029 words = total_bytes / UNITS_PER_WORD;
7030
7031 for (byte = 0; byte < total_bytes; byte++)
7032 {
7033 int bitpos = byte * BITS_PER_UNIT;
7034 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7035 number of bytes. */
7036 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7037
7038 if (total_bytes > UNITS_PER_WORD)
7039 {
7040 word = byte / UNITS_PER_WORD;
7041 if (WORDS_BIG_ENDIAN)
7042 word = (words - 1) - word;
7043 offset = word * UNITS_PER_WORD;
7044 if (BYTES_BIG_ENDIAN)
7045 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7046 else
7047 offset += byte % UNITS_PER_WORD;
7048 }
7049 else
7050 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7051 if (offset >= off
7052 && offset - off < len)
7053 ptr[offset - off] = value;
7054 }
7055 return MIN (len, total_bytes - off);
7056 }
7057
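/* Editorial sketch (not part of GCC): the byte-placement logic above,
   reduced to plain C for a 4-byte value, assuming a 32-bit unsigned
   int and ignoring the multi-word (UNITS_PER_WORD) swizzling.
   "target_big_endian" stands in for BYTES_BIG_ENDIAN; the function
   name is hypothetical and illustrative only.  */

static void
encode_u32_example (unsigned int v, unsigned char buf[4],
                    int target_big_endian)
{
  int byte;
  for (byte = 0; byte < 4; byte++)
    {
      /* Bits [8*byte, 8*byte + 8) of the value...  */
      unsigned char value = (v >> (byte * 8)) & 0xff;
      /* ...land wherever the target's byte order dictates.  */
      int offset = target_big_endian ? 3 - byte : byte;
      buf[offset] = value;
    }
}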
7058
7059 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7060 specified by EXPR into the buffer PTR of length LEN bytes.
7061 Return the number of bytes placed in the buffer, or zero
7062 upon failure. */
7063
7064 static int
7065 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7066 {
7067 tree type = TREE_TYPE (expr);
7068 machine_mode mode = TYPE_MODE (type);
7069 int total_bytes = GET_MODE_SIZE (mode);
7070 FIXED_VALUE_TYPE value;
7071 tree i_value, i_type;
7072
7073 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7074 return 0;
7075
7076 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7077
7078 if (NULL_TREE == i_type
7079 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7080 return 0;
7081
7082 value = TREE_FIXED_CST (expr);
7083 i_value = double_int_to_tree (i_type, value.data);
7084
7085 return native_encode_int (i_value, ptr, len, off);
7086 }
7087
7088
7089 /* Subroutine of native_encode_expr. Encode the REAL_CST
7090 specified by EXPR into the buffer PTR of length LEN bytes.
7091 Return the number of bytes placed in the buffer, or zero
7092 upon failure. */
7093
7094 static int
7095 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7096 {
7097 tree type = TREE_TYPE (expr);
7098 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7099 int byte, offset, word, words, bitpos;
7100 unsigned char value;
7101
7102 /* There are always 32 bits in each long, no matter the size of
7103 the host's long. We handle floating point representations with
7104 up to 192 bits. */
7105 long tmp[6];
7106
7107 if ((off == -1 && total_bytes > len)
7108 || off >= total_bytes)
7109 return 0;
7110 if (off == -1)
7111 off = 0;
7112 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7113
7114 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7115
7116 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7117 bitpos += BITS_PER_UNIT)
7118 {
7119 byte = (bitpos / BITS_PER_UNIT) & 3;
7120 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7121
7122 if (UNITS_PER_WORD < 4)
7123 {
7124 word = byte / UNITS_PER_WORD;
7125 if (WORDS_BIG_ENDIAN)
7126 word = (words - 1) - word;
7127 offset = word * UNITS_PER_WORD;
7128 if (BYTES_BIG_ENDIAN)
7129 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7130 else
7131 offset += byte % UNITS_PER_WORD;
7132 }
7133 else
7134 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7135 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7136 if (offset >= off
7137 && offset - off < len)
7138 ptr[offset - off] = value;
7139 }
7140 return MIN (len, total_bytes - off);
7141 }
7142
7143 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7144 specified by EXPR into the buffer PTR of length LEN bytes.
7145 Return the number of bytes placed in the buffer, or zero
7146 upon failure. */
7147
7148 static int
7149 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7150 {
7151 int rsize, isize;
7152 tree part;
7153
7154 part = TREE_REALPART (expr);
7155 rsize = native_encode_expr (part, ptr, len, off);
7156 if (off == -1
7157 && rsize == 0)
7158 return 0;
7159 part = TREE_IMAGPART (expr);
7160 if (off != -1)
7161 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7162 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7163 if (off == -1
7164 && isize != rsize)
7165 return 0;
7166 return rsize + isize;
7167 }
7168
7169
7170 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7171 specified by EXPR into the buffer PTR of length LEN bytes.
7172 Return the number of bytes placed in the buffer, or zero
7173 upon failure. */
7174
7175 static int
7176 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7177 {
7178 unsigned i, count;
7179 int size, offset;
7180 tree itype, elem;
7181
7182 offset = 0;
7183 count = VECTOR_CST_NELTS (expr);
7184 itype = TREE_TYPE (TREE_TYPE (expr));
7185 size = GET_MODE_SIZE (TYPE_MODE (itype));
7186 for (i = 0; i < count; i++)
7187 {
7188 if (off >= size)
7189 {
7190 off -= size;
7191 continue;
7192 }
7193 elem = VECTOR_CST_ELT (expr, i);
7194 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7195 if ((off == -1 && res != size)
7196 || res == 0)
7197 return 0;
7198 offset += res;
7199 if (offset >= len)
7200 return offset;
7201 if (off != -1)
7202 off = 0;
7203 }
7204 return offset;
7205 }
7206
7207
7208 /* Subroutine of native_encode_expr. Encode the STRING_CST
7209 specified by EXPR into the buffer PTR of length LEN bytes.
7210 Return the number of bytes placed in the buffer, or zero
7211 upon failure. */
7212
7213 static int
7214 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7215 {
7216 tree type = TREE_TYPE (expr);
7217 HOST_WIDE_INT total_bytes;
7218
7219 if (TREE_CODE (type) != ARRAY_TYPE
7220 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7221 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7222 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7223 return 0;
7224 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7225 if ((off == -1 && total_bytes > len)
7226 || off >= total_bytes)
7227 return 0;
7228 if (off == -1)
7229 off = 0;
7230 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7231 {
7232 int written = 0;
7233 if (off < TREE_STRING_LENGTH (expr))
7234 {
7235 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7236 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7237 }
7238 memset (ptr + written, 0,
7239 MIN (total_bytes - written, len - written));
7240 }
7241 else
7242 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7243 return MIN (total_bytes - off, len);
7244 }
7245
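/* Editorial note (not part of GCC): for char s[8] = "hi"; the type
   size is 8 but TREE_STRING_LENGTH is only 3 ("h", "i", NUL), so the
   branch above copies 3 bytes and zero-fills the remaining 5, which
   matches how the object is initialized at run time.  */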
7246
7247 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7248 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7249 buffer PTR of length LEN bytes. If OFF is not -1 then start
7250 the encoding at byte offset OFF and encode at most LEN bytes.
7251 Return the number of bytes placed in the buffer, or zero upon failure. */
7252
7253 int
7254 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7255 {
7256 switch (TREE_CODE (expr))
7257 {
7258 case INTEGER_CST:
7259 return native_encode_int (expr, ptr, len, off);
7260
7261 case REAL_CST:
7262 return native_encode_real (expr, ptr, len, off);
7263
7264 case FIXED_CST:
7265 return native_encode_fixed (expr, ptr, len, off);
7266
7267 case COMPLEX_CST:
7268 return native_encode_complex (expr, ptr, len, off);
7269
7270 case VECTOR_CST:
7271 return native_encode_vector (expr, ptr, len, off);
7272
7273 case STRING_CST:
7274 return native_encode_string (expr, ptr, len, off);
7275
7276 default:
7277 return 0;
7278 }
7279 }
7280
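/* Editorial sketch (not part of GCC): how the encode/interpret entry
   points pair up.  Passing -1 for OFF requests the whole object;
   fold_view_convert_expr below does essentially this.  The helper
   name is hypothetical.  */

static tree
reinterpret_constant_example (tree type, tree cst)
{
  unsigned char buf[64];
  int len = native_encode_expr (cst, buf, sizeof (buf), -1);
  if (len == 0)
    return NULL_TREE;
  /* Reread the same target-order bytes as a constant of TYPE, e.g.
     the bits of a REAL_CST as an INTEGER_CST.  */
  return native_interpret_expr (type, buf, len);
}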
7281
7282 /* Subroutine of native_interpret_expr. Interpret the contents of
7283 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7284 If the buffer cannot be interpreted, return NULL_TREE. */
7285
7286 static tree
7287 native_interpret_int (tree type, const unsigned char *ptr, int len)
7288 {
7289 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7290
7291 if (total_bytes > len
7292 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7293 return NULL_TREE;
7294
7295 wide_int result = wi::from_buffer (ptr, total_bytes);
7296
7297 return wide_int_to_tree (type, result);
7298 }
7299
7300
7301 /* Subroutine of native_interpret_expr. Interpret the contents of
7302 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7303 If the buffer cannot be interpreted, return NULL_TREE. */
7304
7305 static tree
7306 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7307 {
7308 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7309 double_int result;
7310 FIXED_VALUE_TYPE fixed_value;
7311
7312 if (total_bytes > len
7313 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7314 return NULL_TREE;
7315
7316 result = double_int::from_buffer (ptr, total_bytes);
7317 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7318
7319 return build_fixed (type, fixed_value);
7320 }
7321
7322
7323 /* Subroutine of native_interpret_expr. Interpret the contents of
7324 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7325 If the buffer cannot be interpreted, return NULL_TREE. */
7326
7327 static tree
7328 native_interpret_real (tree type, const unsigned char *ptr, int len)
7329 {
7330 machine_mode mode = TYPE_MODE (type);
7331 int total_bytes = GET_MODE_SIZE (mode);
7332 int byte, offset, word, words, bitpos;
7333 unsigned char value;
7334 /* There are always 32 bits in each long, no matter the size of
7335 the host's long. We handle floating point representations with
7336 up to 192 bits. */
7337 REAL_VALUE_TYPE r;
7338 long tmp[6];
7339
7340 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7341 if (total_bytes > len || total_bytes > 24)
7342 return NULL_TREE;
7343 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7344
7345 memset (tmp, 0, sizeof (tmp));
7346 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7347 bitpos += BITS_PER_UNIT)
7348 {
7349 byte = (bitpos / BITS_PER_UNIT) & 3;
7350 if (UNITS_PER_WORD < 4)
7351 {
7352 word = byte / UNITS_PER_WORD;
7353 if (WORDS_BIG_ENDIAN)
7354 word = (words - 1) - word;
7355 offset = word * UNITS_PER_WORD;
7356 if (BYTES_BIG_ENDIAN)
7357 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7358 else
7359 offset += byte % UNITS_PER_WORD;
7360 }
7361 else
7362 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7363 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7364
7365 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7366 }
7367
7368 real_from_target (&r, tmp, mode);
7369 return build_real (type, r);
7370 }
7371
7372
7373 /* Subroutine of native_interpret_expr. Interpret the contents of
7374 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7375 If the buffer cannot be interpreted, return NULL_TREE. */
7376
7377 static tree
7378 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7379 {
7380 tree etype, rpart, ipart;
7381 int size;
7382
7383 etype = TREE_TYPE (type);
7384 size = GET_MODE_SIZE (TYPE_MODE (etype));
7385 if (size * 2 > len)
7386 return NULL_TREE;
7387 rpart = native_interpret_expr (etype, ptr, size);
7388 if (!rpart)
7389 return NULL_TREE;
7390 ipart = native_interpret_expr (etype, ptr+size, size);
7391 if (!ipart)
7392 return NULL_TREE;
7393 return build_complex (type, rpart, ipart);
7394 }
7395
7396
7397 /* Subroutine of native_interpret_expr. Interpret the contents of
7398 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7399 If the buffer cannot be interpreted, return NULL_TREE. */
7400
7401 static tree
7402 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7403 {
7404 tree etype, elem;
7405 int i, size, count;
7406 tree *elements;
7407
7408 etype = TREE_TYPE (type);
7409 size = GET_MODE_SIZE (TYPE_MODE (etype));
7410 count = TYPE_VECTOR_SUBPARTS (type);
7411 if (size * count > len)
7412 return NULL_TREE;
7413
7414 elements = XALLOCAVEC (tree, count);
7415 for (i = count - 1; i >= 0; i--)
7416 {
7417 elem = native_interpret_expr (etype, ptr+(i*size), size);
7418 if (!elem)
7419 return NULL_TREE;
7420 elements[i] = elem;
7421 }
7422 return build_vector (type, elements);
7423 }
7424
7425
7426 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7427 the buffer PTR of length LEN as a constant of type TYPE. For
7428 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7429 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7430 return NULL_TREE. */
7431
7432 tree
7433 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7434 {
7435 switch (TREE_CODE (type))
7436 {
7437 case INTEGER_TYPE:
7438 case ENUMERAL_TYPE:
7439 case BOOLEAN_TYPE:
7440 case POINTER_TYPE:
7441 case REFERENCE_TYPE:
7442 return native_interpret_int (type, ptr, len);
7443
7444 case REAL_TYPE:
7445 return native_interpret_real (type, ptr, len);
7446
7447 case FIXED_POINT_TYPE:
7448 return native_interpret_fixed (type, ptr, len);
7449
7450 case COMPLEX_TYPE:
7451 return native_interpret_complex (type, ptr, len);
7452
7453 case VECTOR_TYPE:
7454 return native_interpret_vector (type, ptr, len);
7455
7456 default:
7457 return NULL_TREE;
7458 }
7459 }
7460
7461 /* Returns true if we can interpret the contents of a native encoding
7462 as TYPE. */
7463
7464 static bool
7465 can_native_interpret_type_p (tree type)
7466 {
7467 switch (TREE_CODE (type))
7468 {
7469 case INTEGER_TYPE:
7470 case ENUMERAL_TYPE:
7471 case BOOLEAN_TYPE:
7472 case POINTER_TYPE:
7473 case REFERENCE_TYPE:
7474 case FIXED_POINT_TYPE:
7475 case REAL_TYPE:
7476 case COMPLEX_TYPE:
7477 case VECTOR_TYPE:
7478 return true;
7479 default:
7480 return false;
7481 }
7482 }
7483
7484 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7485 TYPE at compile-time. If we're unable to perform the conversion
7486 return NULL_TREE. */
7487
7488 static tree
7489 fold_view_convert_expr (tree type, tree expr)
7490 {
7491 /* We support up to 512-bit values (for V8DFmode). */
7492 unsigned char buffer[64];
7493 int len;
7494
7495 /* Check that the host and target are sane. */
7496 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7497 return NULL_TREE;
7498
7499 len = native_encode_expr (expr, buffer, sizeof (buffer));
7500 if (len == 0)
7501 return NULL_TREE;
7502
7503 return native_interpret_expr (type, buffer, len);
7504 }
7505
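/* Editorial note (not part of GCC): a concrete result of the folding
   above.  On a target with 32-bit float and int, the constant
   VIEW_CONVERT_EXPR<int>(1.0f) encodes the IEEE-754 bit pattern of
   1.0f and rereads it as an integer, i.e. it folds to 0x3f800000.  */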
7506 /* Build an expression for the address of T. Folds away INDIRECT_REF
7507 to avoid confusing the gimplify process. */
7508
7509 tree
7510 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7511 {
7512 /* The size of the object is not relevant when talking about its address. */
7513 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7514 t = TREE_OPERAND (t, 0);
7515
7516 if (TREE_CODE (t) == INDIRECT_REF)
7517 {
7518 t = TREE_OPERAND (t, 0);
7519
7520 if (TREE_TYPE (t) != ptrtype)
7521 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7522 }
7523 else if (TREE_CODE (t) == MEM_REF
7524 && integer_zerop (TREE_OPERAND (t, 1)))
7525 return TREE_OPERAND (t, 0);
7526 else if (TREE_CODE (t) == MEM_REF
7527 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7528 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7529 TREE_OPERAND (t, 0),
7530 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7531 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7532 {
7533 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7534
7535 if (TREE_TYPE (t) != ptrtype)
7536 t = fold_convert_loc (loc, ptrtype, t);
7537 }
7538 else
7539 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7540
7541 return t;
7542 }
7543
7544 /* Build an expression for the address of T. */
7545
7546 tree
7547 build_fold_addr_expr_loc (location_t loc, tree t)
7548 {
7549 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7550
7551 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7552 }
7553
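/* Editorial note (not part of GCC): examples of what the two address
   builders above fold away.  &*p collapses to p (the INDIRECT_REF
   case), &MEM_REF[p, 0] collapses to p, and &VIEW_CONVERT_EXPR<T>(x)
   becomes a cast of &x, so gimplification never sees an ADDR_EXPR
   wrapped around these forms.  */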
7554 static bool vec_cst_ctor_to_array (tree, tree *);
7555
7556 /* Fold a unary expression of code CODE and type TYPE with operand
7557 OP0. Return the folded expression if folding is successful.
7558 Otherwise, return NULL_TREE. */
7559
7560 tree
7561 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7562 {
7563 tree tem;
7564 tree arg0;
7565 enum tree_code_class kind = TREE_CODE_CLASS (code);
7566
7567 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7568 && TREE_CODE_LENGTH (code) == 1);
7569
7570 tem = generic_simplify (loc, code, type, op0);
7571 if (tem)
7572 return tem;
7573
7574 arg0 = op0;
7575 if (arg0)
7576 {
7577 if (CONVERT_EXPR_CODE_P (code)
7578 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7579 {
7580 /* Don't use STRIP_NOPS, because signedness of argument type
7581 matters. */
7582 STRIP_SIGN_NOPS (arg0);
7583 }
7584 else
7585 {
7586 /* Strip any conversions that don't change the mode. This
7587 is safe for every expression, except for a comparison
7588 expression because its signedness is derived from its
7589 operands.
7590
7591 Note that this is done as an internal manipulation within
7592 the constant folder, in order to find the simplest
7593 representation of the arguments so that their form can be
7594 studied. In any case, the appropriate type conversions
7595 should be put back in the tree that will get out of the
7596 constant folder. */
7597 STRIP_NOPS (arg0);
7598 }
7599 }
7600
7601 if (TREE_CODE_CLASS (code) == tcc_unary)
7602 {
7603 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7604 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7605 fold_build1_loc (loc, code, type,
7606 fold_convert_loc (loc, TREE_TYPE (op0),
7607 TREE_OPERAND (arg0, 1))));
7608 else if (TREE_CODE (arg0) == COND_EXPR)
7609 {
7610 tree arg01 = TREE_OPERAND (arg0, 1);
7611 tree arg02 = TREE_OPERAND (arg0, 2);
7612 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7613 arg01 = fold_build1_loc (loc, code, type,
7614 fold_convert_loc (loc,
7615 TREE_TYPE (op0), arg01));
7616 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7617 arg02 = fold_build1_loc (loc, code, type,
7618 fold_convert_loc (loc,
7619 TREE_TYPE (op0), arg02));
7620 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7621 arg01, arg02);
7622
7623 /* If this was a conversion, and all we did was move it
7624 inside the COND_EXPR, bring it back out. But leave it if
7625 it is a conversion from integer to integer and the
7626 result precision is no wider than a word since such a
7627 conversion is cheap and may be optimized away by combine,
7628 while it couldn't if it were outside the COND_EXPR. Then return
7629 so we don't get into an infinite recursion loop taking the
7630 conversion out and then back in. */
7631
7632 if ((CONVERT_EXPR_CODE_P (code)
7633 || code == NON_LVALUE_EXPR)
7634 && TREE_CODE (tem) == COND_EXPR
7635 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7636 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7637 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7638 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7639 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7640 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7641 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7642 && (INTEGRAL_TYPE_P
7643 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7644 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7645 || flag_syntax_only))
7646 tem = build1_loc (loc, code, type,
7647 build3 (COND_EXPR,
7648 TREE_TYPE (TREE_OPERAND
7649 (TREE_OPERAND (tem, 1), 0)),
7650 TREE_OPERAND (tem, 0),
7651 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7652 TREE_OPERAND (TREE_OPERAND (tem, 2),
7653 0)));
7654 return tem;
7655 }
7656 }
7657
7658 switch (code)
7659 {
7660 case NON_LVALUE_EXPR:
7661 if (!maybe_lvalue_p (op0))
7662 return fold_convert_loc (loc, type, op0);
7663 return NULL_TREE;
7664
7665 CASE_CONVERT:
7666 case FLOAT_EXPR:
7667 case FIX_TRUNC_EXPR:
7668 if (COMPARISON_CLASS_P (op0))
7669 {
7670 /* If we have (type) (a CMP b) and type is an integral type, return
7671 new expression involving the new type. Canonicalize
7672 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7673 non-integral type.
7674 Do not fold the result, as that would not simplify further;
7675 folding again would also result in infinite recursion. */
7676 if (TREE_CODE (type) == BOOLEAN_TYPE)
7677 return build2_loc (loc, TREE_CODE (op0), type,
7678 TREE_OPERAND (op0, 0),
7679 TREE_OPERAND (op0, 1));
7680 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7681 && TREE_CODE (type) != VECTOR_TYPE)
7682 return build3_loc (loc, COND_EXPR, type, op0,
7683 constant_boolean_node (true, type),
7684 constant_boolean_node (false, type));
7685 }
7686
7687 /* Handle (T *)&A.B.C for A being of type T and B and C
7688 living at offset zero. This occurs frequently in
7689 C++ upcasting and then accessing the base. */
7690 if (TREE_CODE (op0) == ADDR_EXPR
7691 && POINTER_TYPE_P (type)
7692 && handled_component_p (TREE_OPERAND (op0, 0)))
7693 {
7694 HOST_WIDE_INT bitsize, bitpos;
7695 tree offset;
7696 machine_mode mode;
7697 int unsignedp, volatilep;
7698 tree base = TREE_OPERAND (op0, 0);
7699 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7700 &mode, &unsignedp, &volatilep, false);
7701 /* If the reference was to a (constant) zero offset, we can use
7702 the address of the base if it has the same base type
7703 as the result type and the pointer type is unqualified. */
7704 if (! offset && bitpos == 0
7705 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7706 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7707 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7708 return fold_convert_loc (loc, type,
7709 build_fold_addr_expr_loc (loc, base));
7710 }
7711
7712 if (TREE_CODE (op0) == MODIFY_EXPR
7713 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7714 /* Detect assigning a bitfield. */
7715 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7716 && DECL_BIT_FIELD
7717 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7718 {
7719 /* Don't leave an assignment inside a conversion
7720 unless assigning a bitfield. */
7721 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7722 /* First do the assignment, then return converted constant. */
7723 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7724 TREE_NO_WARNING (tem) = 1;
7725 TREE_USED (tem) = 1;
7726 return tem;
7727 }
7728
7729 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7730 constants (if x has signed type, the sign bit cannot be set
7731 in c). This folds extension into the BIT_AND_EXPR.
7732 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7733 very likely don't have maximal range for their precision and this
7734 transformation effectively doesn't preserve non-maximal ranges. */
7735 if (TREE_CODE (type) == INTEGER_TYPE
7736 && TREE_CODE (op0) == BIT_AND_EXPR
7737 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7738 {
7739 tree and_expr = op0;
7740 tree and0 = TREE_OPERAND (and_expr, 0);
7741 tree and1 = TREE_OPERAND (and_expr, 1);
7742 int change = 0;
7743
7744 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7745 || (TYPE_PRECISION (type)
7746 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7747 change = 1;
7748 else if (TYPE_PRECISION (TREE_TYPE (and1))
7749 <= HOST_BITS_PER_WIDE_INT
7750 && tree_fits_uhwi_p (and1))
7751 {
7752 unsigned HOST_WIDE_INT cst;
7753
7754 cst = tree_to_uhwi (and1);
7755 cst &= HOST_WIDE_INT_M1U
7756 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7757 change = (cst == 0);
7758 #ifdef LOAD_EXTEND_OP
7759 if (change
7760 && !flag_syntax_only
7761 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7762 == ZERO_EXTEND))
7763 {
7764 tree uns = unsigned_type_for (TREE_TYPE (and0));
7765 and0 = fold_convert_loc (loc, uns, and0);
7766 and1 = fold_convert_loc (loc, uns, and1);
7767 }
7768 #endif
7769 }
7770 if (change)
7771 {
7772 tem = force_fit_type (type, wi::to_widest (and1), 0,
7773 TREE_OVERFLOW (and1));
7774 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7775 fold_convert_loc (loc, type, and0), tem);
7776 }
7777 }
7778
7779 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7780 when one of the new casts will fold away. Conservatively we assume
7781 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7782 if (POINTER_TYPE_P (type)
7783 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7784 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7785 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7786 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7787 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7788 {
7789 tree arg00 = TREE_OPERAND (arg0, 0);
7790 tree arg01 = TREE_OPERAND (arg0, 1);
7791
7792 return fold_build_pointer_plus_loc
7793 (loc, fold_convert_loc (loc, type, arg00), arg01);
7794 }
7795
7796 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7797 of the same precision, and X is an integer type not narrower than
7798 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7799 if (INTEGRAL_TYPE_P (type)
7800 && TREE_CODE (op0) == BIT_NOT_EXPR
7801 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7802 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7803 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7804 {
7805 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7806 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7807 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7808 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7809 fold_convert_loc (loc, type, tem));
7810 }
7811
7812 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7813 type of X and Y (integer types only). */
7814 if (INTEGRAL_TYPE_P (type)
7815 && TREE_CODE (op0) == MULT_EXPR
7816 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7817 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7818 {
7819 /* Be careful not to introduce new overflows. */
7820 tree mult_type;
7821 if (TYPE_OVERFLOW_WRAPS (type))
7822 mult_type = type;
7823 else
7824 mult_type = unsigned_type_for (type);
7825
7826 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7827 {
7828 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7829 fold_convert_loc (loc, mult_type,
7830 TREE_OPERAND (op0, 0)),
7831 fold_convert_loc (loc, mult_type,
7832 TREE_OPERAND (op0, 1)));
7833 return fold_convert_loc (loc, type, tem);
7834 }
7835 }
7836
7837 tem = fold_convert_const (code, type, arg0);
7838 return tem ? tem : NULL_TREE;
7839
7840 case ADDR_SPACE_CONVERT_EXPR:
7841 if (integer_zerop (arg0))
7842 return fold_convert_const (code, type, arg0);
7843 return NULL_TREE;
7844
7845 case FIXED_CONVERT_EXPR:
7846 tem = fold_convert_const (code, type, arg0);
7847 return tem ? tem : NULL_TREE;
7848
7849 case VIEW_CONVERT_EXPR:
7850 if (TREE_CODE (op0) == MEM_REF)
7851 return fold_build2_loc (loc, MEM_REF, type,
7852 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7853
7854 return fold_view_convert_expr (type, op0);
7855
7856 case NEGATE_EXPR:
7857 tem = fold_negate_expr (loc, arg0);
7858 if (tem)
7859 return fold_convert_loc (loc, type, tem);
7860 return NULL_TREE;
7861
7862 case ABS_EXPR:
7863 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7864 return fold_abs_const (arg0, type);
7865 /* Convert fabs((double)float) into (double)fabsf(float). */
7866 else if (TREE_CODE (arg0) == NOP_EXPR
7867 && TREE_CODE (type) == REAL_TYPE)
7868 {
7869 tree targ0 = strip_float_extensions (arg0);
7870 if (targ0 != arg0)
7871 return fold_convert_loc (loc, type,
7872 fold_build1_loc (loc, ABS_EXPR,
7873 TREE_TYPE (targ0),
7874 targ0));
7875 }
7876 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7877 else if (TREE_CODE (arg0) == ABS_EXPR)
7878 return arg0;
7879
7880 /* Strip sign ops from argument. */
7881 if (TREE_CODE (type) == REAL_TYPE)
7882 {
7883 tem = fold_strip_sign_ops (arg0);
7884 if (tem)
7885 return fold_build1_loc (loc, ABS_EXPR, type,
7886 fold_convert_loc (loc, type, tem));
7887 }
7888 return NULL_TREE;
7889
7890 case CONJ_EXPR:
7891 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7892 return fold_convert_loc (loc, type, arg0);
7893 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7894 {
7895 tree itype = TREE_TYPE (type);
7896 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7897 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7898 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7899 negate_expr (ipart));
7900 }
7901 if (TREE_CODE (arg0) == COMPLEX_CST)
7902 {
7903 tree itype = TREE_TYPE (type);
7904 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
7905 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
7906 return build_complex (type, rpart, negate_expr (ipart));
7907 }
7908 if (TREE_CODE (arg0) == CONJ_EXPR)
7909 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
7910 return NULL_TREE;
7911
7912 case BIT_NOT_EXPR:
7913 if (TREE_CODE (arg0) == INTEGER_CST)
7914 return fold_not_const (arg0, type);
7915 /* Convert ~ (-A) to A - 1. */
7916 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7917 return fold_build2_loc (loc, MINUS_EXPR, type,
7918 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
7919 build_int_cst (type, 1));
7920 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7921 else if (INTEGRAL_TYPE_P (type)
7922 && ((TREE_CODE (arg0) == MINUS_EXPR
7923 && integer_onep (TREE_OPERAND (arg0, 1)))
7924 || (TREE_CODE (arg0) == PLUS_EXPR
7925 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7926 return fold_build1_loc (loc, NEGATE_EXPR, type,
7927 fold_convert_loc (loc, type,
7928 TREE_OPERAND (arg0, 0)));
7929 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7930 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7931 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7932 fold_convert_loc (loc, type,
7933 TREE_OPERAND (arg0, 0)))))
7934 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7935 fold_convert_loc (loc, type,
7936 TREE_OPERAND (arg0, 1)));
7937 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7938 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7939 fold_convert_loc (loc, type,
7940 TREE_OPERAND (arg0, 1)))))
7941 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7942 fold_convert_loc (loc, type,
7943 TREE_OPERAND (arg0, 0)), tem);
7944 /* Perform BIT_NOT_EXPR on each element individually. */
7945 else if (TREE_CODE (arg0) == VECTOR_CST)
7946 {
7947 tree *elements;
7948 tree elem;
7949 unsigned count = VECTOR_CST_NELTS (arg0), i;
7950
7951 elements = XALLOCAVEC (tree, count);
7952 for (i = 0; i < count; i++)
7953 {
7954 elem = VECTOR_CST_ELT (arg0, i);
7955 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
7956 if (elem == NULL_TREE)
7957 break;
7958 elements[i] = elem;
7959 }
7960 if (i == count)
7961 return build_vector (type, elements);
7962 }
7963 else if (COMPARISON_CLASS_P (arg0)
7964 && (VECTOR_TYPE_P (type)
7965 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
7966 {
7967 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
7968 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
7969 HONOR_NANS (TYPE_MODE (op_type)));
7970 if (subcode != ERROR_MARK)
7971 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
7972 TREE_OPERAND (arg0, 1));
7973 }
7974
7975
7976 return NULL_TREE;
7977
7978 case TRUTH_NOT_EXPR:
7979 /* Note that the operand of this must be an int
7980 and its values must be 0 or 1.
7981 ("true" is a fixed value perhaps depending on the language,
7982 but we don't handle values other than 1 correctly yet.) */
7983 tem = fold_truth_not_expr (loc, arg0);
7984 if (!tem)
7985 return NULL_TREE;
7986 return fold_convert_loc (loc, type, tem);
7987
7988 case REALPART_EXPR:
7989 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7990 return fold_convert_loc (loc, type, arg0);
7991 if (TREE_CODE (arg0) == COMPLEX_CST)
7992 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
7993 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7994 {
7995 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7996 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7997 fold_build1_loc (loc, REALPART_EXPR, itype,
7998 TREE_OPERAND (arg0, 0)),
7999 fold_build1_loc (loc, REALPART_EXPR, itype,
8000 TREE_OPERAND (arg0, 1)));
8001 return fold_convert_loc (loc, type, tem);
8002 }
8003 if (TREE_CODE (arg0) == CONJ_EXPR)
8004 {
8005 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8006 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8007 TREE_OPERAND (arg0, 0));
8008 return fold_convert_loc (loc, type, tem);
8009 }
8010 if (TREE_CODE (arg0) == CALL_EXPR)
8011 {
8012 tree fn = get_callee_fndecl (arg0);
8013 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8014 switch (DECL_FUNCTION_CODE (fn))
8015 {
8016 CASE_FLT_FN (BUILT_IN_CEXPI):
8017 fn = mathfn_built_in (type, BUILT_IN_COS);
8018 if (fn)
8019 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8020 break;
8021
8022 default:
8023 break;
8024 }
8025 }
8026 return NULL_TREE;
8027
8028 case IMAGPART_EXPR:
8029 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8030 return build_zero_cst (type);
8031 if (TREE_CODE (arg0) == COMPLEX_CST)
8032 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8033 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8034 {
8035 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8036 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8037 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8038 TREE_OPERAND (arg0, 0)),
8039 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8040 TREE_OPERAND (arg0, 1)));
8041 return fold_convert_loc (loc, type, tem);
8042 }
8043 if (TREE_CODE (arg0) == CONJ_EXPR)
8044 {
8045 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8046 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8047 return fold_convert_loc (loc, type, negate_expr (tem));
8048 }
8049 if (TREE_CODE (arg0) == CALL_EXPR)
8050 {
8051 tree fn = get_callee_fndecl (arg0);
8052 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8053 switch (DECL_FUNCTION_CODE (fn))
8054 {
8055 CASE_FLT_FN (BUILT_IN_CEXPI):
8056 fn = mathfn_built_in (type, BUILT_IN_SIN);
8057 if (fn)
8058 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8059 break;
8060
8061 default:
8062 break;
8063 }
8064 }
8065 return NULL_TREE;
8066
8067 case INDIRECT_REF:
8068 /* Fold *&X to X if X is an lvalue. */
8069 if (TREE_CODE (op0) == ADDR_EXPR)
8070 {
8071 tree op00 = TREE_OPERAND (op0, 0);
8072 if ((TREE_CODE (op00) == VAR_DECL
8073 || TREE_CODE (op00) == PARM_DECL
8074 || TREE_CODE (op00) == RESULT_DECL)
8075 && !TREE_READONLY (op00))
8076 return op00;
8077 }
8078 return NULL_TREE;
8079
8080 case VEC_UNPACK_LO_EXPR:
8081 case VEC_UNPACK_HI_EXPR:
8082 case VEC_UNPACK_FLOAT_LO_EXPR:
8083 case VEC_UNPACK_FLOAT_HI_EXPR:
8084 {
8085 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8086 tree *elts;
8087 enum tree_code subcode;
8088
8089 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8090 if (TREE_CODE (arg0) != VECTOR_CST)
8091 return NULL_TREE;
8092
8093 elts = XALLOCAVEC (tree, nelts * 2);
8094 if (!vec_cst_ctor_to_array (arg0, elts))
8095 return NULL_TREE;
8096
8097 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8098 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8099 elts += nelts;
8100
8101 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8102 subcode = NOP_EXPR;
8103 else
8104 subcode = FLOAT_EXPR;
8105
8106 for (i = 0; i < nelts; i++)
8107 {
8108 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8109 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8110 return NULL_TREE;
8111 }
8112
8113 return build_vector (type, elts);
8114 }
8115
8116 case REDUC_MIN_EXPR:
8117 case REDUC_MAX_EXPR:
8118 case REDUC_PLUS_EXPR:
8119 {
8120 unsigned int nelts, i;
8121 tree *elts;
8122 enum tree_code subcode;
8123
8124 if (TREE_CODE (op0) != VECTOR_CST)
8125 return NULL_TREE;
8126 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8127
8128 elts = XALLOCAVEC (tree, nelts);
8129 if (!vec_cst_ctor_to_array (op0, elts))
8130 return NULL_TREE;
8131
8132 switch (code)
8133 {
8134 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8135 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8136 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8137 default: gcc_unreachable ();
8138 }
8139
8140 for (i = 1; i < nelts; i++)
8141 {
8142 elts[0] = const_binop (subcode, elts[0], elts[i]);
8143 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8144 return NULL_TREE;
8145 }
8146
8147 return elts[0];
8148 }
8149
8150 default:
8151 return NULL_TREE;
8152 } /* switch (code) */
8153 }
8154
8155
8156 /* If the operation was a conversion do _not_ mark a resulting constant
8157 with TREE_OVERFLOW if the original constant was not. These conversions
8158 have implementation defined behavior and retaining the TREE_OVERFLOW
8159 flag here would confuse later passes such as VRP. */
8160 tree
8161 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8162 tree type, tree op0)
8163 {
8164 tree res = fold_unary_loc (loc, code, type, op0);
8165 if (res
8166 && TREE_CODE (res) == INTEGER_CST
8167 && TREE_CODE (op0) == INTEGER_CST
8168 && CONVERT_EXPR_CODE_P (code))
8169 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8170
8171 return res;
8172 }
8173
8174 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8175 operands OP0 and OP1. LOC is the location of the resulting expression.
8176 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8177 Return the folded expression if folding is successful. Otherwise,
8178 return NULL_TREE. */
8179 static tree
8180 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8181 tree arg0, tree arg1, tree op0, tree op1)
8182 {
8183 tree tem;
8184
8185 /* We only do these simplifications if we are optimizing. */
8186 if (!optimize)
8187 return NULL_TREE;
8188
8189 /* Check for things like (A || B) && (A || C). We can convert this
8190 to A || (B && C). Note that either operator can be any of the four
8191 truth and/or operations and the transformation will still be
8192 valid. Also note that we only care about order for the
8193 ANDIF and ORIF operators. If B contains side effects, this
8194 might change the truth-value of A. */
8195 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8196 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8197 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8198 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8199 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8200 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8201 {
8202 tree a00 = TREE_OPERAND (arg0, 0);
8203 tree a01 = TREE_OPERAND (arg0, 1);
8204 tree a10 = TREE_OPERAND (arg1, 0);
8205 tree a11 = TREE_OPERAND (arg1, 1);
8206 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8207 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8208 && (code == TRUTH_AND_EXPR
8209 || code == TRUTH_OR_EXPR));
8210
8211 if (operand_equal_p (a00, a10, 0))
8212 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8213 fold_build2_loc (loc, code, type, a01, a11));
8214 else if (commutative && operand_equal_p (a00, a11, 0))
8215 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8216 fold_build2_loc (loc, code, type, a01, a10));
8217 else if (commutative && operand_equal_p (a01, a10, 0))
8218 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8219 fold_build2_loc (loc, code, type, a00, a11));
8220
8221 /* This case is tricky because we must either have commutative
8222 operators or else A10 must not have side-effects. */
8223
8224 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8225 && operand_equal_p (a01, a11, 0))
8226 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8227 fold_build2_loc (loc, code, type, a00, a10),
8228 a01);
8229 }
8230
8231 /* See if we can build a range comparison. */
8232 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8233 return tem;
8234
8235 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8236 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8237 {
8238 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8239 if (tem)
8240 return fold_build2_loc (loc, code, type, tem, arg1);
8241 }
8242
8243 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8244 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8245 {
8246 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8247 if (tem)
8248 return fold_build2_loc (loc, code, type, arg0, tem);
8249 }
8250
8251 /* Check for the possibility of merging component references. If our
8252 lhs is another similar operation, try to merge its rhs with our
8253 rhs. Then try to merge our lhs and rhs. */
8254 if (TREE_CODE (arg0) == code
8255 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8256 TREE_OPERAND (arg0, 1), arg1)))
8257 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8258
8259 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8260 return tem;
8261
8262 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8263 && (code == TRUTH_AND_EXPR
8264 || code == TRUTH_ANDIF_EXPR
8265 || code == TRUTH_OR_EXPR
8266 || code == TRUTH_ORIF_EXPR))
8267 {
8268 enum tree_code ncode, icode;
8269
8270 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8271 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8272 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8273
8274 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8275 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8276 We don't want to pack more than two leaves into a non-IF AND/OR
8277 expression.
8278 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8279 is not equal to IF-CODE, then we don't want to add the right-hand operand.
8280 If the inner right-hand side of the left-hand operand has
8281 side effects, or isn't simple, then we can't add to it,
8282 as otherwise we might destroy the if-sequence. */
8283 if (TREE_CODE (arg0) == icode
8284 && simple_operand_p_2 (arg1)
8285 /* Needed for sequence points to handle trappings, and
8286 side-effects. */
8287 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8288 {
8289 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8290 arg1);
8291 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8292 tem);
8293 }
8294 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8295 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8296 else if (TREE_CODE (arg1) == icode
8297 && simple_operand_p_2 (arg0)
8298 /* Needed for sequence points to handle trappings, and
8299 side-effects. */
8300 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8301 {
8302 tem = fold_build2_loc (loc, ncode, type,
8303 arg0, TREE_OPERAND (arg1, 0));
8304 return fold_build2_loc (loc, icode, type, tem,
8305 TREE_OPERAND (arg1, 1));
8306 }
8307 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8308 into (A OR B).
8309 For sequence point consistency, we need to check for trapping,
8310 and side-effects. */
8311 else if (code == icode && simple_operand_p_2 (arg0)
8312 && simple_operand_p_2 (arg1))
8313 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8314 }
8315
8316 return NULL_TREE;
8317 }
8318
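/* Editorial note (not part of GCC): the distribution rewrite above on
   concrete source.  Provided B has no side effects,

     (a || b) && (a || c)  ->  a || (b && c)

   and under LOGICAL_OP_NON_SHORT_CIRCUIT a chain such as
   (a && b) && c repacks as a && (b AND c) with a non-short-circuit
   inner AND, which is only done when the simple_operand_p_2 checks
   prove the packed operands cannot trap or have side effects.  */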
8319 /* Fold a binary expression of code CODE and type TYPE with operands
8320 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8321 Return the folded expression if folding is successful. Otherwise,
8322 return NULL_TREE. */
8323
8324 static tree
8325 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8326 {
8327 enum tree_code compl_code;
8328
8329 if (code == MIN_EXPR)
8330 compl_code = MAX_EXPR;
8331 else if (code == MAX_EXPR)
8332 compl_code = MIN_EXPR;
8333 else
8334 gcc_unreachable ();
8335
8336 /* MIN (MAX (a, b), b) == b. */
8337 if (TREE_CODE (op0) == compl_code
8338 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8339 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8340
8341 /* MIN (MAX (b, a), b) == b. */
8342 if (TREE_CODE (op0) == compl_code
8343 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8344 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8345 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8346
8347 /* MIN (a, MAX (a, b)) == a. */
8348 if (TREE_CODE (op1) == compl_code
8349 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8350 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8351 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8352
8353 /* MIN (a, MAX (b, a)) == a. */
8354 if (TREE_CODE (op1) == compl_code
8355 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8356 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8357 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8358
8359 return NULL_TREE;
8360 }
8361
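/* Editorial sketch (not part of GCC): the first identity above in
   plain C, for intuition only.  Since max (a, b) >= b always holds,
   min (max (a, b), b) is always b.  The function name is
   hypothetical.  */

static int
min_max_identity_example (int a, int b)
{
  int mx = a > b ? a : b;	/* MAX_EXPR (a, b) */
  int mn = mx < b ? mx : b;	/* MIN_EXPR (mx, b) */
  return mn == b;		/* always true */
}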
8362 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8363 by changing CODE to reduce the magnitude of constants involved in
8364 ARG0 of the comparison.
8365 Returns a canonicalized comparison tree if a simplification was
8366 possible, otherwise returns NULL_TREE.
8367 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8368 valid if signed overflow is undefined. */
8369
8370 static tree
8371 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8372 tree arg0, tree arg1,
8373 bool *strict_overflow_p)
8374 {
8375 enum tree_code code0 = TREE_CODE (arg0);
8376 tree t, cst0 = NULL_TREE;
8377 int sgn0;
8378 bool swap = false;
8379
8380 /* Match A +- CST code arg1 and CST code arg1. We can change the
8381 first form only if overflow is undefined. */
8382 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8383 /* In principle pointers also have undefined overflow behavior,
8384 but that causes problems elsewhere. */
8385 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8386 && (code0 == MINUS_EXPR
8387 || code0 == PLUS_EXPR)
8388 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8389 || code0 == INTEGER_CST))
8390 return NULL_TREE;
8391
8392 /* Identify the constant in arg0 and its sign. */
8393 if (code0 == INTEGER_CST)
8394 cst0 = arg0;
8395 else
8396 cst0 = TREE_OPERAND (arg0, 1);
8397 sgn0 = tree_int_cst_sgn (cst0);
8398
8399 /* Overflowed constants and zero will cause problems. */
8400 if (integer_zerop (cst0)
8401 || TREE_OVERFLOW (cst0))
8402 return NULL_TREE;
8403
8404 /* See if we can reduce the magnitude of the constant in
8405 arg0 by changing the comparison code. */
8406 if (code0 == INTEGER_CST)
8407 {
8408 /* CST <= arg1 -> CST-1 < arg1. */
8409 if (code == LE_EXPR && sgn0 == 1)
8410 code = LT_EXPR;
8411 /* -CST < arg1 -> -CST-1 <= arg1. */
8412 else if (code == LT_EXPR && sgn0 == -1)
8413 code = LE_EXPR;
8414 /* CST > arg1 -> CST-1 >= arg1. */
8415 else if (code == GT_EXPR && sgn0 == 1)
8416 code = GE_EXPR;
8417 /* -CST >= arg1 -> -CST-1 > arg1. */
8418 else if (code == GE_EXPR && sgn0 == -1)
8419 code = GT_EXPR;
8420 else
8421 return NULL_TREE;
8422 /* arg1 code' CST' might be more canonical. */
8423 swap = true;
8424 }
8425 else
8426 {
8427 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8428 if (code == LT_EXPR
8429 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8430 code = LE_EXPR;
8431 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8432 else if (code == GT_EXPR
8433 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8434 code = GE_EXPR;
8435 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8436 else if (code == LE_EXPR
8437 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8438 code = LT_EXPR;
8439 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8440 else if (code == GE_EXPR
8441 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8442 code = GT_EXPR;
8443 else
8444 return NULL_TREE;
8445 *strict_overflow_p = true;
8446 }
8447
8448 /* Now build the constant reduced in magnitude. But not if that
8449 would produce one outside of its type's range. */
8450 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8451 && ((sgn0 == 1
8452 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8453 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8454 || (sgn0 == -1
8455 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8456 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8457 /* We cannot swap the comparison here as that would cause us to
8458 endlessly recurse. */
8459 return NULL_TREE;
8460
8461 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8462 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8463 if (code0 != INTEGER_CST)
8464 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8465 t = fold_convert (TREE_TYPE (arg1), t);
8466
8467 /* If swapping might yield a more canonical form, do so. */
8468 if (swap)
8469 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8470 else
8471 return fold_build2_loc (loc, code, type, t, arg1);
8472 }
8473
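/* Editorial note (not part of GCC): concrete instances of the
   canonicalization above, valid only because signed overflow is
   assumed undefined:

     x - 5 < y   ->  x - 4 <= y     (A - CST < arg1 case)
     10 <= y     ->  y > 9          (constant lhs reduced, then
                                     swapped into second position)

   Each step shrinks the constant's magnitude by one, which is why the
   TYPE_MIN_VALUE/TYPE_MAX_VALUE guard must stop the recursion.  */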
8474 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8475 overflow further. Try to decrease the magnitude of constants involved
8476 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8477 and put sole constants at the second argument position.
8478 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8479
8480 static tree
8481 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8482 tree arg0, tree arg1)
8483 {
8484 tree t;
8485 bool strict_overflow_p;
8486 const char * const warnmsg = G_("assuming signed overflow does not occur "
8487 "when reducing constant in comparison");
8488
8489 /* Try canonicalization by simplifying arg0. */
8490 strict_overflow_p = false;
8491 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8492 &strict_overflow_p);
8493 if (t)
8494 {
8495 if (strict_overflow_p)
8496 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8497 return t;
8498 }
8499
8500 /* Try canonicalization by simplifying arg1 using the swapped
8501 comparison. */
8502 code = swap_tree_comparison (code);
8503 strict_overflow_p = false;
8504 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8505 &strict_overflow_p);
8506 if (t && strict_overflow_p)
8507 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8508 return t;
8509 }
8510
8511 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8512 space. This is used to avoid issuing overflow warnings for
8513 expressions like &p->x which cannot wrap. */
8514
8515 static bool
8516 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8517 {
8518 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8519 return true;
8520
8521 if (bitpos < 0)
8522 return true;
8523
8524 wide_int wi_offset;
8525 int precision = TYPE_PRECISION (TREE_TYPE (base));
8526 if (offset == NULL_TREE)
8527 wi_offset = wi::zero (precision);
8528 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8529 return true;
8530 else
8531 wi_offset = offset;
8532
8533 bool overflow;
8534 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8535 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8536 if (overflow)
8537 return true;
8538
8539 if (!wi::fits_uhwi_p (total))
8540 return true;
8541
8542 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8543 if (size <= 0)
8544 return true;
8545
8546 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8547 array. */
8548 if (TREE_CODE (base) == ADDR_EXPR)
8549 {
8550 HOST_WIDE_INT base_size;
8551
8552 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8553 if (base_size > 0 && size < base_size)
8554 size = base_size;
8555 }
8556
8557 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8558 }
8559
8560 /* Return the HOST_BITS_PER_WIDE_INT least significant bits of T, a
8561 sizetype-kind INTEGER_CST. This makes sure to properly sign-extend the
8562 constant. */
8563
8564 static HOST_WIDE_INT
8565 size_low_cst (const_tree t)
8566 {
8567 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8568 int prec = TYPE_PRECISION (TREE_TYPE (t));
8569 if (prec < HOST_BITS_PER_WIDE_INT)
8570 return sext_hwi (w, prec);
8571 return w;
8572 }
8573
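/* Editorial sketch (not part of GCC): what the sext_hwi call above
   computes, in plain C, assuming a 64-bit HOST_WIDE_INT.  The cast
   through unsigned avoids shifting into the sign bit, which would be
   undefined behavior on a signed left shift.  The function name is
   hypothetical.  */

static long long
sext_example (long long w, int prec)
{
  int shift = 64 - prec;
  /* Push the value's PREC-bit sign bit to the top, then arithmetic
     right shift to replicate it downwards.  */
  return (long long) ((unsigned long long) w << shift) >> shift;
}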
8574 /* Subroutine of fold_binary. This routine performs all of the
8575 transformations that are common to the equality/inequality
8576 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8577 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8578 fold_binary itself should go through fold_binary. Fold a comparison with
8579 tree code CODE and type TYPE with operands OP0 and OP1. Return
8580 the folded comparison or NULL_TREE. */
8581
8582 static tree
8583 fold_comparison (location_t loc, enum tree_code code, tree type,
8584 tree op0, tree op1)
8585 {
8586 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8587 tree arg0, arg1, tem;
8588
8589 arg0 = op0;
8590 arg1 = op1;
8591
8592 STRIP_SIGN_NOPS (arg0);
8593 STRIP_SIGN_NOPS (arg1);
8594
8595 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8596 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8597 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8598 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8599 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8600 && TREE_CODE (arg1) == INTEGER_CST
8601 && !TREE_OVERFLOW (arg1))
8602 {
8603 const enum tree_code
8604 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8605 tree const1 = TREE_OPERAND (arg0, 1);
8606 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8607 tree variable = TREE_OPERAND (arg0, 0);
8608 tree new_const = int_const_binop (reverse_op, const2, const1);
8609
8610 /* If the constant operation overflowed, this can be
8611 simplified as a comparison against INT_MAX/INT_MIN. */
8612 if (TREE_OVERFLOW (new_const)
8613 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8614 {
8615 int const1_sgn = tree_int_cst_sgn (const1);
8616 enum tree_code code2 = code;
8617
8618 /* Get the sign of the constant on the lhs if the
8619 operation were VARIABLE + CONST1. */
8620 if (TREE_CODE (arg0) == MINUS_EXPR)
8621 const1_sgn = -const1_sgn;
8622
8623 /* The sign of the constant determines if we overflowed
8624 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8625 Canonicalize to the INT_MIN overflow by swapping the comparison
8626 if necessary. */
8627 if (const1_sgn == -1)
8628 code2 = swap_tree_comparison (code);
8629
8630 /* We now can look at the canonicalized case
8631 VARIABLE + 1 CODE2 INT_MIN
8632 and decide on the result. */
8633 switch (code2)
8634 {
8635 case EQ_EXPR:
8636 case LT_EXPR:
8637 case LE_EXPR:
8638 return
8639 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8640
8641 case NE_EXPR:
8642 case GE_EXPR:
8643 case GT_EXPR:
8644 return
8645 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8646
8647 default:
8648 gcc_unreachable ();
8649 }
8650 }
8651 else
8652 {
8653 if (!equality_code)
8654 fold_overflow_warning ("assuming signed overflow does not occur "
8655 "when changing X +- C1 cmp C2 to "
8656 "X cmp C2 -+ C1",
8657 WARN_STRICT_OVERFLOW_COMPARISON);
8658 return fold_build2_loc (loc, code, type, variable, new_const);
8659 }
8660 }
8661
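      /* Illustration (editor's sketch, assuming 32-bit signed int):
           x + 7 < 10    folds to    x < 3
         since new_const = 10 - 7 does not overflow.  For
           x + 1 < INT_MIN
         new_const = INT_MIN - 1 overflows and const1_sgn is 1, so the
         canonical case "VARIABLE + 1 LT INT_MIN" applies and the whole
         comparison folds to false -- consistent with x + 1 never being
         able to drop below INT_MIN when overflow is undefined.  */
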
8662 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8663 if (TREE_CODE (arg0) == MINUS_EXPR
8664 && equality_code
8665 && integer_zerop (arg1))
8666 {
8667 /* ??? The transformation is valid for the other operators if overflow
8668 is undefined for the type, but performing it here badly interacts
8669 with the transformation in fold_cond_expr_with_comparison which
8670 attempts to synthesize ABS_EXPR. The guard above already
8671 restricts this transform to the equality codes, so no
8672 strict-overflow warning is needed. */
8675 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8676 TREE_OPERAND (arg0, 1));
8677 }
8678
8679 /* For comparisons of pointers we can decompose it to a compile time
8680 comparison of the base objects and the offsets into the object.
8681 This requires at least one operand being an ADDR_EXPR or a
8682 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8683 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8684 && (TREE_CODE (arg0) == ADDR_EXPR
8685 || TREE_CODE (arg1) == ADDR_EXPR
8686 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8687 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8688 {
8689 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8690 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8691 machine_mode mode;
8692 int volatilep, unsignedp;
8693 bool indirect_base0 = false, indirect_base1 = false;
8694
8695 /* Get base and offset for the access. Strip ADDR_EXPR for
8696 get_inner_reference, but put it back by stripping INDIRECT_REF
8697 off the base object if possible. indirect_baseN will be true
8698 if baseN is not an address but refers to the object itself. */
8699 base0 = arg0;
8700 if (TREE_CODE (arg0) == ADDR_EXPR)
8701 {
8702 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8703 &bitsize, &bitpos0, &offset0, &mode,
8704 &unsignedp, &volatilep, false);
8705 if (TREE_CODE (base0) == INDIRECT_REF)
8706 base0 = TREE_OPERAND (base0, 0);
8707 else
8708 indirect_base0 = true;
8709 }
8710 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8711 {
8712 base0 = TREE_OPERAND (arg0, 0);
8713 STRIP_SIGN_NOPS (base0);
8714 if (TREE_CODE (base0) == ADDR_EXPR)
8715 {
8716 base0 = TREE_OPERAND (base0, 0);
8717 indirect_base0 = true;
8718 }
8719 offset0 = TREE_OPERAND (arg0, 1);
8720 if (tree_fits_shwi_p (offset0))
8721 {
8722 HOST_WIDE_INT off = size_low_cst (offset0);
8723 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8724 * BITS_PER_UNIT)
8725 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8726 {
8727 bitpos0 = off * BITS_PER_UNIT;
8728 offset0 = NULL_TREE;
8729 }
8730 }
8731 }
8732
8733 base1 = arg1;
8734 if (TREE_CODE (arg1) == ADDR_EXPR)
8735 {
8736 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8737 &bitsize, &bitpos1, &offset1, &mode,
8738 &unsignedp, &volatilep, false);
8739 if (TREE_CODE (base1) == INDIRECT_REF)
8740 base1 = TREE_OPERAND (base1, 0);
8741 else
8742 indirect_base1 = true;
8743 }
8744 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8745 {
8746 base1 = TREE_OPERAND (arg1, 0);
8747 STRIP_SIGN_NOPS (base1);
8748 if (TREE_CODE (base1) == ADDR_EXPR)
8749 {
8750 base1 = TREE_OPERAND (base1, 0);
8751 indirect_base1 = true;
8752 }
8753 offset1 = TREE_OPERAND (arg1, 1);
8754 if (tree_fits_shwi_p (offset1))
8755 {
8756 HOST_WIDE_INT off = size_low_cst (offset1);
8757 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8758 * BITS_PER_UNIT)
8759 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8760 {
8761 bitpos1 = off * BITS_PER_UNIT;
8762 offset1 = NULL_TREE;
8763 }
8764 }
8765 }
8766
8767 /* A local variable can never be pointed to by
8768 the default SSA name of an incoming parameter. */
8769 if ((TREE_CODE (arg0) == ADDR_EXPR
8770 && indirect_base0
8771 && TREE_CODE (base0) == VAR_DECL
8772 && auto_var_in_fn_p (base0, current_function_decl)
8773 && !indirect_base1
8774 && TREE_CODE (base1) == SSA_NAME
8775 && SSA_NAME_IS_DEFAULT_DEF (base1)
8776 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8777 || (TREE_CODE (arg1) == ADDR_EXPR
8778 && indirect_base1
8779 && TREE_CODE (base1) == VAR_DECL
8780 && auto_var_in_fn_p (base1, current_function_decl)
8781 && !indirect_base0
8782 && TREE_CODE (base0) == SSA_NAME
8783 && SSA_NAME_IS_DEFAULT_DEF (base0)
8784 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8785 {
8786 if (code == NE_EXPR)
8787 return constant_boolean_node (1, type);
8788 else if (code == EQ_EXPR)
8789 return constant_boolean_node (0, type);
8790 }
8791 /* If we have equivalent bases we might be able to simplify. */
8792 else if (indirect_base0 == indirect_base1
8793 && operand_equal_p (base0, base1, 0))
8794 {
8795 /* We can fold this expression to a constant if the non-constant
8796 offset parts are equal. */
8797 if ((offset0 == offset1
8798 || (offset0 && offset1
8799 && operand_equal_p (offset0, offset1, 0)))
8800 && (code == EQ_EXPR
8801 || code == NE_EXPR
8802 || (indirect_base0 && DECL_P (base0))
8803 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8805 {
8806 if (!equality_code
8807 && bitpos0 != bitpos1
8808 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8809 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8810 fold_overflow_warning (("assuming pointer wraparound does not "
8811 "occur when comparing P +- C1 with "
8812 "P +- C2"),
8813 WARN_STRICT_OVERFLOW_CONDITIONAL);
8814
8815 switch (code)
8816 {
8817 case EQ_EXPR:
8818 return constant_boolean_node (bitpos0 == bitpos1, type);
8819 case NE_EXPR:
8820 return constant_boolean_node (bitpos0 != bitpos1, type);
8821 case LT_EXPR:
8822 return constant_boolean_node (bitpos0 < bitpos1, type);
8823 case LE_EXPR:
8824 return constant_boolean_node (bitpos0 <= bitpos1, type);
8825 case GE_EXPR:
8826 return constant_boolean_node (bitpos0 >= bitpos1, type);
8827 case GT_EXPR:
8828 return constant_boolean_node (bitpos0 > bitpos1, type);
8829 default:;
8830 }
8831 }
8832 /* We can simplify the comparison to a comparison of the variable
8833 offset parts if the constant offset parts are equal.
8834 Be careful to use signed sizetype here because otherwise we
8835 mess with array offsets in the wrong way. This is possible
8836 because pointer arithmetic is restricted to remain within an
8837 object and overflow on pointer differences is undefined as of
8838 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8839 else if (bitpos0 == bitpos1
8840 && (equality_code
8841 || (indirect_base0 && DECL_P (base0))
8842 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8843 {
8844 /* By converting to signed sizetype we cover middle-end pointer
8845 arithmetic which operates on unsigned pointer types of size
8846 type size and ARRAY_REF offsets which are properly sign or
8847 zero extended from their type in case it is narrower than
8848 sizetype. */
8849 if (offset0 == NULL_TREE)
8850 offset0 = build_int_cst (ssizetype, 0);
8851 else
8852 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8853 if (offset1 == NULL_TREE)
8854 offset1 = build_int_cst (ssizetype, 0);
8855 else
8856 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8857
8858 if (!equality_code
8859 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8860 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8861 fold_overflow_warning (("assuming pointer wraparound does not "
8862 "occur when comparing P +- C1 with "
8863 "P +- C2"),
8864 WARN_STRICT_OVERFLOW_COMPARISON);
8865
8866 return fold_build2_loc (loc, code, type, offset0, offset1);
8867 }
8868 }
8869 /* For non-equal bases we can simplify if they are addresses
8870 of local binding decls or constants. */
8871 else if (indirect_base0 && indirect_base1
8872 /* We know that !operand_equal_p (base0, base1, 0)
8873 because the if condition was false. But make
8874 sure two decls are not the same. */
8875 && base0 != base1
8876 && TREE_CODE (arg0) == ADDR_EXPR
8877 && TREE_CODE (arg1) == ADDR_EXPR
8878 && (((TREE_CODE (base0) == VAR_DECL
8879 || TREE_CODE (base0) == PARM_DECL)
8880 && (targetm.binds_local_p (base0)
8881 || CONSTANT_CLASS_P (base1)))
8882 || CONSTANT_CLASS_P (base0))
8883 && (((TREE_CODE (base1) == VAR_DECL
8884 || TREE_CODE (base1) == PARM_DECL)
8885 && (targetm.binds_local_p (base1)
8886 || CONSTANT_CLASS_P (base0)))
8887 || CONSTANT_CLASS_P (base1)))
8888 {
8889 if (code == EQ_EXPR)
8890 return omit_two_operands_loc (loc, type, boolean_false_node,
8891 arg0, arg1);
8892 else if (code == NE_EXPR)
8893 return omit_two_operands_loc (loc, type, boolean_true_node,
8894 arg0, arg1);
8895 }
8896 /* For equal offsets we can simplify to a comparison of the
8897 base addresses. */
8898 else if (bitpos0 == bitpos1
8899 && (indirect_base0
8900 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8901 && (indirect_base1
8902 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8903 && ((offset0 == offset1)
8904 || (offset0 && offset1
8905 && operand_equal_p (offset0, offset1, 0))))
8906 {
8907 if (indirect_base0)
8908 base0 = build_fold_addr_expr_loc (loc, base0);
8909 if (indirect_base1)
8910 base1 = build_fold_addr_expr_loc (loc, base1);
8911 return fold_build2_loc (loc, code, type, base0, base1);
8912 }
8913 }
8914
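      /* Examples of the decomposition above (editor's sketch, 32-bit
         int assumed):
           int a[10];   &a[3] == &a[5]   -> bitpos 96 != 160 -> false
                        &a[3] <  &a[5]   -> bitpos compare    -> true
         and for distinct locals "int x, y" the address &x can never
         equal &y, so "&x == &y" folds to false via the non-equal-bases
         case when both decls bind locally.  */
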
8915 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8916 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8917 the resulting offset is smaller in absolute value than the
8918 original one and has the same sign. */
8919 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8920 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8921 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8922 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8923 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8924 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8925 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8926 {
8927 tree const1 = TREE_OPERAND (arg0, 1);
8928 tree const2 = TREE_OPERAND (arg1, 1);
8929 tree variable1 = TREE_OPERAND (arg0, 0);
8930 tree variable2 = TREE_OPERAND (arg1, 0);
8931 tree cst;
8932 const char * const warnmsg = G_("assuming signed overflow does not "
8933 "occur when combining constants around "
8934 "a comparison");
8935
8936 /* Put the constant on the side where it doesn't overflow and is
8937 of lower absolute value and of the same sign as before. */
8938 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8939 ? MINUS_EXPR : PLUS_EXPR,
8940 const2, const1);
8941 if (!TREE_OVERFLOW (cst)
8942 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8943 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8944 {
8945 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8946 return fold_build2_loc (loc, code, type,
8947 variable1,
8948 fold_build2_loc (loc, TREE_CODE (arg1),
8949 TREE_TYPE (arg1),
8950 variable2, cst));
8951 }
8952
8953 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8954 ? MINUS_EXPR : PLUS_EXPR,
8955 const1, const2);
8956 if (!TREE_OVERFLOW (cst)
8957 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8958 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8959 {
8960 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8961 return fold_build2_loc (loc, code, type,
8962 fold_build2_loc (loc, TREE_CODE (arg0),
8963 TREE_TYPE (arg0),
8964 variable1, cst),
8965 variable2);
8966 }
8967 }
8968
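      /* Illustration (editor's sketch, signed int): for
           x + 5 < y + 2
         the first attempt computes cst = 2 - 5 = -3, which flips sign
         relative to const2 and is rejected; the second computes
         cst = 5 - 2 = 3, which keeps the sign of const1 with smaller
         magnitude, so the comparison becomes x + 3 < y.  */
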
8969 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8970 signed arithmetic case. That form is created by the compiler
8971 often enough for folding it to be of value. One example is in
8972 computing loop trip counts after Operator Strength Reduction. */
8973 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8974 && TREE_CODE (arg0) == MULT_EXPR
8975 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8976 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8977 && integer_zerop (arg1))
8978 {
8979 tree const1 = TREE_OPERAND (arg0, 1);
8980 tree const2 = arg1; /* zero */
8981 tree variable1 = TREE_OPERAND (arg0, 0);
8982 enum tree_code cmp_code = code;
8983
8984 /* Handle unfolded multiplication by zero. */
8985 if (integer_zerop (const1))
8986 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8987
8988 fold_overflow_warning (("assuming signed overflow does not occur when "
8989 "eliminating multiplication in comparison "
8990 "with zero"),
8991 WARN_STRICT_OVERFLOW_COMPARISON);
8992
8993 /* If const1 is negative we swap the sense of the comparison. */
8994 if (tree_int_cst_sgn (const1) < 0)
8995 cmp_code = swap_tree_comparison (cmp_code);
8996
8997 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8998 }
8999
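      /* Illustration (editor's sketch, signed arithmetic):
           x * 4 > 0     folds to    x > 0
           x * -4 > 0    folds to    x < 0  (sense swapped)
         while an unfolded "x * 0 > 0" is rewritten as "0 > 0" by the
         multiplication-by-zero special case first.  */
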
9000 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9001 if (tem)
9002 return tem;
9003
9004 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9005 {
9006 tree targ0 = strip_float_extensions (arg0);
9007 tree targ1 = strip_float_extensions (arg1);
9008 tree newtype = TREE_TYPE (targ0);
9009
9010 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9011 newtype = TREE_TYPE (targ1);
9012
9013 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9014 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9015 return fold_build2_loc (loc, code, type,
9016 fold_convert_loc (loc, newtype, targ0),
9017 fold_convert_loc (loc, newtype, targ1));
9018
9019 /* (-a) CMP (-b) -> b CMP a */
9020 if (TREE_CODE (arg0) == NEGATE_EXPR
9021 && TREE_CODE (arg1) == NEGATE_EXPR)
9022 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9023 TREE_OPERAND (arg0, 0));
9024
9025 if (TREE_CODE (arg1) == REAL_CST)
9026 {
9027 REAL_VALUE_TYPE cst;
9028 cst = TREE_REAL_CST (arg1);
9029
9030 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9031 if (TREE_CODE (arg0) == NEGATE_EXPR)
9032 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9033 TREE_OPERAND (arg0, 0),
9034 build_real (TREE_TYPE (arg1),
9035 real_value_negate (&cst)));
9036
9037 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9038 /* a CMP (-0) -> a CMP 0 */
9039 if (REAL_VALUE_MINUS_ZERO (cst))
9040 return fold_build2_loc (loc, code, type, arg0,
9041 build_real (TREE_TYPE (arg1), dconst0));
9042
9043 /* x != NaN is always true, other ops are always false. */
9044 if (REAL_VALUE_ISNAN (cst)
9045 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9046 {
9047 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9048 return omit_one_operand_loc (loc, type, tem, arg0);
9049 }
9050
9051 /* Fold comparisons against infinity. */
9052 if (REAL_VALUE_ISINF (cst)
9053 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9054 {
9055 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9056 if (tem != NULL_TREE)
9057 return tem;
9058 }
9059 }
9060
9061 /* If this is a comparison of a real constant with a PLUS_EXPR
9062 or a MINUS_EXPR of a real constant, we can convert it into a
9063 comparison with a revised real constant as long as no overflow
9064 occurs when unsafe_math_optimizations are enabled. */
9065 if (flag_unsafe_math_optimizations
9066 && TREE_CODE (arg1) == REAL_CST
9067 && (TREE_CODE (arg0) == PLUS_EXPR
9068 || TREE_CODE (arg0) == MINUS_EXPR)
9069 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9070 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9071 ? MINUS_EXPR : PLUS_EXPR,
9072 arg1, TREE_OPERAND (arg0, 1)))
9073 && !TREE_OVERFLOW (tem))
9074 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9075
9076 /* Likewise, we can simplify a comparison of a real constant with
9077 a MINUS_EXPR whose first operand is also a real constant, i.e.
9078 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9079 floating-point types only if -fassociative-math is set. */
9080 if (flag_associative_math
9081 && TREE_CODE (arg1) == REAL_CST
9082 && TREE_CODE (arg0) == MINUS_EXPR
9083 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9084 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9085 arg1))
9086 && !TREE_OVERFLOW (tem))
9087 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9088 TREE_OPERAND (arg0, 1), tem);
9089
9090 /* Fold comparisons against built-in math functions. */
9091 if (TREE_CODE (arg1) == REAL_CST
9092 && flag_unsafe_math_optimizations
9093 && ! flag_errno_math)
9094 {
9095 enum built_in_function fcode = builtin_mathfn_code (arg0);
9096
9097 if (fcode != END_BUILTINS)
9098 {
9099 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9100 if (tem != NULL_TREE)
9101 return tem;
9102 }
9103 }
9104 }
9105
9106 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9107 && CONVERT_EXPR_P (arg0))
9108 {
9109 /* If we are widening one operand of an integer comparison,
9110 see if the other operand is similarly being widened. Perhaps we
9111 can do the comparison in the narrower type. */
9112 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9113 if (tem)
9114 return tem;
9115
9116 /* Or if we are changing signedness. */
9117 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9118 if (tem)
9119 return tem;
9120 }
9121
9122 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9123 constant, we can simplify it. */
9124 if (TREE_CODE (arg1) == INTEGER_CST
9125 && (TREE_CODE (arg0) == MIN_EXPR
9126 || TREE_CODE (arg0) == MAX_EXPR)
9127 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9128 {
9129 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9130 if (tem)
9131 return tem;
9132 }
9133
9134 /* Simplify comparison of something with itself. (For IEEE
9135 floating-point, we can only do some of these simplifications.) */
9136 if (operand_equal_p (arg0, arg1, 0))
9137 {
9138 switch (code)
9139 {
9140 case EQ_EXPR:
9141 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9142 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9143 return constant_boolean_node (1, type);
9144 break;
9145
9146 case GE_EXPR:
9147 case LE_EXPR:
9148 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9149 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9150 return constant_boolean_node (1, type);
9151 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9152
9153 case NE_EXPR:
9154 /* For NE, we can only do this simplification if integer
9155 or we don't honor IEEE floating point NaNs. */
9156 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9157 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9158 break;
9159 /* ... fall through ... */
9160 case GT_EXPR:
9161 case LT_EXPR:
9162 return constant_boolean_node (0, type);
9163 default:
9164 gcc_unreachable ();
9165 }
9166 }
9167
9168 /* If we are comparing an expression that just has comparisons
9169 of two integer values, arithmetic expressions of those comparisons,
9170 and constants, we can simplify it. There are only three cases
9171 to check: the two values can either be equal, the first can be
9172 greater, or the second can be greater. Fold the expression for
9173 those three values. Since each value must be 0 or 1, we have
9174 eight possibilities, each of which corresponds to the constant 0
9175 or 1 or one of the six possible comparisons.
9176
9177 This handles common cases like (a > b) == 0 but also handles
9178 expressions like ((x > y) - (y > x)) > 0, which supposedly
9179 occur in macroized code. */
9180
9181 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9182 {
9183 tree cval1 = 0, cval2 = 0;
9184 int save_p = 0;
9185
9186 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9187 /* Don't handle degenerate cases here; they should already
9188 have been handled anyway. */
9189 && cval1 != 0 && cval2 != 0
9190 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9191 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9192 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9193 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9194 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9195 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9196 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9197 {
9198 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9199 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9200
9201 /* We can't just pass T to eval_subst in case cval1 or cval2
9202 was the same as ARG1. */
9203
9204 tree high_result
9205 = fold_build2_loc (loc, code, type,
9206 eval_subst (loc, arg0, cval1, maxval,
9207 cval2, minval),
9208 arg1);
9209 tree equal_result
9210 = fold_build2_loc (loc, code, type,
9211 eval_subst (loc, arg0, cval1, maxval,
9212 cval2, maxval),
9213 arg1);
9214 tree low_result
9215 = fold_build2_loc (loc, code, type,
9216 eval_subst (loc, arg0, cval1, minval,
9217 cval2, maxval),
9218 arg1);
9219
9220 /* All three of these results should be 0 or 1. Confirm they are.
9221 Then use those values to select the proper code to use. */
9222
9223 if (TREE_CODE (high_result) == INTEGER_CST
9224 && TREE_CODE (equal_result) == INTEGER_CST
9225 && TREE_CODE (low_result) == INTEGER_CST)
9226 {
9227 /* Make a 3-bit mask with the high-order bit being the
9228 value for `>', the next for '=', and the low for '<'. */
9229 switch ((integer_onep (high_result) * 4)
9230 + (integer_onep (equal_result) * 2)
9231 + integer_onep (low_result))
9232 {
9233 case 0:
9234 /* Always false. */
9235 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9236 case 1:
9237 code = LT_EXPR;
9238 break;
9239 case 2:
9240 code = EQ_EXPR;
9241 break;
9242 case 3:
9243 code = LE_EXPR;
9244 break;
9245 case 4:
9246 code = GT_EXPR;
9247 break;
9248 case 5:
9249 code = NE_EXPR;
9250 break;
9251 case 6:
9252 code = GE_EXPR;
9253 break;
9254 case 7:
9255 /* Always true. */
9256 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9257 }
9258
9259 if (save_p)
9260 {
9261 tem = save_expr (build2 (code, type, cval1, cval2));
9262 SET_EXPR_LOCATION (tem, loc);
9263 return tem;
9264 }
9265 return fold_build2_loc (loc, code, type, cval1, cval2);
9266 }
9267 }
9268 }
9269
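      /* Worked example (editor's sketch): for "(a > b) == 0" the three
         substitutions give high_result 0 (cval1 = max, cval2 = min),
         equal_result 1 and low_result 1, i.e. mask 0*4 + 1*2 + 1 == 3,
         which selects LE_EXPR: the expression folds to a <= b.  */
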
9270 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9271 into a single range test. */
9272 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9273 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9274 && TREE_CODE (arg1) == INTEGER_CST
9275 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9276 && !integer_zerop (TREE_OPERAND (arg0, 1))
9277 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9278 && !TREE_OVERFLOW (arg1))
9279 {
9280 tem = fold_div_compare (loc, code, type, arg0, arg1);
9281 if (tem != NULL_TREE)
9282 return tem;
9283 }
9284
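      /* Example (editor's note): with signed truncating division,
         "x / 4 == 2" holds exactly for x in [8, 11], so
         fold_div_compare can rewrite it as a single range test of
         that interval rather than a division.  */
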
9285 /* Fold ~X op ~Y as Y op X. */
9286 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9287 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9288 {
9289 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9290 return fold_build2_loc (loc, code, type,
9291 fold_convert_loc (loc, cmp_type,
9292 TREE_OPERAND (arg1, 0)),
9293 TREE_OPERAND (arg0, 0));
9294 }
9295
9296 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9297 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9298 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9299 {
9300 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9301 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9302 TREE_OPERAND (arg0, 0),
9303 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9304 fold_convert_loc (loc, cmp_type, arg1)));
9305 }
9306
9307 return NULL_TREE;
9308 }
9309
9310
9311 /* Subroutine of fold_binary. Optimize complex multiplications of the
9312 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9313 argument EXPR represents the expression "z" of type TYPE. */
9314
9315 static tree
9316 fold_mult_zconjz (location_t loc, tree type, tree expr)
9317 {
9318 tree itype = TREE_TYPE (type);
9319 tree rpart, ipart, tem;
9320
9321 if (TREE_CODE (expr) == COMPLEX_EXPR)
9322 {
9323 rpart = TREE_OPERAND (expr, 0);
9324 ipart = TREE_OPERAND (expr, 1);
9325 }
9326 else if (TREE_CODE (expr) == COMPLEX_CST)
9327 {
9328 rpart = TREE_REALPART (expr);
9329 ipart = TREE_IMAGPART (expr);
9330 }
9331 else
9332 {
9333 expr = save_expr (expr);
9334 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9335 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9336 }
9337
9338 rpart = save_expr (rpart);
9339 ipart = save_expr (ipart);
9340 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9341 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9342 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9343 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9344 build_zero_cst (itype));
9345 }
9346
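/* Example (editor's note): for z = a + b*i the result is
     COMPLEX_EXPR <a*a + b*b, 0>
   matching z * conj(z) = (a + b*i)(a - b*i) = a^2 + b^2, with the
   imaginary part a zero constant of the element type ITYPE.  */
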
9347
9348 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9349 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9350 guarantees that P and N have the same least significant log2(M) bits.
9351 N is not otherwise constrained. In particular, N is not normalized to
9352 0 <= N < M as is common. In general, the precise value of P is unknown.
9353 M is chosen as large as possible such that constant N can be determined.
9354
9355 Returns M and sets *RESIDUE to N.
9356
9357 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9358 account. This is not always possible due to PR 35705.
9359 */
9360
9361 static unsigned HOST_WIDE_INT
9362 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9363 bool allow_func_align)
9364 {
9365 enum tree_code code;
9366
9367 *residue = 0;
9368
9369 code = TREE_CODE (expr);
9370 if (code == ADDR_EXPR)
9371 {
9372 unsigned int bitalign;
9373 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9374 *residue /= BITS_PER_UNIT;
9375 return bitalign / BITS_PER_UNIT;
9376 }
9377 else if (code == POINTER_PLUS_EXPR)
9378 {
9379 tree op0, op1;
9380 unsigned HOST_WIDE_INT modulus;
9381 enum tree_code inner_code;
9382
9383 op0 = TREE_OPERAND (expr, 0);
9384 STRIP_NOPS (op0);
9385 modulus = get_pointer_modulus_and_residue (op0, residue,
9386 allow_func_align);
9387
9388 op1 = TREE_OPERAND (expr, 1);
9389 STRIP_NOPS (op1);
9390 inner_code = TREE_CODE (op1);
9391 if (inner_code == INTEGER_CST)
9392 {
9393 *residue += TREE_INT_CST_LOW (op1);
9394 return modulus;
9395 }
9396 else if (inner_code == MULT_EXPR)
9397 {
9398 op1 = TREE_OPERAND (op1, 1);
9399 if (TREE_CODE (op1) == INTEGER_CST)
9400 {
9401 unsigned HOST_WIDE_INT align;
9402
9403 /* Compute the greatest power-of-2 divisor of op1. */
9404 align = TREE_INT_CST_LOW (op1);
9405 align &= -align;
9406
9407 /* If align is non-zero and less than modulus, replace
9408 modulus with align. If align is 0, then either op1 is 0
9409 or the greatest power-of-2 divisor of op1 doesn't fit in an
9410 unsigned HOST_WIDE_INT. In either case, no additional
9411 constraint is imposed. */
9412 if (align)
9413 modulus = MIN (modulus, align);
9414
9415 return modulus;
9416 }
9417 }
9418 }
9419
9420 /* If we get here, we were unable to determine anything useful about the
9421 expression. */
9422 return 1;
9423 }
9424
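/* Worked example (editor's sketch): assuming "char buf[32]" is known
   to be 16-byte aligned, the ADDR_EXPR case maps &buf to modulus 16
   with residue 0, and the POINTER_PLUS_EXPR case maps &buf + 4 to
   modulus 16 with residue 4, i.e. the pointer value is known to be
   congruent to 4 modulo 16.  */
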
9425 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9426 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9427
9428 static bool
9429 vec_cst_ctor_to_array (tree arg, tree *elts)
9430 {
9431 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9432
9433 if (TREE_CODE (arg) == VECTOR_CST)
9434 {
9435 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9436 elts[i] = VECTOR_CST_ELT (arg, i);
9437 }
9438 else if (TREE_CODE (arg) == CONSTRUCTOR)
9439 {
9440 constructor_elt *elt;
9441
9442 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9443 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9444 return false;
9445 else
9446 elts[i] = elt->value;
9447 }
9448 else
9449 return false;
9450 for (; i < nelts; i++)
9451 elts[i]
9452 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9453 return true;
9454 }
9455
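/* Example (editor's note): a CONSTRUCTOR { 1, 2 } for a four-element
   integer vector fills elts[0] and elts[1] and zero-pads elts[2] and
   elts[3]; a CONSTRUCTOR with a vector-typed element (or any other
   unsupported tree code) makes the function return false.  */
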
9456 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9457 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9458 NULL_TREE otherwise. */
9459
9460 static tree
9461 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9462 {
9463 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9464 tree *elts;
9465 bool need_ctor = false;
9466
9467 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9468 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9469 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9470 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9471 return NULL_TREE;
9472
9473 elts = XALLOCAVEC (tree, nelts * 3);
9474 if (!vec_cst_ctor_to_array (arg0, elts)
9475 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9476 return NULL_TREE;
9477
9478 for (i = 0; i < nelts; i++)
9479 {
9480 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9481 need_ctor = true;
9482 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9483 }
9484
9485 if (need_ctor)
9486 {
9487 vec<constructor_elt, va_gc> *v;
9488 vec_alloc (v, nelts);
9489 for (i = 0; i < nelts; i++)
9490 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9491 return build_constructor (type, v);
9492 }
9493 else
9494 return build_vector (type, &elts[2 * nelts]);
9495 }
9496
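/* Illustration (editor's sketch): for four-element vectors
   arg0 = {0,1,2,3}, arg1 = {4,5,6,7} and sel = {0,4,1,5}, the
   concatenated element array is indexed by sel, giving the constant
   interleave {0,4,1,5}; a CONSTRUCTOR is built instead only when a
   selected element is not itself a constant.  */
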
9497 /* Try to fold a pointer difference of type TYPE between two address
9498 expressions of array references AREF0 and AREF1 using location LOC. Return a
9499 simplified expression for the difference or NULL_TREE. */
9500
9501 static tree
9502 fold_addr_of_array_ref_difference (location_t loc, tree type,
9503 tree aref0, tree aref1)
9504 {
9505 tree base0 = TREE_OPERAND (aref0, 0);
9506 tree base1 = TREE_OPERAND (aref1, 0);
9507 tree base_offset = build_int_cst (type, 0);
9508
9509 /* If the bases are array references as well, recurse. If the bases
9510 are pointer indirections compute the difference of the pointers.
9511 If the bases are equal, we are set. */
9512 if ((TREE_CODE (base0) == ARRAY_REF
9513 && TREE_CODE (base1) == ARRAY_REF
9514 && (base_offset
9515 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9516 || (INDIRECT_REF_P (base0)
9517 && INDIRECT_REF_P (base1)
9518 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9519 TREE_OPERAND (base0, 0),
9520 TREE_OPERAND (base1, 0))))
9521 || operand_equal_p (base0, base1, 0))
9522 {
9523 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9524 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9525 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9526 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9527 return fold_build2_loc (loc, PLUS_EXPR, type,
9528 base_offset,
9529 fold_build2_loc (loc, MULT_EXPR, type,
9530 diff, esz));
9531 }
9532 return NULL_TREE;
9533 }
9534
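/* Example (editor's sketch, 4-byte int assumed): for "int a[10]" the
   difference &a[i] - &a[j] reaches this function with equal bases,
   so base_offset is 0 and the result is 0 + (i - j) * 4; for a
   two-dimensional array the outer ARRAY_REF bases recurse and
   contribute their own scaled difference as base_offset.  */
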
9535 /* If the real or vector real constant CST of type TYPE has an exact
9536 inverse, return it, else return NULL. */
9537
9538 static tree
9539 exact_inverse (tree type, tree cst)
9540 {
9541 REAL_VALUE_TYPE r;
9542 tree unit_type, *elts;
9543 machine_mode mode;
9544 unsigned vec_nelts, i;
9545
9546 switch (TREE_CODE (cst))
9547 {
9548 case REAL_CST:
9549 r = TREE_REAL_CST (cst);
9550
9551 if (exact_real_inverse (TYPE_MODE (type), &r))
9552 return build_real (type, r);
9553
9554 return NULL_TREE;
9555
9556 case VECTOR_CST:
9557 vec_nelts = VECTOR_CST_NELTS (cst);
9558 elts = XALLOCAVEC (tree, vec_nelts);
9559 unit_type = TREE_TYPE (type);
9560 mode = TYPE_MODE (unit_type);
9561
9562 for (i = 0; i < vec_nelts; i++)
9563 {
9564 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9565 if (!exact_real_inverse (mode, &r))
9566 return NULL_TREE;
9567 elts[i] = build_real (unit_type, r);
9568 }
9569
9570 return build_vector (type, elts);
9571
9572 default:
9573 return NULL_TREE;
9574 }
9575 }
9576
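/* Examples (editor's note): 0.25 yields the exact inverse 4.0, and
   the vector { 2.0, 0.5 } yields { 0.5, 2.0 }; the representable
   approximation of 0.1 is not a power of two, so it has no exact
   inverse and NULL is returned, preventing a division by it from
   being rewritten as a multiplication.  */
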
9577 /* Mask out the tz least significant bits of X of type TYPE where
9578 tz is the number of trailing zeroes in Y. */
9579 static wide_int
9580 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9581 {
9582 int tz = wi::ctz (y);
9583 if (tz > 0)
9584 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9585 return x;
9586 }
9587
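/* Example (editor's note): for y = 8 (binary 1000, so tz = 3) and
   x = 0b10111, the low three bits of x are cleared and the result
   is 0b10000; when y has no trailing zeroes, x is returned
   unchanged.  */
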
9588 /* Return true when T is an address and is known to be nonzero.
9589 For floating point we further ensure that T is not denormal.
9590 Similar logic is present in nonzero_address in rtlanal.c.
9591
9592 If the return value is based on the assumption that signed overflow
9593 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9594 change *STRICT_OVERFLOW_P. */
9595
9596 static bool
9597 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9598 {
9599 tree type = TREE_TYPE (t);
9600 enum tree_code code;
9601
9602 /* Doing something useful for floating point would need more work. */
9603 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9604 return false;
9605
9606 code = TREE_CODE (t);
9607 switch (TREE_CODE_CLASS (code))
9608 {
9609 case tcc_unary:
9610 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9611 strict_overflow_p);
9612 case tcc_binary:
9613 case tcc_comparison:
9614 return tree_binary_nonzero_warnv_p (code, type,
9615 TREE_OPERAND (t, 0),
9616 TREE_OPERAND (t, 1),
9617 strict_overflow_p);
9618 case tcc_constant:
9619 case tcc_declaration:
9620 case tcc_reference:
9621 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9622
9623 default:
9624 break;
9625 }
9626
9627 switch (code)
9628 {
9629 case TRUTH_NOT_EXPR:
9630 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9631 strict_overflow_p);
9632
9633 case TRUTH_AND_EXPR:
9634 case TRUTH_OR_EXPR:
9635 case TRUTH_XOR_EXPR:
9636 return tree_binary_nonzero_warnv_p (code, type,
9637 TREE_OPERAND (t, 0),
9638 TREE_OPERAND (t, 1),
9639 strict_overflow_p);
9640
9641 case COND_EXPR:
9642 case CONSTRUCTOR:
9643 case OBJ_TYPE_REF:
9644 case ASSERT_EXPR:
9645 case ADDR_EXPR:
9646 case WITH_SIZE_EXPR:
9647 case SSA_NAME:
9648 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9649
9650 case COMPOUND_EXPR:
9651 case MODIFY_EXPR:
9652 case BIND_EXPR:
9653 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9654 strict_overflow_p);
9655
9656 case SAVE_EXPR:
9657 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9658 strict_overflow_p);
9659
9660 case CALL_EXPR:
9661 {
9662 tree fndecl = get_callee_fndecl (t);
9663 if (!fndecl) return false;
9664 if (flag_delete_null_pointer_checks && !flag_check_new
9665 && DECL_IS_OPERATOR_NEW (fndecl)
9666 && !TREE_NOTHROW (fndecl))
9667 return true;
9668 if (flag_delete_null_pointer_checks
9669 && lookup_attribute ("returns_nonnull",
9670 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9671 return true;
9672 return alloca_call_p (t);
9673 }
9674
9675 default:
9676 break;
9677 }
9678 return false;
9679 }
9680
9681 /* Return true when T is an address and is known to be nonzero.
9682 Handle warnings about undefined signed overflow. */
9683
9684 static bool
9685 tree_expr_nonzero_p (tree t)
9686 {
9687 bool ret, strict_overflow_p;
9688
9689 strict_overflow_p = false;
9690 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9691 if (strict_overflow_p)
9692 fold_overflow_warning (("assuming signed overflow does not occur when "
9693 "determining that expression is always "
9694 "non-zero"),
9695 WARN_STRICT_OVERFLOW_MISC);
9696 return ret;
9697 }
9698
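/* Examples (editor's note): an ADDR_EXPR such as &var is typically
   known nonzero via tree_single_nonzero_warnv_p, and so is a call
   to a function carrying the "returns_nonnull" attribute when
   -fdelete-null-pointer-checks is in effect, so comparisons of such
   values against zero can be folded outright.  */
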
9699 /* Fold a binary expression of code CODE and type TYPE with operands
9700 OP0 and OP1. LOC is the location of the resulting expression.
9701 Return the folded expression if folding is successful. Otherwise,
9702 return NULL_TREE. */
9703
9704 tree
9705 fold_binary_loc (location_t loc,
9706 enum tree_code code, tree type, tree op0, tree op1)
9707 {
9708 enum tree_code_class kind = TREE_CODE_CLASS (code);
9709 tree arg0, arg1, tem;
9710 tree t1 = NULL_TREE;
9711 bool strict_overflow_p;
9712 unsigned int prec;
9713
9714 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9715 && TREE_CODE_LENGTH (code) == 2
9716 && op0 != NULL_TREE
9717 && op1 != NULL_TREE);
9718
9719 arg0 = op0;
9720 arg1 = op1;
9721
9722 /* Strip any conversions that don't change the mode. This is
9723 safe for every expression, except for a comparison expression
9724 because its signedness is derived from its operands. So, in
9725 the latter case, only strip conversions that don't change the
9726 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9727 preserved.
9728
9729 Note that this is done as an internal manipulation within the
9730 constant folder, in order to find the simplest representation
9731 of the arguments so that their form can be studied. In any
9732 case, the appropriate type conversions should be put back in
9733 the tree that will get out of the constant folder. */
9734
9735 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9736 {
9737 STRIP_SIGN_NOPS (arg0);
9738 STRIP_SIGN_NOPS (arg1);
9739 }
9740 else
9741 {
9742 STRIP_NOPS (arg0);
9743 STRIP_NOPS (arg1);
9744 }
9745
9746 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9747 constant but we can't do arithmetic on them. */
9748 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9749 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9750 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9751 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9752 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9753 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9754 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9755 {
9756 if (kind == tcc_binary)
9757 {
9758 /* Make sure type and arg0 have the same saturating flag. */
9759 gcc_assert (TYPE_SATURATING (type)
9760 == TYPE_SATURATING (TREE_TYPE (arg0)));
9761 tem = const_binop (code, arg0, arg1);
9762 }
9763 else if (kind == tcc_comparison)
9764 tem = fold_relational_const (code, type, arg0, arg1);
9765 else
9766 tem = NULL_TREE;
9767
9768 if (tem != NULL_TREE)
9769 {
9770 if (TREE_TYPE (tem) != type)
9771 tem = fold_convert_loc (loc, type, tem);
9772 return tem;
9773 }
9774 }
9775
9776 /* If this is a commutative operation, and ARG0 is a constant, move it
9777 to ARG1 to reduce the number of tests below. */
9778 if (commutative_tree_code (code)
9779 && tree_swap_operands_p (arg0, arg1, true))
9780 return fold_build2_loc (loc, code, type, op1, op0);
9781
9782 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9783 to ARG1 to reduce the number of tests below. */
9784 if (kind == tcc_comparison
9785 && tree_swap_operands_p (arg0, arg1, true))
9786 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9787
9788 tem = generic_simplify (loc, code, type, op0, op1);
9789 if (tem)
9790 return tem;
9791
9792 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9793
9794 First check for cases where an arithmetic operation is applied to a
9795 compound, conditional, or comparison operation. Push the arithmetic
9796 operation inside the compound or conditional to see if any folding
9797 can then be done. Convert comparison to conditional for this purpose.
9798 This also optimizes non-constant cases that used to be done in
9799 expand_expr.
9800
9801 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9802 where one of the operands is a comparison and the other is a comparison, a
9803 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9804 code below would make the expression more complex. Change it to a
9805 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9806 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9807
9808 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9809 || code == EQ_EXPR || code == NE_EXPR)
9810 && TREE_CODE (type) != VECTOR_TYPE
9811 && ((truth_value_p (TREE_CODE (arg0))
9812 && (truth_value_p (TREE_CODE (arg1))
9813 || (TREE_CODE (arg1) == BIT_AND_EXPR
9814 && integer_onep (TREE_OPERAND (arg1, 1)))))
9815 || (truth_value_p (TREE_CODE (arg1))
9816 && (truth_value_p (TREE_CODE (arg0))
9817 || (TREE_CODE (arg0) == BIT_AND_EXPR
9818 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9819 {
9820 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9821 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9822 : TRUTH_XOR_EXPR,
9823 boolean_type_node,
9824 fold_convert_loc (loc, boolean_type_node, arg0),
9825 fold_convert_loc (loc, boolean_type_node, arg1));
9826
9827 if (code == EQ_EXPR)
9828 tem = invert_truthvalue_loc (loc, tem);
9829
9830 return fold_convert_loc (loc, type, tem);
9831 }
9832
9833 if (TREE_CODE_CLASS (code) == tcc_binary
9834 || TREE_CODE_CLASS (code) == tcc_comparison)
9835 {
9836 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9837 {
9838 tem = fold_build2_loc (loc, code, type,
9839 fold_convert_loc (loc, TREE_TYPE (op0),
9840 TREE_OPERAND (arg0, 1)), op1);
9841 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9842 tem);
9843 }
9844 if (TREE_CODE (arg1) == COMPOUND_EXPR
9845 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9846 {
9847 tem = fold_build2_loc (loc, code, type, op0,
9848 fold_convert_loc (loc, TREE_TYPE (op1),
9849 TREE_OPERAND (arg1, 1)));
9850 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9851 tem);
9852 }
9853
9854 if (TREE_CODE (arg0) == COND_EXPR
9855 || TREE_CODE (arg0) == VEC_COND_EXPR
9856 || COMPARISON_CLASS_P (arg0))
9857 {
9858 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9859 arg0, arg1,
9860 /*cond_first_p=*/1);
9861 if (tem != NULL_TREE)
9862 return tem;
9863 }
9864
9865 if (TREE_CODE (arg1) == COND_EXPR
9866 || TREE_CODE (arg1) == VEC_COND_EXPR
9867 || COMPARISON_CLASS_P (arg1))
9868 {
9869 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9870 arg1, arg0,
9871 /*cond_first_p=*/0);
9872 if (tem != NULL_TREE)
9873 return tem;
9874 }
9875 }
9876
9877 switch (code)
9878 {
9879 case MEM_REF:
9880 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9881 if (TREE_CODE (arg0) == ADDR_EXPR
9882 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9883 {
9884 tree iref = TREE_OPERAND (arg0, 0);
9885 return fold_build2 (MEM_REF, type,
9886 TREE_OPERAND (iref, 0),
9887 int_const_binop (PLUS_EXPR, arg1,
9888 TREE_OPERAND (iref, 1)));
9889 }
9890
9891 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9892 if (TREE_CODE (arg0) == ADDR_EXPR
9893 && handled_component_p (TREE_OPERAND (arg0, 0)))
9894 {
9895 tree base;
9896 HOST_WIDE_INT coffset;
9897 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9898 &coffset);
9899 if (!base)
9900 return NULL_TREE;
9901 return fold_build2 (MEM_REF, type,
9902 build_fold_addr_expr (base),
9903 int_const_binop (PLUS_EXPR, arg1,
9904 size_int (coffset)));
9905 }
9906
9907 return NULL_TREE;
9908
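      /* Examples of the two MEM_REF folds above (editor's sketch):
           MEM[&MEM[p, 8], 8]  ->  MEM[p, 16]
           MEM[&s.b, 0]        ->  MEM[&s, 4]
         assuming "struct S { int a, b; } s" with offsetof (S, b) == 4.  */
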
9909 case POINTER_PLUS_EXPR:
9910 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9911 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9912 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9913 return fold_convert_loc (loc, type,
9914 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9915 fold_convert_loc (loc, sizetype,
9916 arg1),
9917 fold_convert_loc (loc, sizetype,
9918 arg0)));
9919
9920 /* PTR_CST +p CST -> CST1 */
9921 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9922 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9923 fold_convert_loc (loc, type, arg1));
9924
9925 return NULL_TREE;
9926
9927 case PLUS_EXPR:
9928 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9929 {
9930 /* X + (X / CST) * -CST is X % CST. */
9931 if (TREE_CODE (arg1) == MULT_EXPR
9932 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9933 && operand_equal_p (arg0,
9934 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9935 {
9936 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9937 tree cst1 = TREE_OPERAND (arg1, 1);
9938 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9939 cst1, cst0);
9940 if (sum && integer_zerop (sum))
9941 return fold_convert_loc (loc, type,
9942 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9943 TREE_TYPE (arg0), arg0,
9944 cst0));
9945 }
9946 }
9947
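      /* Example (editor's note): x + (x / 16) * -16 folds to x % 16,
         since cst1 + cst0 = -16 + 16 is zero; this is just the
         identity x % c == x - (x / c) * c for truncating division.  */
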
9948 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9949 one. Make sure the type is not saturating and has the signedness of
9950 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9951 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9952 if ((TREE_CODE (arg0) == MULT_EXPR
9953 || TREE_CODE (arg1) == MULT_EXPR)
9954 && !TYPE_SATURATING (type)
9955 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9956 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9957 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9958 {
9959 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9960 if (tem)
9961 return tem;
9962 }
9963
9964 if (! FLOAT_TYPE_P (type))
9965 {
9966 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9967 with a constant, and the two constants have no bits in common,
9968 we should treat this as a BIT_IOR_EXPR since this may produce more
9969 simplifications. */
9970 if (TREE_CODE (arg0) == BIT_AND_EXPR
9971 && TREE_CODE (arg1) == BIT_AND_EXPR
9972 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9973 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9974 && wi::bit_and (TREE_OPERAND (arg0, 1),
9975 TREE_OPERAND (arg1, 1)) == 0)
9976 {
9977 code = BIT_IOR_EXPR;
9978 goto bit_ior;
9979 }
9980
9981 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9982 (plus (plus (mult) (mult)) (foo)) so that we can
9983 take advantage of the factoring cases below. */
9984 if (TYPE_OVERFLOW_WRAPS (type)
9985 && (((TREE_CODE (arg0) == PLUS_EXPR
9986 || TREE_CODE (arg0) == MINUS_EXPR)
9987 && TREE_CODE (arg1) == MULT_EXPR)
9988 || ((TREE_CODE (arg1) == PLUS_EXPR
9989 || TREE_CODE (arg1) == MINUS_EXPR)
9990 && TREE_CODE (arg0) == MULT_EXPR)))
9991 {
9992 tree parg0, parg1, parg, marg;
9993 enum tree_code pcode;
9994
9995 if (TREE_CODE (arg1) == MULT_EXPR)
9996 parg = arg0, marg = arg1;
9997 else
9998 parg = arg1, marg = arg0;
9999 pcode = TREE_CODE (parg);
10000 parg0 = TREE_OPERAND (parg, 0);
10001 parg1 = TREE_OPERAND (parg, 1);
10002 STRIP_NOPS (parg0);
10003 STRIP_NOPS (parg1);
10004
10005 if (TREE_CODE (parg0) == MULT_EXPR
10006 && TREE_CODE (parg1) != MULT_EXPR)
10007 return fold_build2_loc (loc, pcode, type,
10008 fold_build2_loc (loc, PLUS_EXPR, type,
10009 fold_convert_loc (loc, type,
10010 parg0),
10011 fold_convert_loc (loc, type,
10012 marg)),
10013 fold_convert_loc (loc, type, parg1));
10014 if (TREE_CODE (parg0) != MULT_EXPR
10015 && TREE_CODE (parg1) == MULT_EXPR)
10016 return
10017 fold_build2_loc (loc, PLUS_EXPR, type,
10018 fold_convert_loc (loc, type, parg0),
10019 fold_build2_loc (loc, pcode, type,
10020 fold_convert_loc (loc, type, marg),
10021 fold_convert_loc (loc, type,
10022 parg1)));
10023 }
10024 }
10025 else
10026 {
10027 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10028 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10029 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10030
10031 /* Likewise if the operands are reversed. */
10032 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10033 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10034
10035 /* Convert X + -C into X - C. */
10036 if (TREE_CODE (arg1) == REAL_CST
10037 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10038 {
10039 tem = fold_negate_const (arg1, type);
10040 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10041 return fold_build2_loc (loc, MINUS_EXPR, type,
10042 fold_convert_loc (loc, type, arg0),
10043 fold_convert_loc (loc, type, tem));
10044 }
10045
10046 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10047 to __complex__ ( x, y ). This is not the same for SNaNs or
10048 if signed zeros are involved. */
10049 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10050 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10051 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10052 {
10053 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10054 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10055 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10056 bool arg0rz = false, arg0iz = false;
10057 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10058 || (arg0i && (arg0iz = real_zerop (arg0i))))
10059 {
10060 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10061 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10062 if (arg0rz && arg1i && real_zerop (arg1i))
10063 {
10064 tree rp = arg1r ? arg1r
10065 : build1 (REALPART_EXPR, rtype, arg1);
10066 tree ip = arg0i ? arg0i
10067 : build1 (IMAGPART_EXPR, rtype, arg0);
10068 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10069 }
10070 else if (arg0iz && arg1r && real_zerop (arg1r))
10071 {
10072 tree rp = arg0r ? arg0r
10073 : build1 (REALPART_EXPR, rtype, arg0);
10074 tree ip = arg1i ? arg1i
10075 : build1 (IMAGPART_EXPR, rtype, arg1);
10076 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10077 }
10078 }
10079 }
10080
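      /* Example (editor's note): under -fno-signed-zeros (and with
         signaling NaNs not honored, the default),
           __complex__ (x, 0.f) + __complex__ (0.f, y)
         folds to __complex__ (x, y); with signed zeros honored the
         fold is invalid, since 0.f + y is +0.f, not -0.f, for
         y == -0.f.  */
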
10081 if (flag_unsafe_math_optimizations
10082 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10083 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10084 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10085 return tem;
10086
10087 /* Convert x+x into x*2.0. */
10088 if (operand_equal_p (arg0, arg1, 0)
10089 && SCALAR_FLOAT_TYPE_P (type))
10090 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10091 build_real (type, dconst2));
10092
10093 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10094 We associate floats only if the user has specified
10095 -fassociative-math. */
10096 if (flag_associative_math
10097 && TREE_CODE (arg1) == PLUS_EXPR
10098 && TREE_CODE (arg0) != MULT_EXPR)
10099 {
10100 tree tree10 = TREE_OPERAND (arg1, 0);
10101 tree tree11 = TREE_OPERAND (arg1, 1);
10102 if (TREE_CODE (tree11) == MULT_EXPR
10103 && TREE_CODE (tree10) == MULT_EXPR)
10104 {
10105 tree tree0;
10106 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10107 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10108 }
10109 }
10110 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10111 We associate floats only if the user has specified
10112 -fassociative-math. */
10113 if (flag_associative_math
10114 && TREE_CODE (arg0) == PLUS_EXPR
10115 && TREE_CODE (arg1) != MULT_EXPR)
10116 {
10117 tree tree00 = TREE_OPERAND (arg0, 0);
10118 tree tree01 = TREE_OPERAND (arg0, 1);
10119 if (TREE_CODE (tree01) == MULT_EXPR
10120 && TREE_CODE (tree00) == MULT_EXPR)
10121 {
10122 tree tree0;
10123 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10124 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10125 }
10126 }
10127 }
10128
10129 bit_rotate:
10130 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10131 is a rotate of A by C1 bits. */
10132 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10133 is a rotate of A by B bits. */
10134 {
10135 enum tree_code code0, code1;
10136 tree rtype;
10137 code0 = TREE_CODE (arg0);
10138 code1 = TREE_CODE (arg1);
10139 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10140 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10141 && operand_equal_p (TREE_OPERAND (arg0, 0),
10142 TREE_OPERAND (arg1, 0), 0)
10143 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10144 TYPE_UNSIGNED (rtype))
10145 /* Only create rotates in complete modes. Other cases are not
10146 expanded properly. */
10147 && (element_precision (rtype)
10148 == element_precision (TYPE_MODE (rtype))))
10149 {
10150 tree tree01, tree11;
10151 enum tree_code code01, code11;
10152
10153 tree01 = TREE_OPERAND (arg0, 1);
10154 tree11 = TREE_OPERAND (arg1, 1);
10155 STRIP_NOPS (tree01);
10156 STRIP_NOPS (tree11);
10157 code01 = TREE_CODE (tree01);
10158 code11 = TREE_CODE (tree11);
10159 if (code01 == INTEGER_CST
10160 && code11 == INTEGER_CST
10161 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10162 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10163 {
10164 tem = build2_loc (loc, LROTATE_EXPR,
10165 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10166 TREE_OPERAND (arg0, 0),
10167 code0 == LSHIFT_EXPR ? tree01 : tree11);
10168 return fold_convert_loc (loc, type, tem);
10169 }
10170 else if (code11 == MINUS_EXPR)
10171 {
10172 tree tree110, tree111;
10173 tree110 = TREE_OPERAND (tree11, 0);
10174 tree111 = TREE_OPERAND (tree11, 1);
10175 STRIP_NOPS (tree110);
10176 STRIP_NOPS (tree111);
10177 if (TREE_CODE (tree110) == INTEGER_CST
10178 && 0 == compare_tree_int (tree110,
10179 element_precision
10180 (TREE_TYPE (TREE_OPERAND
10181 (arg0, 0))))
10182 && operand_equal_p (tree01, tree111, 0))
10183 return
10184 fold_convert_loc (loc, type,
10185 build2 ((code0 == LSHIFT_EXPR
10186 ? LROTATE_EXPR
10187 : RROTATE_EXPR),
10188 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10189 TREE_OPERAND (arg0, 0), tree01));
10190 }
10191 else if (code01 == MINUS_EXPR)
10192 {
10193 tree tree010, tree011;
10194 tree010 = TREE_OPERAND (tree01, 0);
10195 tree011 = TREE_OPERAND (tree01, 1);
10196 STRIP_NOPS (tree010);
10197 STRIP_NOPS (tree011);
10198 if (TREE_CODE (tree010) == INTEGER_CST
10199 && 0 == compare_tree_int (tree010,
10200 element_precision
10201 (TREE_TYPE (TREE_OPERAND
10202 (arg0, 0))))
10203 && operand_equal_p (tree11, tree011, 0))
10204 return fold_convert_loc
10205 (loc, type,
10206 build2 ((code0 != LSHIFT_EXPR
10207 ? LROTATE_EXPR
10208 : RROTATE_EXPR),
10209 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10210 TREE_OPERAND (arg0, 0), tree11));
10211 }
10212 }
10213 }
10214
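  /* Illustration (editor's sketch): for unsigned 32-bit a,
       (a << 3) + (a >> 29)        becomes a rotate-left by 3
       (a << b) + (a >> (32 - b))  becomes a rotate-left by b
     because the shift counts sum to the precision of a.  */
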
10215 associate:
10216 /* In most languages, we can't associate operations on floats through
10217 parentheses. Rather than remember where the parentheses were, we
10218 don't associate floats at all, unless the user has specified
10219 -fassociative-math.
10220 And, we need to make sure type is not saturating. */
10221
10222 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10223 && !TYPE_SATURATING (type))
10224 {
10225 tree var0, con0, lit0, minus_lit0;
10226 tree var1, con1, lit1, minus_lit1;
10227 tree atype = type;
10228 bool ok = true;
10229
10230 /* Split both trees into variables, constants, and literals. Then
10231 associate each group together, the constants with literals,
10232 then the result with variables. This increases the chances of
10233 literals being recombined later and of generating relocatable
10234 expressions for the sum of a constant and literal. */
10235 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10236 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10237 code == MINUS_EXPR);
10238
10239 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10240 if (code == MINUS_EXPR)
10241 code = PLUS_EXPR;
10242
10243 /* With undefined overflow prefer doing association in a type
10244 which wraps on overflow, if that is one of the operand types. */
10245 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10246 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10247 {
10248 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10249 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10250 atype = TREE_TYPE (arg0);
10251 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10252 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10253 atype = TREE_TYPE (arg1);
10254 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10255 }
10256
10257 /* With undefined overflow we can only associate constants with one
10258 variable, and constants whose association doesn't overflow. */
10259 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10260 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10261 {
10262 if (var0 && var1)
10263 {
10264 tree tmp0 = var0;
10265 tree tmp1 = var1;
10266
10267 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10268 tmp0 = TREE_OPERAND (tmp0, 0);
10269 if (CONVERT_EXPR_P (tmp0)
10270 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10271 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10272 <= TYPE_PRECISION (atype)))
10273 tmp0 = TREE_OPERAND (tmp0, 0);
10274 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10275 tmp1 = TREE_OPERAND (tmp1, 0);
10276 if (CONVERT_EXPR_P (tmp1)
10277 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10278 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10279 <= TYPE_PRECISION (atype)))
10280 tmp1 = TREE_OPERAND (tmp1, 0);
10281 /* The only case we can still associate with two variables
10282 is if they are the same, modulo negation and bit-pattern
10283 preserving conversions. */
10284 if (!operand_equal_p (tmp0, tmp1, 0))
10285 ok = false;
10286 }
10287 }
10288
10289 /* Only do something if we found more than two objects. Otherwise,
10290 nothing has changed and we risk infinite recursion. */
10291 if (ok
10292 && (2 < ((var0 != 0) + (var1 != 0)
10293 + (con0 != 0) + (con1 != 0)
10294 + (lit0 != 0) + (lit1 != 0)
10295 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10296 {
10297 bool any_overflows = false;
10298 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10299 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10300 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10301 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10302 var0 = associate_trees (loc, var0, var1, code, atype);
10303 con0 = associate_trees (loc, con0, con1, code, atype);
10304 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10305 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10306 code, atype);
10307
10308 /* Preserve the MINUS_EXPR if the negative part of the literal is
10309 greater than the positive part. Otherwise, the multiplicative
 10310 	     folding code (i.e. extract_muldiv) may be fooled when
 10311 	     unsigned constants are subtracted, like in the following
10312 example: ((X*2 + 4) - 8U)/2. */
10313 if (minus_lit0 && lit0)
10314 {
10315 if (TREE_CODE (lit0) == INTEGER_CST
10316 && TREE_CODE (minus_lit0) == INTEGER_CST
10317 && tree_int_cst_lt (lit0, minus_lit0))
10318 {
10319 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10320 MINUS_EXPR, atype);
10321 lit0 = 0;
10322 }
10323 else
10324 {
10325 lit0 = associate_trees (loc, lit0, minus_lit0,
10326 MINUS_EXPR, atype);
10327 minus_lit0 = 0;
10328 }
10329 }
10330
10331 /* Don't introduce overflows through reassociation. */
10332 if (!any_overflows
10333 && ((lit0 && TREE_OVERFLOW (lit0))
10334 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10335 return NULL_TREE;
10336
10337 if (minus_lit0)
10338 {
10339 if (con0 == 0)
10340 return
10341 fold_convert_loc (loc, type,
10342 associate_trees (loc, var0, minus_lit0,
10343 MINUS_EXPR, atype));
10344 else
10345 {
10346 con0 = associate_trees (loc, con0, minus_lit0,
10347 MINUS_EXPR, atype);
10348 return
10349 fold_convert_loc (loc, type,
10350 associate_trees (loc, var0, con0,
10351 PLUS_EXPR, atype));
10352 }
10353 }
10354
10355 con0 = associate_trees (loc, con0, lit0, code, atype);
10356 return
10357 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10358 code, atype));
10359 }
10360 }
10361
10362 return NULL_TREE;
10363
10364 case MINUS_EXPR:
10365 /* Pointer simplifications for subtraction, simple reassociations. */
10366 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10367 {
10368 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10369 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10370 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10371 {
10372 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10373 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10374 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10375 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10376 return fold_build2_loc (loc, PLUS_EXPR, type,
10377 fold_build2_loc (loc, MINUS_EXPR, type,
10378 arg00, arg10),
10379 fold_build2_loc (loc, MINUS_EXPR, type,
10380 arg01, arg11));
10381 }
10382 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10383 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10384 {
10385 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10386 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10387 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10388 fold_convert_loc (loc, type, arg1));
10389 if (tmp)
10390 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10391 }
10392 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10393 simplifies. */
10394 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10395 {
10396 tree arg10 = fold_convert_loc (loc, type,
10397 TREE_OPERAND (arg1, 0));
10398 tree arg11 = fold_convert_loc (loc, type,
10399 TREE_OPERAND (arg1, 1));
10400 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10401 fold_convert_loc (loc, type, arg0),
10402 arg10);
10403 if (tmp)
10404 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10405 }
10406 }
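      /* For example, (p p+ 4) - (p p+ 12) becomes (p - p) + (4 - 12),
	 which further folding can reduce to the constant -8.  */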
10407 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10408 if (TREE_CODE (arg0) == NEGATE_EXPR
10409 && negate_expr_p (arg1)
10410 && reorder_operands_p (arg0, arg1))
10411 return fold_build2_loc (loc, MINUS_EXPR, type,
10412 fold_convert_loc (loc, type,
10413 negate_expr (arg1)),
10414 fold_convert_loc (loc, type,
10415 TREE_OPERAND (arg0, 0)));
10416 /* Convert -A - 1 to ~A. */
10417 if (TREE_CODE (arg0) == NEGATE_EXPR
10418 && integer_each_onep (arg1)
10419 && !TYPE_OVERFLOW_TRAPS (type))
10420 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10421 fold_convert_loc (loc, type,
10422 TREE_OPERAND (arg0, 0)));
10423
10424 /* Convert -1 - A to ~A. */
10425 if (TREE_CODE (type) != COMPLEX_TYPE
10426 && integer_all_onesp (arg0))
10427 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10428
10429
10430 /* X - (X / Y) * Y is X % Y. */
10431 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10432 && TREE_CODE (arg1) == MULT_EXPR
10433 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10434 && operand_equal_p (arg0,
10435 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10436 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10437 TREE_OPERAND (arg1, 1), 0))
10438 return
10439 fold_convert_loc (loc, type,
10440 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10441 arg0, TREE_OPERAND (arg1, 1)));
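      /* For example, with X == 7 and Y == 3:
	 7 - (7 / 3) * 3 == 7 - 2 * 3 == 1, which is exactly 7 % 3.  */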
10442
10443 if (! FLOAT_TYPE_P (type))
10444 {
10445 if (integer_zerop (arg0))
10446 return negate_expr (fold_convert_loc (loc, type, arg1));
10447
10448 /* Fold A - (A & B) into ~B & A. */
10449 if (!TREE_SIDE_EFFECTS (arg0)
10450 && TREE_CODE (arg1) == BIT_AND_EXPR)
10451 {
10452 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10453 {
10454 tree arg10 = fold_convert_loc (loc, type,
10455 TREE_OPERAND (arg1, 0));
10456 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10457 fold_build1_loc (loc, BIT_NOT_EXPR,
10458 type, arg10),
10459 fold_convert_loc (loc, type, arg0));
10460 }
10461 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10462 {
10463 tree arg11 = fold_convert_loc (loc,
10464 type, TREE_OPERAND (arg1, 1));
10465 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10466 fold_build1_loc (loc, BIT_NOT_EXPR,
10467 type, arg11),
10468 fold_convert_loc (loc, type, arg0));
10469 }
10470 }
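	  /* For example, with A == 12 (0b1100) and B == 10 (0b1010):
	     A - (A & B) == 12 - 8 == 4, and equally
	     ~B & A == 0b0101 & 0b1100 == 0b0100 == 4.  */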
10471
10472 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10473 any power of 2 minus 1. */
10474 if (TREE_CODE (arg0) == BIT_AND_EXPR
10475 && TREE_CODE (arg1) == BIT_AND_EXPR
10476 && operand_equal_p (TREE_OPERAND (arg0, 0),
10477 TREE_OPERAND (arg1, 0), 0))
10478 {
10479 tree mask0 = TREE_OPERAND (arg0, 1);
10480 tree mask1 = TREE_OPERAND (arg1, 1);
10481 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10482
10483 if (operand_equal_p (tem, mask1, 0))
10484 {
10485 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10486 TREE_OPERAND (arg0, 0), mask1);
10487 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10488 }
10489 }
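	  /* For example, with B == 3 (a power of 2 minus 1) and A == 6:
	     (A & ~B) - (A & B) == 4 - 2 == 2, and likewise
	     (A ^ B) - B == 5 - 3 == 2.  */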
10490 }
10491
10492 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10493 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10494 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10495
10496 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10497 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10498 (-ARG1 + ARG0) reduces to -ARG1. */
10499 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10500 return negate_expr (fold_convert_loc (loc, type, arg1));
10501
10502 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10503 __complex__ ( x, -y ). This is not the same for SNaNs or if
10504 signed zeros are involved. */
10505 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10506 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10507 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10508 {
10509 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10510 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10511 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10512 bool arg0rz = false, arg0iz = false;
10513 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10514 || (arg0i && (arg0iz = real_zerop (arg0i))))
10515 {
10516 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10517 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10518 if (arg0rz && arg1i && real_zerop (arg1i))
10519 {
10520 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10521 arg1r ? arg1r
10522 : build1 (REALPART_EXPR, rtype, arg1));
10523 tree ip = arg0i ? arg0i
10524 : build1 (IMAGPART_EXPR, rtype, arg0);
10525 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10526 }
10527 else if (arg0iz && arg1r && real_zerop (arg1r))
10528 {
10529 tree rp = arg0r ? arg0r
10530 : build1 (REALPART_EXPR, rtype, arg0);
10531 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10532 arg1i ? arg1i
10533 : build1 (IMAGPART_EXPR, rtype, arg1));
10534 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10535 }
10536 }
10537 }
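      /* For example, __complex__ (x, 0.0) - __complex__ (0.0, y) becomes
	 __complex__ (x, -y) here; with SNaNs or signed zeros the two
	 forms could differ, hence the guards above.  */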
10538
10539 /* A - B -> A + (-B) if B is easily negatable. */
10540 if (negate_expr_p (arg1)
10541 && (!INTEGRAL_TYPE_P (type)
10542 || TYPE_OVERFLOW_WRAPS (type)
10543 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10544 && ((FLOAT_TYPE_P (type)
10545 /* Avoid this transformation if B is a positive REAL_CST. */
10546 && (TREE_CODE (arg1) != REAL_CST
10547 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10548 || INTEGRAL_TYPE_P (type)))
10549 return fold_build2_loc (loc, PLUS_EXPR, type,
10550 fold_convert_loc (loc, type, arg0),
10551 fold_convert_loc (loc, type,
10552 negate_expr (arg1)));
10553
10554 /* Try folding difference of addresses. */
10555 {
10556 HOST_WIDE_INT diff;
10557
10558 if ((TREE_CODE (arg0) == ADDR_EXPR
10559 || TREE_CODE (arg1) == ADDR_EXPR)
10560 && ptr_difference_const (arg0, arg1, &diff))
10561 return build_int_cst_type (type, diff);
10562 }
10563
10564 /* Fold &a[i] - &a[j] to i-j. */
10565 if (TREE_CODE (arg0) == ADDR_EXPR
10566 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10567 && TREE_CODE (arg1) == ADDR_EXPR
10568 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10569 {
10570 tree tem = fold_addr_of_array_ref_difference (loc, type,
10571 TREE_OPERAND (arg0, 0),
10572 TREE_OPERAND (arg1, 0));
10573 if (tem)
10574 return tem;
10575 }
10576
10577 if (FLOAT_TYPE_P (type)
10578 && flag_unsafe_math_optimizations
10579 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10580 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10581 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10582 return tem;
10583
10584 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10585 one. Make sure the type is not saturating and has the signedness of
10586 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10587 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10588 if ((TREE_CODE (arg0) == MULT_EXPR
10589 || TREE_CODE (arg1) == MULT_EXPR)
10590 && !TYPE_SATURATING (type)
10591 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10592 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10593 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10594 {
10595 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10596 if (tem)
10597 return tem;
10598 }
10599
10600 goto associate;
10601
10602 case MULT_EXPR:
10603 /* (-A) * (-B) -> A * B */
10604 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10605 return fold_build2_loc (loc, MULT_EXPR, type,
10606 fold_convert_loc (loc, type,
10607 TREE_OPERAND (arg0, 0)),
10608 fold_convert_loc (loc, type,
10609 negate_expr (arg1)));
10610 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10611 return fold_build2_loc (loc, MULT_EXPR, type,
10612 fold_convert_loc (loc, type,
10613 negate_expr (arg0)),
10614 fold_convert_loc (loc, type,
10615 TREE_OPERAND (arg1, 0)));
10616
10617 if (! FLOAT_TYPE_P (type))
10618 {
10619 /* Transform x * -1 into -x. Make sure to do the negation
10620 on the original operand with conversions not stripped
10621 because we can only strip non-sign-changing conversions. */
10622 if (integer_minus_onep (arg1))
10623 return fold_convert_loc (loc, type, negate_expr (op0));
10624 /* Transform x * -C into -x * C if x is easily negatable. */
10625 if (TREE_CODE (arg1) == INTEGER_CST
10626 && tree_int_cst_sgn (arg1) == -1
10627 && negate_expr_p (arg0)
10628 && (tem = negate_expr (arg1)) != arg1
10629 && !TREE_OVERFLOW (tem))
10630 return fold_build2_loc (loc, MULT_EXPR, type,
10631 fold_convert_loc (loc, type,
10632 negate_expr (arg0)),
10633 tem);
10634
10635 /* (a * (1 << b)) is (a << b) */
10636 if (TREE_CODE (arg1) == LSHIFT_EXPR
10637 && integer_onep (TREE_OPERAND (arg1, 0)))
10638 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10639 TREE_OPERAND (arg1, 1));
10640 if (TREE_CODE (arg0) == LSHIFT_EXPR
10641 && integer_onep (TREE_OPERAND (arg0, 0)))
10642 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10643 TREE_OPERAND (arg0, 1));
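	  /* For example, a * (1 << 3), i.e. a * 8, becomes a << 3.  */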
10644
10645 /* (A + A) * C -> A * 2 * C */
10646 if (TREE_CODE (arg0) == PLUS_EXPR
10647 && TREE_CODE (arg1) == INTEGER_CST
10648 && operand_equal_p (TREE_OPERAND (arg0, 0),
10649 TREE_OPERAND (arg0, 1), 0))
10650 return fold_build2_loc (loc, MULT_EXPR, type,
10651 omit_one_operand_loc (loc, type,
10652 TREE_OPERAND (arg0, 0),
10653 TREE_OPERAND (arg0, 1)),
10654 fold_build2_loc (loc, MULT_EXPR, type,
 10655 					  build_int_cst (type, 2), arg1));
10656
10657 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10658 sign-changing only. */
10659 if (TREE_CODE (arg1) == INTEGER_CST
10660 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10661 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10662 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10663
10664 strict_overflow_p = false;
10665 if (TREE_CODE (arg1) == INTEGER_CST
10666 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10667 &strict_overflow_p)))
10668 {
10669 if (strict_overflow_p)
10670 fold_overflow_warning (("assuming signed overflow does not "
10671 "occur when simplifying "
10672 "multiplication"),
10673 WARN_STRICT_OVERFLOW_MISC);
10674 return fold_convert_loc (loc, type, tem);
10675 }
10676
10677 /* Optimize z * conj(z) for integer complex numbers. */
10678 if (TREE_CODE (arg0) == CONJ_EXPR
10679 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10680 return fold_mult_zconjz (loc, type, arg1);
10681 if (TREE_CODE (arg1) == CONJ_EXPR
10682 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10683 return fold_mult_zconjz (loc, type, arg0);
10684 }
10685 else
10686 {
10687 /* Maybe fold x * 0 to 0. The expressions aren't the same
10688 when x is NaN, since x * 0 is also NaN. Nor are they the
10689 same in modes with signed zeros, since multiplying a
10690 negative value by 0 gives -0, not +0. */
10691 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10692 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10693 && real_zerop (arg1))
10694 return omit_one_operand_loc (loc, type, arg1, arg0);
10695 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10696 Likewise for complex arithmetic with signed zeros. */
10697 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10698 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10699 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10700 && real_onep (arg1))
10701 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10702
10703 /* Transform x * -1.0 into -x. */
10704 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10705 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10706 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10707 && real_minus_onep (arg1))
10708 return fold_convert_loc (loc, type, negate_expr (arg0));
10709
10710 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
 10711 	     the result for floating point types due to rounding, so it is applied
 10712 	     only if -fassociative-math was specified.  */
10713 if (flag_associative_math
10714 && TREE_CODE (arg0) == RDIV_EXPR
10715 && TREE_CODE (arg1) == REAL_CST
10716 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10717 {
10718 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10719 arg1);
10720 if (tem)
10721 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10722 TREE_OPERAND (arg0, 1));
10723 }
10724
10725 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10726 if (operand_equal_p (arg0, arg1, 0))
10727 {
10728 tree tem = fold_strip_sign_ops (arg0);
10729 if (tem != NULL_TREE)
10730 {
10731 tem = fold_convert_loc (loc, type, tem);
10732 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10733 }
10734 }
10735
10736 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10737 This is not the same for NaNs or if signed zeros are
10738 involved. */
10739 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10740 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10741 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10742 && TREE_CODE (arg1) == COMPLEX_CST
10743 && real_zerop (TREE_REALPART (arg1)))
10744 {
10745 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10746 if (real_onep (TREE_IMAGPART (arg1)))
10747 return
10748 fold_build2_loc (loc, COMPLEX_EXPR, type,
10749 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10750 rtype, arg0)),
10751 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10752 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10753 return
10754 fold_build2_loc (loc, COMPLEX_EXPR, type,
10755 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10756 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10757 rtype, arg0)));
10758 }
10759
10760 /* Optimize z * conj(z) for floating point complex numbers.
10761 Guarded by flag_unsafe_math_optimizations as non-finite
10762 imaginary components don't produce scalar results. */
10763 if (flag_unsafe_math_optimizations
10764 && TREE_CODE (arg0) == CONJ_EXPR
10765 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10766 return fold_mult_zconjz (loc, type, arg1);
10767 if (flag_unsafe_math_optimizations
10768 && TREE_CODE (arg1) == CONJ_EXPR
10769 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10770 return fold_mult_zconjz (loc, type, arg0);
10771
10772 if (flag_unsafe_math_optimizations)
10773 {
10774 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10775 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10776
10777 /* Optimizations of root(...)*root(...). */
10778 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10779 {
10780 tree rootfn, arg;
10781 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10782 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10783
10784 /* Optimize sqrt(x)*sqrt(x) as x. */
10785 if (BUILTIN_SQRT_P (fcode0)
10786 && operand_equal_p (arg00, arg10, 0)
10787 && ! HONOR_SNANS (TYPE_MODE (type)))
10788 return arg00;
10789
10790 /* Optimize root(x)*root(y) as root(x*y). */
10791 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10792 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10793 return build_call_expr_loc (loc, rootfn, 1, arg);
10794 }
10795
10796 /* Optimize expN(x)*expN(y) as expN(x+y). */
10797 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10798 {
10799 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10800 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10801 CALL_EXPR_ARG (arg0, 0),
10802 CALL_EXPR_ARG (arg1, 0));
10803 return build_call_expr_loc (loc, expfn, 1, arg);
10804 }
10805
10806 /* Optimizations of pow(...)*pow(...). */
10807 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10808 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10809 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10810 {
10811 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10812 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10813 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10814 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10815
10816 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10817 if (operand_equal_p (arg01, arg11, 0))
10818 {
10819 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10820 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10821 arg00, arg10);
10822 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10823 }
10824
10825 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10826 if (operand_equal_p (arg00, arg10, 0))
10827 {
10828 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10829 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10830 arg01, arg11);
10831 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10832 }
10833 }
10834
10835 /* Optimize tan(x)*cos(x) as sin(x). */
10836 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10837 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10838 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10839 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10840 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10841 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10842 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10843 CALL_EXPR_ARG (arg1, 0), 0))
10844 {
10845 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10846
10847 if (sinfn != NULL_TREE)
10848 return build_call_expr_loc (loc, sinfn, 1,
10849 CALL_EXPR_ARG (arg0, 0));
10850 }
10851
10852 /* Optimize x*pow(x,c) as pow(x,c+1). */
10853 if (fcode1 == BUILT_IN_POW
10854 || fcode1 == BUILT_IN_POWF
10855 || fcode1 == BUILT_IN_POWL)
10856 {
10857 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10858 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10859 if (TREE_CODE (arg11) == REAL_CST
10860 && !TREE_OVERFLOW (arg11)
10861 && operand_equal_p (arg0, arg10, 0))
10862 {
10863 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10864 REAL_VALUE_TYPE c;
10865 tree arg;
10866
10867 c = TREE_REAL_CST (arg11);
10868 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10869 arg = build_real (type, c);
10870 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10871 }
10872 }
10873
10874 /* Optimize pow(x,c)*x as pow(x,c+1). */
10875 if (fcode0 == BUILT_IN_POW
10876 || fcode0 == BUILT_IN_POWF
10877 || fcode0 == BUILT_IN_POWL)
10878 {
10879 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10880 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10881 if (TREE_CODE (arg01) == REAL_CST
10882 && !TREE_OVERFLOW (arg01)
10883 && operand_equal_p (arg1, arg00, 0))
10884 {
10885 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10886 REAL_VALUE_TYPE c;
10887 tree arg;
10888
10889 c = TREE_REAL_CST (arg01);
10890 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10891 arg = build_real (type, c);
10892 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10893 }
10894 }
10895
10896 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10897 if (!in_gimple_form
10898 && optimize
10899 && operand_equal_p (arg0, arg1, 0))
10900 {
10901 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10902
10903 if (powfn)
10904 {
10905 tree arg = build_real (type, dconst2);
10906 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10907 }
10908 }
10909 }
10910 }
10911 goto associate;
10912
10913 case BIT_IOR_EXPR:
10914 bit_ior:
10915 /* ~X | X is -1. */
10916 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10917 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10918 {
10919 t1 = build_zero_cst (type);
10920 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10921 return omit_one_operand_loc (loc, type, t1, arg1);
10922 }
10923
10924 /* X | ~X is -1. */
10925 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10926 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10927 {
10928 t1 = build_zero_cst (type);
10929 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10930 return omit_one_operand_loc (loc, type, t1, arg0);
10931 }
10932
10933 /* Canonicalize (X & C1) | C2. */
10934 if (TREE_CODE (arg0) == BIT_AND_EXPR
10935 && TREE_CODE (arg1) == INTEGER_CST
10936 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10937 {
10938 int width = TYPE_PRECISION (type), w;
10939 wide_int c1 = TREE_OPERAND (arg0, 1);
10940 wide_int c2 = arg1;
10941
10942 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10943 if ((c1 & c2) == c1)
10944 return omit_one_operand_loc (loc, type, arg1,
10945 TREE_OPERAND (arg0, 0));
10946
10947 wide_int msk = wi::mask (width, false,
10948 TYPE_PRECISION (TREE_TYPE (arg1)));
10949
10950 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10951 if (msk.and_not (c1 | c2) == 0)
10952 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10953 TREE_OPERAND (arg0, 0), arg1);
10954
10955 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10956 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10957 mode which allows further optimizations. */
10958 c1 &= msk;
10959 c2 &= msk;
10960 wide_int c3 = c1.and_not (c2);
10961 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10962 {
10963 wide_int mask = wi::mask (w, false,
10964 TYPE_PRECISION (type));
10965 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10966 {
10967 c3 = mask;
10968 break;
10969 }
10970 }
10971
10972 if (c3 != c1)
10973 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10974 fold_build2_loc (loc, BIT_AND_EXPR, type,
10975 TREE_OPERAND (arg0, 0),
10976 wide_int_to_tree (type,
10977 c3)),
10978 arg1);
10979 }
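      /* For example, on an 8-bit type (X & 0xf0) | 0x1f satisfies
	 (C1 | C2) == 0xff and so becomes X | 0x1f, while in
	 (X & 0x0f) | 0x0f we have (C1 & C2) == C1 and the result is
	 simply 0x0f.  */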
10980
10981 /* (X & ~Y) | (~X & Y) is X ^ Y */
10982 if (TREE_CODE (arg0) == BIT_AND_EXPR
10983 && TREE_CODE (arg1) == BIT_AND_EXPR)
10984 {
10985 tree a0, a1, l0, l1, n0, n1;
10986
10987 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10988 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10989
10990 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10991 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10992
10993 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10994 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10995
10996 if ((operand_equal_p (n0, a0, 0)
10997 && operand_equal_p (n1, a1, 0))
10998 || (operand_equal_p (n0, a1, 0)
10999 && operand_equal_p (n1, a0, 0)))
11000 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11001 }
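      /* The transform works because (X & ~Y) | (~X & Y) selects exactly
	 the bit positions where X and Y differ, which is the definition
	 of X ^ Y.  */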
11002
11003 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11004 if (t1 != NULL_TREE)
11005 return t1;
11006
11007 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11008
11009 This results in more efficient code for machines without a NAND
11010 instruction. Combine will canonicalize to the first form
11011 which will allow use of NAND instructions provided by the
11012 backend if they exist. */
11013 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11014 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11015 {
11016 return
11017 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11018 build2 (BIT_AND_EXPR, type,
11019 fold_convert_loc (loc, type,
11020 TREE_OPERAND (arg0, 0)),
11021 fold_convert_loc (loc, type,
11022 TREE_OPERAND (arg1, 0))));
11023 }
11024
11025 /* See if this can be simplified into a rotate first. If that
11026 is unsuccessful continue in the association code. */
11027 goto bit_rotate;
11028
11029 case BIT_XOR_EXPR:
11030 /* ~X ^ X is -1. */
11031 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11032 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11033 {
11034 t1 = build_zero_cst (type);
11035 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11036 return omit_one_operand_loc (loc, type, t1, arg1);
11037 }
11038
11039 /* X ^ ~X is -1. */
11040 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11041 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11042 {
11043 t1 = build_zero_cst (type);
11044 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11045 return omit_one_operand_loc (loc, type, t1, arg0);
11046 }
11047
11048 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11049 with a constant, and the two constants have no bits in common,
11050 we should treat this as a BIT_IOR_EXPR since this may produce more
11051 simplifications. */
11052 if (TREE_CODE (arg0) == BIT_AND_EXPR
11053 && TREE_CODE (arg1) == BIT_AND_EXPR
11054 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11055 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11056 && wi::bit_and (TREE_OPERAND (arg0, 1),
11057 TREE_OPERAND (arg1, 1)) == 0)
11058 {
11059 code = BIT_IOR_EXPR;
11060 goto bit_ior;
11061 }
11062
 11063 	  /* (X | Y) ^ X -> Y & ~X.  */
11064 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11065 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11066 {
11067 tree t2 = TREE_OPERAND (arg0, 1);
11068 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11069 arg1);
11070 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11071 fold_convert_loc (loc, type, t2),
11072 fold_convert_loc (loc, type, t1));
11073 return t1;
11074 }
11075
 11076 	  /* (Y | X) ^ X -> Y & ~X.  */
11077 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11078 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11079 {
11080 tree t2 = TREE_OPERAND (arg0, 0);
11081 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11082 arg1);
11083 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11084 fold_convert_loc (loc, type, t2),
11085 fold_convert_loc (loc, type, t1));
11086 return t1;
11087 }
11088
 11089 	  /* X ^ (X | Y) -> Y & ~X.  */
11090 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11091 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11092 {
11093 tree t2 = TREE_OPERAND (arg1, 1);
11094 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11095 arg0);
11096 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11097 fold_convert_loc (loc, type, t2),
11098 fold_convert_loc (loc, type, t1));
11099 return t1;
11100 }
11101
 11102 	  /* X ^ (Y | X) -> Y & ~X.  */
11103 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11104 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11105 {
11106 tree t2 = TREE_OPERAND (arg1, 0);
11107 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11108 arg0);
11109 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11110 fold_convert_loc (loc, type, t2),
11111 fold_convert_loc (loc, type, t1));
11112 return t1;
11113 }
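      /* For example, with X == 0b0011 and Y == 0b0101:
	 (X | Y) ^ X == 0b0111 ^ 0b0011 == 0b0100, which matches
	 Y & ~X == 0b0101 & 0b1100 == 0b0100.  */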
11114
11115 /* Convert ~X ^ ~Y to X ^ Y. */
11116 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11117 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11118 return fold_build2_loc (loc, code, type,
11119 fold_convert_loc (loc, type,
11120 TREE_OPERAND (arg0, 0)),
11121 fold_convert_loc (loc, type,
11122 TREE_OPERAND (arg1, 0)));
11123
11124 /* Convert ~X ^ C to X ^ ~C. */
11125 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11126 && TREE_CODE (arg1) == INTEGER_CST)
11127 return fold_build2_loc (loc, code, type,
11128 fold_convert_loc (loc, type,
11129 TREE_OPERAND (arg0, 0)),
11130 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11131
11132 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11133 if (TREE_CODE (arg0) == BIT_AND_EXPR
11134 && INTEGRAL_TYPE_P (type)
11135 && integer_onep (TREE_OPERAND (arg0, 1))
11136 && integer_onep (arg1))
11137 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11138 build_zero_cst (TREE_TYPE (arg0)));
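      /* For example, (X & 1) is either 0 or 1, and XOR-ing it with 1
	 flips that bit, so the result is 1 exactly when (X & 1) == 0.  */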
11139
11140 /* Fold (X & Y) ^ Y as ~X & Y. */
11141 if (TREE_CODE (arg0) == BIT_AND_EXPR
11142 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11143 {
11144 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11145 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11146 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11147 fold_convert_loc (loc, type, arg1));
11148 }
11149 /* Fold (X & Y) ^ X as ~Y & X. */
11150 if (TREE_CODE (arg0) == BIT_AND_EXPR
11151 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11152 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11153 {
11154 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11155 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11156 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11157 fold_convert_loc (loc, type, arg1));
11158 }
11159 /* Fold X ^ (X & Y) as X & ~Y. */
11160 if (TREE_CODE (arg1) == BIT_AND_EXPR
11161 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11162 {
11163 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11164 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11165 fold_convert_loc (loc, type, arg0),
11166 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11167 }
11168 /* Fold X ^ (Y & X) as ~Y & X. */
11169 if (TREE_CODE (arg1) == BIT_AND_EXPR
11170 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11171 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11172 {
11173 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11174 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11175 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11176 fold_convert_loc (loc, type, arg0));
11177 }
11178
11179 /* See if this can be simplified into a rotate first. If that
11180 is unsuccessful continue in the association code. */
11181 goto bit_rotate;
11182
11183 case BIT_AND_EXPR:
11184 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11185 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11186 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11187 || (TREE_CODE (arg0) == EQ_EXPR
11188 && integer_zerop (TREE_OPERAND (arg0, 1))))
11189 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11190 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11191
 11192 	  /* X & ~X, X & (X == 0), and X & !X are always zero.  */
11193 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11194 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11195 || (TREE_CODE (arg1) == EQ_EXPR
11196 && integer_zerop (TREE_OPERAND (arg1, 1))))
11197 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11198 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11199
11200 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11201 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11202 && INTEGRAL_TYPE_P (type)
11203 && integer_onep (TREE_OPERAND (arg0, 1))
11204 && integer_onep (arg1))
11205 {
11206 tree tem2;
11207 tem = TREE_OPERAND (arg0, 0);
11208 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11209 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11210 tem, tem2);
11211 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11212 build_zero_cst (TREE_TYPE (tem)));
11213 }
11214 /* Fold ~X & 1 as (X & 1) == 0. */
11215 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11216 && INTEGRAL_TYPE_P (type)
11217 && integer_onep (arg1))
11218 {
11219 tree tem2;
11220 tem = TREE_OPERAND (arg0, 0);
11221 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11222 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11223 tem, tem2);
11224 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11225 build_zero_cst (TREE_TYPE (tem)));
11226 }
11227 /* Fold !X & 1 as X == 0. */
11228 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11229 && integer_onep (arg1))
11230 {
11231 tem = TREE_OPERAND (arg0, 0);
11232 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11233 build_zero_cst (TREE_TYPE (tem)));
11234 }
11235
11236 /* Fold (X ^ Y) & Y as ~X & Y. */
11237 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11238 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11239 {
11240 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11241 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11242 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11243 fold_convert_loc (loc, type, arg1));
11244 }
11245 /* Fold (X ^ Y) & X as ~Y & X. */
11246 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11247 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11248 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11249 {
11250 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11251 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11252 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11253 fold_convert_loc (loc, type, arg1));
11254 }
11255 /* Fold X & (X ^ Y) as X & ~Y. */
11256 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11257 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11258 {
11259 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11260 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11261 fold_convert_loc (loc, type, arg0),
11262 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11263 }
11264 /* Fold X & (Y ^ X) as ~Y & X. */
11265 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11266 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11267 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11268 {
11269 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11270 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11271 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11272 fold_convert_loc (loc, type, arg0));
11273 }
11274
11275 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11276 multiple of 1 << CST. */
11277 if (TREE_CODE (arg1) == INTEGER_CST)
11278 {
11279 wide_int cst1 = arg1;
11280 wide_int ncst1 = -cst1;
11281 if ((cst1 & ncst1) == ncst1
11282 && multiple_of_p (type, arg0,
11283 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11284 return fold_convert_loc (loc, type, arg0);
11285 }
11286
11287 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11288 bits from CST2. */
11289 if (TREE_CODE (arg1) == INTEGER_CST
11290 && TREE_CODE (arg0) == MULT_EXPR
11291 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11292 {
11293 wide_int warg1 = arg1;
11294 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11295
11296 if (masked == 0)
11297 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11298 arg0, arg1);
11299 else if (masked != warg1)
11300 {
11301 /* Avoid the transform if arg1 is a mask of some
11302 mode which allows further optimizations. */
11303 int pop = wi::popcount (warg1);
11304 if (!(pop >= BITS_PER_UNIT
11305 && exact_log2 (pop) != -1
11306 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11307 return fold_build2_loc (loc, code, type, op0,
11308 wide_int_to_tree (type, masked));
11309 }
11310 }
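      /* For example, (X * 4) & 3 is always zero because the product has
	 at least two trailing zero bits, and (X * 4) & 7 can drop the
	 two known-zero bits and become (X * 4) & 4.  */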
11311
11312 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11313 ((A & N) + B) & M -> (A + B) & M
11314 Similarly if (N & M) == 0,
11315 ((A | N) + B) & M -> (A + B) & M
11316 and for - instead of + (or unary - instead of +)
11317 and/or ^ instead of |.
11318 If B is constant and (B & M) == 0, fold into A & M. */
11319 if (TREE_CODE (arg1) == INTEGER_CST)
11320 {
11321 wide_int cst1 = arg1;
11322 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11323 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11324 && (TREE_CODE (arg0) == PLUS_EXPR
11325 || TREE_CODE (arg0) == MINUS_EXPR
11326 || TREE_CODE (arg0) == NEGATE_EXPR)
11327 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11328 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11329 {
11330 tree pmop[2];
11331 int which = 0;
11332 wide_int cst0;
11333
11334 /* Now we know that arg0 is (C + D) or (C - D) or
 11335 		 -C, and arg1 (M) == (1LL << cst) - 1.
11336 Store C into PMOP[0] and D into PMOP[1]. */
11337 pmop[0] = TREE_OPERAND (arg0, 0);
11338 pmop[1] = NULL;
11339 if (TREE_CODE (arg0) != NEGATE_EXPR)
11340 {
11341 pmop[1] = TREE_OPERAND (arg0, 1);
11342 which = 1;
11343 }
11344
11345 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11346 which = -1;
11347
11348 for (; which >= 0; which--)
11349 switch (TREE_CODE (pmop[which]))
11350 {
11351 case BIT_AND_EXPR:
11352 case BIT_IOR_EXPR:
11353 case BIT_XOR_EXPR:
11354 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11355 != INTEGER_CST)
11356 break;
11357 cst0 = TREE_OPERAND (pmop[which], 1);
11358 cst0 &= cst1;
11359 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11360 {
11361 if (cst0 != cst1)
11362 break;
11363 }
11364 else if (cst0 != 0)
11365 break;
11366 /* If C or D is of the form (A & N) where
11367 (N & M) == M, or of the form (A | N) or
11368 (A ^ N) where (N & M) == 0, replace it with A. */
11369 pmop[which] = TREE_OPERAND (pmop[which], 0);
11370 break;
11371 case INTEGER_CST:
11372 /* If C or D is a N where (N & M) == 0, it can be
11373 omitted (assumed 0). */
11374 if ((TREE_CODE (arg0) == PLUS_EXPR
11375 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11376 && (cst1 & pmop[which]) == 0)
11377 pmop[which] = NULL;
11378 break;
11379 default:
11380 break;
11381 }
11382
11383 /* Only build anything new if we optimized one or both arguments
11384 above. */
11385 if (pmop[0] != TREE_OPERAND (arg0, 0)
11386 || (TREE_CODE (arg0) != NEGATE_EXPR
11387 && pmop[1] != TREE_OPERAND (arg0, 1)))
11388 {
11389 tree utype = TREE_TYPE (arg0);
11390 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11391 {
11392 /* Perform the operations in a type that has defined
11393 overflow behavior. */
11394 utype = unsigned_type_for (TREE_TYPE (arg0));
11395 if (pmop[0] != NULL)
11396 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11397 if (pmop[1] != NULL)
11398 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11399 }
11400
11401 if (TREE_CODE (arg0) == NEGATE_EXPR)
11402 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11403 else if (TREE_CODE (arg0) == PLUS_EXPR)
11404 {
11405 if (pmop[0] != NULL && pmop[1] != NULL)
11406 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11407 pmop[0], pmop[1]);
11408 else if (pmop[0] != NULL)
11409 tem = pmop[0];
11410 else if (pmop[1] != NULL)
11411 tem = pmop[1];
11412 else
11413 return build_int_cst (type, 0);
11414 }
11415 else if (pmop[0] == NULL)
11416 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11417 else
11418 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11419 pmop[0], pmop[1]);
11420 /* TEM is now the new binary +, - or unary - replacement. */
11421 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11422 fold_convert_loc (loc, utype, arg1));
11423 return fold_convert_loc (loc, type, tem);
11424 }
11425 }
11426 }
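      /* For example, with M == 0xff: ((A & 0x1ff) + B) & 0xff drops the
	 inner mask because (0x1ff & 0xff) == 0xff, giving (A + B) & 0xff,
	 and ((A | 0x100) + B) & 0xff drops the inner OR because
	 (0x100 & 0xff) == 0.  */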
11427
11428 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11429 if (t1 != NULL_TREE)
11430 return t1;
11431 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11432 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11433 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11434 {
11435 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11436
11437 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11438 if (mask == -1)
11439 return
11440 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11441 }
11442
11443 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11444
11445 This results in more efficient code for machines without a NOR
11446 instruction. Combine will canonicalize to the first form
11447 which will allow use of NOR instructions provided by the
11448 backend if they exist. */
11449 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11450 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11451 {
11452 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11453 build2 (BIT_IOR_EXPR, type,
11454 fold_convert_loc (loc, type,
11455 TREE_OPERAND (arg0, 0)),
11456 fold_convert_loc (loc, type,
11457 TREE_OPERAND (arg1, 0))));
11458 }
11459
11460 /* If arg0 is derived from the address of an object or function, we may
11461 be able to fold this expression using the object or function's
11462 alignment. */
11463 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11464 {
11465 unsigned HOST_WIDE_INT modulus, residue;
11466 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11467
11468 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11469 integer_onep (arg1));
11470
11471 /* This works because modulus is a power of 2. If this weren't the
11472 case, we'd have to replace it by its greatest power-of-2
11473 divisor: modulus & -modulus. */
11474 if (low < modulus)
11475 return build_int_cst (type, residue & low);
11476 }
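      /* For example, if arg0 is the address of an int with the usual
	 4-byte alignment, the modulus is (at least) 4 with residue 0, so
	 an expression like that address ANDed with 3 folds to 0.  */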
11477
11478 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11479 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11480 if the new mask might be further optimized. */
11481 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11482 || TREE_CODE (arg0) == RSHIFT_EXPR)
11483 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11484 && TREE_CODE (arg1) == INTEGER_CST
11485 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11486 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11487 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11488 < TYPE_PRECISION (TREE_TYPE (arg0))))
11489 {
11490 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11491 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11492 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11493 tree shift_type = TREE_TYPE (arg0);
11494
11495 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11496 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11497 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11498 && TYPE_PRECISION (TREE_TYPE (arg0))
11499 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11500 {
11501 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11502 tree arg00 = TREE_OPERAND (arg0, 0);
11503 /* See if more bits can be proven as zero because of
11504 zero extension. */
11505 if (TREE_CODE (arg00) == NOP_EXPR
11506 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11507 {
11508 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11509 if (TYPE_PRECISION (inner_type)
11510 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11511 && TYPE_PRECISION (inner_type) < prec)
11512 {
11513 prec = TYPE_PRECISION (inner_type);
11514 /* See if we can shorten the right shift. */
11515 if (shiftc < prec)
11516 shift_type = inner_type;
11517 /* Otherwise X >> C1 is all zeros, so we'll optimize
11518 it into (X, 0) later on by making sure zerobits
11519 is all ones. */
11520 }
11521 }
11522 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11523 if (shiftc < prec)
11524 {
11525 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11526 zerobits <<= prec - shiftc;
11527 }
 11528 	      /* For an arithmetic shift, if the sign bit could be set, zerobits
 11529 		 can actually contain sign bits, so no transformation is
11530 possible, unless MASK masks them all away. In that
11531 case the shift needs to be converted into logical shift. */
11532 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11533 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11534 {
11535 if ((mask & zerobits) == 0)
11536 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11537 else
11538 zerobits = 0;
11539 }
11540 }
11541
11542 /* ((X << 16) & 0xff00) is (X, 0). */
11543 if ((mask & zerobits) == mask)
11544 return omit_one_operand_loc (loc, type,
11545 build_int_cst (type, 0), arg0);
11546
11547 newmask = mask | zerobits;
11548 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11549 {
11550 /* Only do the transformation if NEWMASK is some integer
11551 mode's mask. */
11552 for (prec = BITS_PER_UNIT;
11553 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11554 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11555 break;
11556 if (prec < HOST_BITS_PER_WIDE_INT
11557 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11558 {
11559 tree newmaskt;
11560
11561 if (shift_type != TREE_TYPE (arg0))
11562 {
11563 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11564 fold_convert_loc (loc, shift_type,
11565 TREE_OPERAND (arg0, 0)),
11566 TREE_OPERAND (arg0, 1));
11567 tem = fold_convert_loc (loc, type, tem);
11568 }
11569 else
11570 tem = op0;
11571 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11572 if (!tree_int_cst_equal (newmaskt, arg1))
11573 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11574 }
11575 }
11576 }
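      /* For example, on a 32-bit type (X << 16) & 0xff00 is always zero,
	 while on an 8-bit type (X << 4) & 0xf3 can widen its mask to
	 0xff, a full-width mask that later folding may drop entirely.  */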
11577
11578 goto associate;
11579
11580 case RDIV_EXPR:
11581 /* Don't touch a floating-point divide by zero unless the mode
11582 of the constant can represent infinity. */
11583 if (TREE_CODE (arg1) == REAL_CST
11584 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11585 && real_zerop (arg1))
11586 return NULL_TREE;
11587
11588 /* Optimize A / A to 1.0 if we don't care about
11589 NaNs or Infinities. Skip the transformation
11590 for non-real operands. */
11591 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11592 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11593 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11594 && operand_equal_p (arg0, arg1, 0))
11595 {
11596 tree r = build_real (TREE_TYPE (arg0), dconst1);
11597
11598 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11599 }
11600
11601 /* The complex version of the above A / A optimization. */
11602 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11603 && operand_equal_p (arg0, arg1, 0))
11604 {
11605 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11606 if (! HONOR_NANS (TYPE_MODE (elem_type))
11607 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11608 {
11609 tree r = build_real (elem_type, dconst1);
11610 /* omit_two_operands will call fold_convert for us. */
11611 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11612 }
11613 }
11614
11615 /* (-A) / (-B) -> A / B */
11616 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11617 return fold_build2_loc (loc, RDIV_EXPR, type,
11618 TREE_OPERAND (arg0, 0),
11619 negate_expr (arg1));
11620 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11621 return fold_build2_loc (loc, RDIV_EXPR, type,
11622 negate_expr (arg0),
11623 TREE_OPERAND (arg1, 0));
11624
11625 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11626 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11627 && real_onep (arg1))
11628 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11629
11630 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11631 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11632 && real_minus_onep (arg1))
11633 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11634 negate_expr (arg0)));
11635
11636 /* If ARG1 is a constant, we can convert this to a multiply by the
11637 reciprocal. This does not have the same rounding properties,
11638 so only do this if -freciprocal-math. We can actually
11639 always safely do it if ARG1 is a power of two, but it's hard to
11640 tell if it is or not in a portable manner. */
11641 if (optimize
11642 && (TREE_CODE (arg1) == REAL_CST
11643 || (TREE_CODE (arg1) == COMPLEX_CST
11644 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11645 || (TREE_CODE (arg1) == VECTOR_CST
11646 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11647 {
11648 if (flag_reciprocal_math
11649 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11650 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11651 /* Find the reciprocal if optimizing and the result is exact.
11652 TODO: Complex reciprocal not implemented. */
11653 if (TREE_CODE (arg1) != COMPLEX_CST)
11654 {
11655 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11656
11657 if (inverse)
11658 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11659 }
11660 }
11661 /* Convert A/B/C to A/(B*C). */
11662 if (flag_reciprocal_math
11663 && TREE_CODE (arg0) == RDIV_EXPR)
11664 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11665 fold_build2_loc (loc, MULT_EXPR, type,
11666 TREE_OPERAND (arg0, 1), arg1));
11667
11668 /* Convert A/(B/C) to (A/B)*C. */
11669 if (flag_reciprocal_math
11670 && TREE_CODE (arg1) == RDIV_EXPR)
11671 return fold_build2_loc (loc, MULT_EXPR, type,
11672 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11673 TREE_OPERAND (arg1, 0)),
11674 TREE_OPERAND (arg1, 1));
11675
11676 /* Convert C1/(X*C2) into (C1/C2)/X. */
11677 if (flag_reciprocal_math
11678 && TREE_CODE (arg1) == MULT_EXPR
11679 && TREE_CODE (arg0) == REAL_CST
11680 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11681 {
11682 tree tem = const_binop (RDIV_EXPR, arg0,
11683 TREE_OPERAND (arg1, 1));
11684 if (tem)
11685 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11686 TREE_OPERAND (arg1, 0));
11687 }
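      /* For example, with -freciprocal-math: x / 5.0 becomes x * 0.2,
	 a / b / c becomes a / (b * c), and 10.0 / (x * 2.0) becomes
	 5.0 / x.  */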
11688
11689 if (flag_unsafe_math_optimizations)
11690 {
11691 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11692 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11693
11694 /* Optimize sin(x)/cos(x) as tan(x). */
11695 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11696 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11697 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11698 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11699 CALL_EXPR_ARG (arg1, 0), 0))
11700 {
11701 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11702
11703 if (tanfn != NULL_TREE)
11704 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11705 }
11706
11707 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11708 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11709 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11710 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11711 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11712 CALL_EXPR_ARG (arg1, 0), 0))
11713 {
11714 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11715
11716 if (tanfn != NULL_TREE)
11717 {
11718 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11719 CALL_EXPR_ARG (arg0, 0));
11720 return fold_build2_loc (loc, RDIV_EXPR, type,
11721 build_real (type, dconst1), tmp);
11722 }
11723 }
11724
11725 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11726 NaNs or Infinities. */
11727 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11728 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11729 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11730 {
11731 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11732 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11733
11734 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11735 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11736 && operand_equal_p (arg00, arg01, 0))
11737 {
11738 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11739
11740 if (cosfn != NULL_TREE)
11741 return build_call_expr_loc (loc, cosfn, 1, arg00);
11742 }
11743 }
11744
11745 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11746 NaNs or Infinities. */
11747 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11748 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11749 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11750 {
11751 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11752 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11753
11754 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11755 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11756 && operand_equal_p (arg00, arg01, 0))
11757 {
11758 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11759
11760 if (cosfn != NULL_TREE)
11761 {
11762 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11763 return fold_build2_loc (loc, RDIV_EXPR, type,
11764 build_real (type, dconst1),
11765 tmp);
11766 }
11767 }
11768 }
11769
11770 /* Optimize pow(x,c)/x as pow(x,c-1). */
11771 if (fcode0 == BUILT_IN_POW
11772 || fcode0 == BUILT_IN_POWF
11773 || fcode0 == BUILT_IN_POWL)
11774 {
11775 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11776 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11777 if (TREE_CODE (arg01) == REAL_CST
11778 && !TREE_OVERFLOW (arg01)
11779 && operand_equal_p (arg1, arg00, 0))
11780 {
11781 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11782 REAL_VALUE_TYPE c;
11783 tree arg;
11784
11785 c = TREE_REAL_CST (arg01);
11786 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11787 arg = build_real (type, c);
11788 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11789 }
11790 }
11791
11792 /* Optimize a/root(b/c) into a*root(c/b). */
11793 if (BUILTIN_ROOT_P (fcode1))
11794 {
11795 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11796
11797 if (TREE_CODE (rootarg) == RDIV_EXPR)
11798 {
11799 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11800 tree b = TREE_OPERAND (rootarg, 0);
11801 tree c = TREE_OPERAND (rootarg, 1);
11802
11803 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11804
11805 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11806 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11807 }
11808 }
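/* For instance (illustrative, with root == sqrt): "a / sqrt (b / c)"
   is rewritten as "a * sqrt (c / b)", trading the division for a
   multiplication.  */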
11809
11810 /* Optimize x/expN(y) into x*expN(-y). */
11811 if (BUILTIN_EXPONENT_P (fcode1))
11812 {
11813 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11814 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11815 arg1 = build_call_expr_loc (loc,
11816 expfn, 1,
11817 fold_convert_loc (loc, type, arg));
11818 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11819 }
11820
11821 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11822 if (fcode1 == BUILT_IN_POW
11823 || fcode1 == BUILT_IN_POWF
11824 || fcode1 == BUILT_IN_POWL)
11825 {
11826 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11827 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11828 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11829 tree neg11 = fold_convert_loc (loc, type,
11830 negate_expr (arg11));
11831 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11832 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11833 }
11834 }
11835 return NULL_TREE;
11836
11837 case TRUNC_DIV_EXPR:
11838 /* Optimize (X & (-A)) / A where A is a power of 2,
11839 to X >> log2(A). */
11840 if (TREE_CODE (arg0) == BIT_AND_EXPR
11841 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11842 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11843 {
11844 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11845 arg1, TREE_OPERAND (arg0, 1));
11846 if (sum && integer_zerop (sum))
11847 {
11848 tree pow2 = build_int_cst (integer_type_node, wi::exact_log2 (arg1));
11849 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11850 TREE_OPERAND (arg0, 0), pow2);
11851 }
11852 }
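/* Illustrative instance: for signed x and A == 8, "(x & -8) / 8"
   folds to "x >> 3"; the sum check above verifies that the mask is
   exactly -A, so the division is known to be exact.  */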
11853
11854 /* Fall through */
11855
11856 case FLOOR_DIV_EXPR:
11857 /* Simplify A / (B << N) where A and B are positive and B is
11858 a power of 2, to A >> (N + log2(B)). */
11859 strict_overflow_p = false;
11860 if (TREE_CODE (arg1) == LSHIFT_EXPR
11861 && (TYPE_UNSIGNED (type)
11862 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11863 {
11864 tree sval = TREE_OPERAND (arg1, 0);
11865 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11866 {
11867 tree sh_cnt = TREE_OPERAND (arg1, 1);
11868 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11869 wi::exact_log2 (sval));
11870
11871 if (strict_overflow_p)
11872 fold_overflow_warning (("assuming signed overflow does not "
11873 "occur when simplifying A / (B << N)"),
11874 WARN_STRICT_OVERFLOW_MISC);
11875
11876 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11877 sh_cnt, pow2);
11878 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11879 fold_convert_loc (loc, type, arg0), sh_cnt);
11880 }
11881 }
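/* E.g. (illustrative): for unsigned a and B == 4, "a / (4 << n)"
   becomes "a >> (n + 2)".  */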
11882
11883 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11884 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11885 if (INTEGRAL_TYPE_P (type)
11886 && TYPE_UNSIGNED (type)
11887 && code == FLOOR_DIV_EXPR)
11888 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11889
11890 /* Fall through */
11891
11892 case ROUND_DIV_EXPR:
11893 case CEIL_DIV_EXPR:
11894 case EXACT_DIV_EXPR:
11895 if (integer_zerop (arg1))
11896 return NULL_TREE;
11897 /* X / -1 is -X. */
11898 if (!TYPE_UNSIGNED (type)
11899 && TREE_CODE (arg1) == INTEGER_CST
11900 && wi::eq_p (arg1, -1))
11901 return fold_convert_loc (loc, type, negate_expr (arg0));
11902
11903 /* Convert -A / -B to A / B when the type is signed and overflow is
11904 undefined. */
11905 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11906 && TREE_CODE (arg0) == NEGATE_EXPR
11907 && negate_expr_p (arg1))
11908 {
11909 if (INTEGRAL_TYPE_P (type))
11910 fold_overflow_warning (("assuming signed overflow does not occur "
11911 "when distributing negation across "
11912 "division"),
11913 WARN_STRICT_OVERFLOW_MISC);
11914 return fold_build2_loc (loc, code, type,
11915 fold_convert_loc (loc, type,
11916 TREE_OPERAND (arg0, 0)),
11917 fold_convert_loc (loc, type,
11918 negate_expr (arg1)));
11919 }
11920 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11921 && TREE_CODE (arg1) == NEGATE_EXPR
11922 && negate_expr_p (arg0))
11923 {
11924 if (INTEGRAL_TYPE_P (type))
11925 fold_overflow_warning (("assuming signed overflow does not occur "
11926 "when distributing negation across "
11927 "division"),
11928 WARN_STRICT_OVERFLOW_MISC);
11929 return fold_build2_loc (loc, code, type,
11930 fold_convert_loc (loc, type,
11931 negate_expr (arg0)),
11932 fold_convert_loc (loc, type,
11933 TREE_OPERAND (arg1, 0)));
11934 }
11935
11936 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11937 operation, EXACT_DIV_EXPR.
11938
11939 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11940 At one time others generated faster code; it's not clear whether they
11941 still do after the last round of changes to the DIV code in expmed.c. */
11942 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11943 && multiple_of_p (type, arg0, arg1))
11944 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
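/* For instance (illustrative): a FLOOR_DIV_EXPR computing
   "(x * 8) / 4" is known to divide exactly, so it is rewritten
   as an EXACT_DIV_EXPR.  */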
11945
11946 strict_overflow_p = false;
11947 if (TREE_CODE (arg1) == INTEGER_CST
11948 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11949 &strict_overflow_p)))
11950 {
11951 if (strict_overflow_p)
11952 fold_overflow_warning (("assuming signed overflow does not occur "
11953 "when simplifying division"),
11954 WARN_STRICT_OVERFLOW_MISC);
11955 return fold_convert_loc (loc, type, tem);
11956 }
11957
11958 return NULL_TREE;
11959
11960 case CEIL_MOD_EXPR:
11961 case FLOOR_MOD_EXPR:
11962 case ROUND_MOD_EXPR:
11963 case TRUNC_MOD_EXPR:
11964 /* X % -1 is zero. */
11965 if (!TYPE_UNSIGNED (type)
11966 && TREE_CODE (arg1) == INTEGER_CST
11967 && wi::eq_p (arg1, -1))
11968 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11969
11970 /* X % -C is the same as X % C. */
11971 if (code == TRUNC_MOD_EXPR
11972 && TYPE_SIGN (type) == SIGNED
11973 && TREE_CODE (arg1) == INTEGER_CST
11974 && !TREE_OVERFLOW (arg1)
11975 && wi::neg_p (arg1)
11976 && !TYPE_OVERFLOW_TRAPS (type)
11977 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11978 && !sign_bit_p (arg1, arg1))
11979 return fold_build2_loc (loc, code, type,
11980 fold_convert_loc (loc, type, arg0),
11981 fold_convert_loc (loc, type,
11982 negate_expr (arg1)));
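/* E.g. (illustrative): "x % -16" folds to "x % 16" for signed x;
   the sign_bit_p check above excludes C == INT_MIN, for which
   C == -C.  */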
11983
11984 /* X % -Y is the same as X % Y. */
11985 if (code == TRUNC_MOD_EXPR
11986 && !TYPE_UNSIGNED (type)
11987 && TREE_CODE (arg1) == NEGATE_EXPR
11988 && !TYPE_OVERFLOW_TRAPS (type))
11989 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11990 fold_convert_loc (loc, type,
11991 TREE_OPERAND (arg1, 0)));
11992
11993 strict_overflow_p = false;
11994 if (TREE_CODE (arg1) == INTEGER_CST
11995 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11996 &strict_overflow_p)))
11997 {
11998 if (strict_overflow_p)
11999 fold_overflow_warning (("assuming signed overflow does not occur "
12000 "when simplifying modulus"),
12001 WARN_STRICT_OVERFLOW_MISC);
12002 return fold_convert_loc (loc, type, tem);
12003 }
12004
12005 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12006 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12007 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12008 && (TYPE_UNSIGNED (type)
12009 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12010 {
12011 tree c = arg1;
12012 /* Also optimize A % (C << N) where C is a power of 2,
12013 to A & ((C << N) - 1). */
12014 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12015 c = TREE_OPERAND (arg1, 0);
12016
12017 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12018 {
12019 tree mask
12020 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12021 build_int_cst (TREE_TYPE (arg1), 1));
12022 if (strict_overflow_p)
12023 fold_overflow_warning (("assuming signed overflow does not "
12024 "occur when simplifying "
12025 "X % (power of two)"),
12026 WARN_STRICT_OVERFLOW_MISC);
12027 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12028 fold_convert_loc (loc, type, arg0),
12029 fold_convert_loc (loc, type, mask));
12030 }
12031 }
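/* Illustrative instances: "x % 8u" folds to "x & 7", and the shifted
   form "x % (2u << n)" folds to "x & ((2u << n) - 1)".  */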
12032
12033 return NULL_TREE;
12034
12035 case LROTATE_EXPR:
12036 case RROTATE_EXPR:
12037 if (integer_all_onesp (arg0))
12038 return omit_one_operand_loc (loc, type, arg0, arg1);
12039 goto shift;
12040
12041 case RSHIFT_EXPR:
12042 /* Optimize -1 >> x for arithmetic right shifts. */
12043 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12044 && tree_expr_nonnegative_p (arg1))
12045 return omit_one_operand_loc (loc, type, arg0, arg1);
12046 /* ... fall through ... */
12047
12048 case LSHIFT_EXPR:
12049 shift:
12050 if (integer_zerop (arg1))
12051 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12052 if (integer_zerop (arg0))
12053 return omit_one_operand_loc (loc, type, arg0, arg1);
12054
12055 /* Prefer vector1 << scalar to vector1 << vector2
12056 if vector2 is uniform. */
12057 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12058 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12059 return fold_build2_loc (loc, code, type, op0, tem);
12060
12061 /* Since a negative shift count is not well-defined,
12062 don't try to compute it in the compiler. */
12063 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12064 return NULL_TREE;
12065
12066 prec = element_precision (type);
12067
12068 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12069 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12070 && tree_to_uhwi (arg1) < prec
12071 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12072 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12073 {
12074 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12075 + tree_to_uhwi (arg1));
12076
12077 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12078 being well defined. */
12079 if (low >= prec)
12080 {
12081 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12082 low = low % prec;
12083 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12084 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12085 TREE_OPERAND (arg0, 0));
12086 else
12087 low = prec - 1;
12088 }
12089
12090 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12091 build_int_cst (TREE_TYPE (arg1), low));
12092 }
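/* E.g. (illustrative): "(x >> 2) >> 3" folds to "x >> 5"; when the
   combined count reaches the precision, the cases above substitute
   a reduced rotate count, zero, or a sign-fill shift as appropriate.  */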
12093
12094 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12095 into x & ((unsigned)-1 >> c) for unsigned types. */
12096 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12097 || (TYPE_UNSIGNED (type)
12098 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12099 && tree_fits_uhwi_p (arg1)
12100 && tree_to_uhwi (arg1) < prec
12101 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12102 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12103 {
12104 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12105 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12106 tree lshift;
12107 tree arg00;
12108
12109 if (low0 == low1)
12110 {
12111 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12112
12113 lshift = build_minus_one_cst (type);
12114 lshift = const_binop (code, lshift, arg1);
12115
12116 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12117 }
12118 }
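/* For instance (illustrative): for 32-bit x, "(x >> 4) << 4" folds
   to "x & -16", clearing the four low-order bits.  */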
12119
12120 /* Rewrite an LROTATE_EXPR by a constant into an
12121 RROTATE_EXPR by a new constant. */
12122 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12123 {
12124 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12125 tem = const_binop (MINUS_EXPR, tem, arg1);
12126 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12127 }
12128
12129 /* If we have a rotate of a bit operation with the rotate count and
12130 the second operand of the bit operation both constant,
12131 permute the two operations. */
12132 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12133 && (TREE_CODE (arg0) == BIT_AND_EXPR
12134 || TREE_CODE (arg0) == BIT_IOR_EXPR
12135 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12136 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12137 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12138 fold_build2_loc (loc, code, type,
12139 TREE_OPERAND (arg0, 0), arg1),
12140 fold_build2_loc (loc, code, type,
12141 TREE_OPERAND (arg0, 1), arg1));
12142
12143 /* Two consecutive rotates adding up to some integer
12144 multiple of the precision of the type can be ignored. */
12145 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12146 && TREE_CODE (arg0) == RROTATE_EXPR
12147 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12148 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12149 prec) == 0)
12150 return TREE_OPERAND (arg0, 0);
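/* E.g. (illustrative): rotating a 32-bit value right by 24 and then
   right by 8 is the identity, as 24 + 8 is a multiple of the
   precision.  */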
12151
12152 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12153 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12154 if the latter can be further optimized. */
12155 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12156 && TREE_CODE (arg0) == BIT_AND_EXPR
12157 && TREE_CODE (arg1) == INTEGER_CST
12158 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12159 {
12160 tree mask = fold_build2_loc (loc, code, type,
12161 fold_convert_loc (loc, type,
12162 TREE_OPERAND (arg0, 1)),
12163 arg1);
12164 tree shift = fold_build2_loc (loc, code, type,
12165 fold_convert_loc (loc, type,
12166 TREE_OPERAND (arg0, 0)),
12167 arg1);
12168 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12169 if (tem)
12170 return tem;
12171 }
12172
12173 return NULL_TREE;
12174
12175 case MIN_EXPR:
12176 if (operand_equal_p (arg0, arg1, 0))
12177 return omit_one_operand_loc (loc, type, arg0, arg1);
12178 if (INTEGRAL_TYPE_P (type)
12179 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12180 return omit_one_operand_loc (loc, type, arg1, arg0);
12181 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12182 if (tem)
12183 return tem;
12184 goto associate;
12185
12186 case MAX_EXPR:
12187 if (operand_equal_p (arg0, arg1, 0))
12188 return omit_one_operand_loc (loc, type, arg0, arg1);
12189 if (INTEGRAL_TYPE_P (type)
12190 && TYPE_MAX_VALUE (type)
12191 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12192 return omit_one_operand_loc (loc, type, arg1, arg0);
12193 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12194 if (tem)
12195 return tem;
12196 goto associate;
12197
12198 case TRUTH_ANDIF_EXPR:
12199 /* Note that the operands of this must be ints
12200 and their values must be 0 or 1.
12201 ("true" is a fixed value perhaps depending on the language.) */
12202 /* If first arg is constant zero, return it. */
12203 if (integer_zerop (arg0))
12204 return fold_convert_loc (loc, type, arg0);
12205 case TRUTH_AND_EXPR:
12206 /* If either arg is constant true, drop it. */
12207 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12208 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12209 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12210 /* Preserve sequence points. */
12211 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12212 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12213 /* If second arg is constant zero, result is zero, but first arg
12214 must be evaluated. */
12215 if (integer_zerop (arg1))
12216 return omit_one_operand_loc (loc, type, arg1, arg0);
12217 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12218 case will be handled here. */
12219 if (integer_zerop (arg0))
12220 return omit_one_operand_loc (loc, type, arg0, arg1);
12221
12222 /* !X && X is always false. */
12223 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12224 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12225 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12226 /* X && !X is always false. */
12227 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12228 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12229 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12230
12231 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12232 means A >= Y && A != MAX, but in this case we know that
12233 A < X <= MAX. */
12234
12235 if (!TREE_SIDE_EFFECTS (arg0)
12236 && !TREE_SIDE_EFFECTS (arg1))
12237 {
12238 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12239 if (tem && !operand_equal_p (tem, arg0, 0))
12240 return fold_build2_loc (loc, code, type, tem, arg1);
12241
12242 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12243 if (tem && !operand_equal_p (tem, arg1, 0))
12244 return fold_build2_loc (loc, code, type, arg0, tem);
12245 }
12246
12247 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12248 != NULL_TREE)
12249 return tem;
12250
12251 return NULL_TREE;
12252
12253 case TRUTH_ORIF_EXPR:
12254 /* Note that the operands of this must be ints
12255 and their values must be 0 or true.
12256 ("true" is a fixed value perhaps depending on the language.) */
12257 /* If first arg is constant true, return it. */
12258 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12259 return fold_convert_loc (loc, type, arg0);
12260 case TRUTH_OR_EXPR:
12261 /* If either arg is constant zero, drop it. */
12262 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12263 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12264 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12265 /* Preserve sequence points. */
12266 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12267 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12268 /* If second arg is constant true, result is true, but we must
12269 evaluate first arg. */
12270 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12271 return omit_one_operand_loc (loc, type, arg1, arg0);
12272 /* Likewise for first arg, but note this only occurs here for
12273 TRUTH_OR_EXPR. */
12274 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12275 return omit_one_operand_loc (loc, type, arg0, arg1);
12276
12277 /* !X || X is always true. */
12278 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12279 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12280 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12281 /* X || !X is always true. */
12282 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12283 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12284 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12285
12286 /* (X && !Y) || (!X && Y) is X ^ Y. */
12287 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12288 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12289 {
12290 tree a0, a1, l0, l1, n0, n1;
12291
12292 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12293 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12294
12295 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12296 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12297
12298 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12299 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12300
12301 if ((operand_equal_p (n0, a0, 0)
12302 && operand_equal_p (n1, a1, 0))
12303 || (operand_equal_p (n0, a1, 0)
12304 && operand_equal_p (n1, a0, 0)))
12305 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12306 }
12307
12308 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12309 != NULL_TREE)
12310 return tem;
12311
12312 return NULL_TREE;
12313
12314 case TRUTH_XOR_EXPR:
12315 /* If the second arg is constant zero, drop it. */
12316 if (integer_zerop (arg1))
12317 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12318 /* If the second arg is constant true, this is a logical inversion. */
12319 if (integer_onep (arg1))
12320 {
12321 tem = invert_truthvalue_loc (loc, arg0);
12322 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12323 }
12324 /* Identical arguments cancel to zero. */
12325 if (operand_equal_p (arg0, arg1, 0))
12326 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12327
12328 /* !X ^ X is always true. */
12329 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12330 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12331 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12332
12333 /* X ^ !X is always true. */
12334 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12335 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12336 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12337
12338 return NULL_TREE;
12339
12340 case EQ_EXPR:
12341 case NE_EXPR:
12342 STRIP_NOPS (arg0);
12343 STRIP_NOPS (arg1);
12344
12345 tem = fold_comparison (loc, code, type, op0, op1);
12346 if (tem != NULL_TREE)
12347 return tem;
12348
12349 /* bool_var != 0 becomes bool_var. */
12350 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12351 && code == NE_EXPR)
12352 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12353
12354 /* bool_var == 1 becomes bool_var. */
12355 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12356 && code == EQ_EXPR)
12357 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12358
12359 /* bool_var != 1 becomes !bool_var. */
12360 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12361 && code == NE_EXPR)
12362 return fold_convert_loc (loc, type,
12363 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12364 TREE_TYPE (arg0), arg0));
12365
12366 /* bool_var == 0 becomes !bool_var. */
12367 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12368 && code == EQ_EXPR)
12369 return fold_convert_loc (loc, type,
12370 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12371 TREE_TYPE (arg0), arg0));
12372
12373 /* !exp != 0 becomes !exp. */
12374 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12375 && code == NE_EXPR)
12376 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12377
12378 /* If this is an equality comparison of the address of two non-weak,
12379 unaliased symbols neither of which are extern (since we do not
12380 have access to attributes for externs), then we know the result. */
12381 if (TREE_CODE (arg0) == ADDR_EXPR
12382 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12383 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12384 && ! lookup_attribute ("alias",
12385 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12386 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12387 && TREE_CODE (arg1) == ADDR_EXPR
12388 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12389 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12390 && ! lookup_attribute ("alias",
12391 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12392 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12393 {
12394 /* We know that we're looking at the address of two
12395 non-weak, unaliased, static _DECL nodes.
12396
12397 It is both wasteful and incorrect to call operand_equal_p
12398 to compare the two ADDR_EXPR nodes. It is wasteful in that
12399 all we need to do is test pointer equality for the arguments
12400 to the two ADDR_EXPR nodes. It is incorrect to use
12401 operand_equal_p as that function is NOT equivalent to a
12402 C equality test. It can in fact return false for two
12403 objects which would test as equal using the C equality
12404 operator. */
12405 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12406 return constant_boolean_node (equal
12407 ? code == EQ_EXPR : code != EQ_EXPR,
12408 type);
12409 }
12410
12411 /* Similarly for a NEGATE_EXPR. */
12412 if (TREE_CODE (arg0) == NEGATE_EXPR
12413 && TREE_CODE (arg1) == INTEGER_CST
12414 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12415 arg1)))
12416 && TREE_CODE (tem) == INTEGER_CST
12417 && !TREE_OVERFLOW (tem))
12418 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12419
12420 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12421 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12422 && TREE_CODE (arg1) == INTEGER_CST
12423 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12424 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12425 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12426 fold_convert_loc (loc,
12427 TREE_TYPE (arg0),
12428 arg1),
12429 TREE_OPERAND (arg0, 1)));
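/* Illustrative instance: "(x ^ 5) == 3" folds to "x == 6", since
   5 ^ 3 == 6.  */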
12430
12431 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12432 if ((TREE_CODE (arg0) == PLUS_EXPR
12433 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12434 || TREE_CODE (arg0) == MINUS_EXPR)
12435 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12436 0)),
12437 arg1, 0)
12438 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12439 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12440 {
12441 tree val = TREE_OPERAND (arg0, 1);
12442 return omit_two_operands_loc (loc, type,
12443 fold_build2_loc (loc, code, type,
12444 val,
12445 build_int_cst (TREE_TYPE (val),
12446 0)),
12447 TREE_OPERAND (arg0, 0), arg1);
12448 }
12449
12450 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12451 if (TREE_CODE (arg0) == MINUS_EXPR
12452 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12453 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12454 1)),
12455 arg1, 0)
12456 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12457 {
12458 return omit_two_operands_loc (loc, type,
12459 code == NE_EXPR
12460 ? boolean_true_node : boolean_false_node,
12461 TREE_OPERAND (arg0, 1), arg1);
12462 }
12463
12464 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12465 if (TREE_CODE (arg0) == ABS_EXPR
12466 && (integer_zerop (arg1) || real_zerop (arg1)))
12467 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12468
12469 /* If this is an EQ or NE comparison with zero and ARG0 is
12470 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12471 two operations, but the latter can be done in one less insn
12472 on machines that have only two-operand insns or on which a
12473 constant cannot be the first operand. */
12474 if (TREE_CODE (arg0) == BIT_AND_EXPR
12475 && integer_zerop (arg1))
12476 {
12477 tree arg00 = TREE_OPERAND (arg0, 0);
12478 tree arg01 = TREE_OPERAND (arg0, 1);
12479 if (TREE_CODE (arg00) == LSHIFT_EXPR
12480 && integer_onep (TREE_OPERAND (arg00, 0)))
12481 {
12482 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12483 arg01, TREE_OPERAND (arg00, 1));
12484 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12485 build_int_cst (TREE_TYPE (arg0), 1));
12486 return fold_build2_loc (loc, code, type,
12487 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12488 arg1);
12489 }
12490 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12491 && integer_onep (TREE_OPERAND (arg01, 0)))
12492 {
12493 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12494 arg00, TREE_OPERAND (arg01, 1));
12495 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12496 build_int_cst (TREE_TYPE (arg0), 1));
12497 return fold_build2_loc (loc, code, type,
12498 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12499 arg1);
12500 }
12501 }
12502
12503 /* If this is an NE or EQ comparison of zero against the result of a
12504 signed MOD operation whose second operand is a power of 2, make
12505 the MOD operation unsigned since it is simpler and equivalent. */
12506 if (integer_zerop (arg1)
12507 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12508 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12509 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12510 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12511 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12512 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12513 {
12514 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12515 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12516 fold_convert_loc (loc, newtype,
12517 TREE_OPERAND (arg0, 0)),
12518 fold_convert_loc (loc, newtype,
12519 TREE_OPERAND (arg0, 1)));
12520
12521 return fold_build2_loc (loc, code, type, newmod,
12522 fold_convert_loc (loc, newtype, arg1));
12523 }
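/* E.g. (illustrative): for signed x, "x % 4 == 0" is rewritten as
   "(unsigned) x % 4u == 0", which later folds can reduce to a mask
   test.  */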
12524
12525 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12526 C1 is a valid shift constant, and C2 is a power of two, i.e.
12527 a single bit. */
12528 if (TREE_CODE (arg0) == BIT_AND_EXPR
12529 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12530 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12531 == INTEGER_CST
12532 && integer_pow2p (TREE_OPERAND (arg0, 1))
12533 && integer_zerop (arg1))
12534 {
12535 tree itype = TREE_TYPE (arg0);
12536 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12537 prec = TYPE_PRECISION (itype);
12538
12539 /* Check for a valid shift count. */
12540 if (wi::ltu_p (arg001, prec))
12541 {
12542 tree arg01 = TREE_OPERAND (arg0, 1);
12543 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12544 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12545 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12546 can be rewritten as (X & (C2 << C1)) != 0. */
12547 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12548 {
12549 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12550 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12551 return fold_build2_loc (loc, code, type, tem,
12552 fold_convert_loc (loc, itype, arg1));
12553 }
12554 /* Otherwise, for signed (arithmetic) shifts,
12555 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12556 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12557 else if (!TYPE_UNSIGNED (itype))
12558 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12559 arg000, build_int_cst (itype, 0));
12560 /* Otherwise, for unsigned (logical) shifts,
12561 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12562 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12563 else
12564 return omit_one_operand_loc (loc, type,
12565 code == EQ_EXPR ? integer_one_node
12566 : integer_zero_node,
12567 arg000);
12568 }
12569 }
12570
12571 /* If we have (A & C) == C where C is a power of 2, convert this into
12572 (A & C) != 0. Similarly for NE_EXPR. */
12573 if (TREE_CODE (arg0) == BIT_AND_EXPR
12574 && integer_pow2p (TREE_OPERAND (arg0, 1))
12575 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12576 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12577 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12578 integer_zero_node));
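/* For instance (illustrative): "(a & 8) == 8" becomes
   "(a & 8) != 0", which maps directly onto a bit-test.  */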
12579
12580 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12581 bit, then fold the expression into A < 0 or A >= 0. */
12582 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12583 if (tem)
12584 return tem;
12585
12586 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12587 Similarly for NE_EXPR. */
12588 if (TREE_CODE (arg0) == BIT_AND_EXPR
12589 && TREE_CODE (arg1) == INTEGER_CST
12590 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12591 {
12592 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12593 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12594 TREE_OPERAND (arg0, 1));
12595 tree dandnotc
12596 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12597 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12598 notc);
12599 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12600 if (integer_nonzerop (dandnotc))
12601 return omit_one_operand_loc (loc, type, rslt, arg0);
12602 }
12603
12604 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12605 Similarly for NE_EXPR. */
12606 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12607 && TREE_CODE (arg1) == INTEGER_CST
12608 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12609 {
12610 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12611 tree candnotd
12612 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12613 TREE_OPERAND (arg0, 1),
12614 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12615 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12616 if (integer_nonzerop (candnotd))
12617 return omit_one_operand_loc (loc, type, rslt, arg0);
12618 }
12619
12620 /* If this is a comparison of a field, we may be able to simplify it. */
12621 if ((TREE_CODE (arg0) == COMPONENT_REF
12622 || TREE_CODE (arg0) == BIT_FIELD_REF)
12623 /* Handle the constant case even without -O
12624 to make sure the warnings are given. */
12625 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12626 {
12627 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12628 if (t1)
12629 return t1;
12630 }
12631
12632 /* Optimize comparisons of strlen vs zero to a compare of the
12633 first character of the string vs zero. To wit,
12634 strlen(ptr) == 0 => *ptr == 0
12635 strlen(ptr) != 0 => *ptr != 0
12636 Other cases should reduce to one of these two (or a constant)
12637 due to the return value of strlen being unsigned. */
12638 if (TREE_CODE (arg0) == CALL_EXPR
12639 && integer_zerop (arg1))
12640 {
12641 tree fndecl = get_callee_fndecl (arg0);
12642
12643 if (fndecl
12644 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12645 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12646 && call_expr_nargs (arg0) == 1
12647 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12648 {
12649 tree iref = build_fold_indirect_ref_loc (loc,
12650 CALL_EXPR_ARG (arg0, 0));
12651 return fold_build2_loc (loc, code, type, iref,
12652 build_int_cst (TREE_TYPE (iref), 0));
12653 }
12654 }
12655
12656 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12657 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12658 if (TREE_CODE (arg0) == RSHIFT_EXPR
12659 && integer_zerop (arg1)
12660 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12661 {
12662 tree arg00 = TREE_OPERAND (arg0, 0);
12663 tree arg01 = TREE_OPERAND (arg0, 1);
12664 tree itype = TREE_TYPE (arg00);
12665 if (wi::eq_p (arg01, element_precision (itype) - 1))
12666 {
12667 if (TYPE_UNSIGNED (itype))
12668 {
12669 itype = signed_type_for (itype);
12670 arg00 = fold_convert_loc (loc, itype, arg00);
12671 }
12672 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12673 type, arg00, build_zero_cst (itype));
12674 }
12675 }
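/* E.g. (illustrative): for 32-bit int x, "(x >> 31) != 0" folds to
   "x < 0" and "(x >> 31) == 0" to "x >= 0".  */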
12676
12677 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12678 if (integer_zerop (arg1)
12679 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12680 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12681 TREE_OPERAND (arg0, 1));
12682
12683 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12684 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12685 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12686 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12687 build_zero_cst (TREE_TYPE (arg0)));
12688 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12689 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12690 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12691 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12692 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12693 build_zero_cst (TREE_TYPE (arg0)));
12694
12695 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12696 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12697 && TREE_CODE (arg1) == INTEGER_CST
12698 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12699 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12700 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12701 TREE_OPERAND (arg0, 1), arg1));
12702
12703 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12704 (X & C) == 0 when C is a single bit. */
12705 if (TREE_CODE (arg0) == BIT_AND_EXPR
12706 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12707 && integer_zerop (arg1)
12708 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12709 {
12710 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12711 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12712 TREE_OPERAND (arg0, 1));
12713 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12714 type, tem,
12715 fold_convert_loc (loc, TREE_TYPE (arg0),
12716 arg1));
12717 }
12718
12719 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12720 constant C is a power of two, i.e. a single bit. */
12721 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12722 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12723 && integer_zerop (arg1)
12724 && integer_pow2p (TREE_OPERAND (arg0, 1))
12725 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12726 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12727 {
12728 tree arg00 = TREE_OPERAND (arg0, 0);
12729 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12730 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12731 }
12732
12733 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12734 when C is a power of two, i.e. a single bit. */
12735 if (TREE_CODE (arg0) == BIT_AND_EXPR
12736 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12737 && integer_zerop (arg1)
12738 && integer_pow2p (TREE_OPERAND (arg0, 1))
12739 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12740 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12741 {
12742 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12743 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12744 arg000, TREE_OPERAND (arg0, 1));
12745 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12746 tem, build_int_cst (TREE_TYPE (tem), 0));
12747 }
12748
12749 if (integer_zerop (arg1)
12750 && tree_expr_nonzero_p (arg0))
12751 {
12752 tree res = constant_boolean_node (code == NE_EXPR, type);
12753 return omit_one_operand_loc (loc, type, res, arg0);
12754 }
12755
12756 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12757 if (TREE_CODE (arg0) == NEGATE_EXPR
12758 && TREE_CODE (arg1) == NEGATE_EXPR)
12759 return fold_build2_loc (loc, code, type,
12760 TREE_OPERAND (arg0, 0),
12761 fold_convert_loc (loc, TREE_TYPE (arg0),
12762 TREE_OPERAND (arg1, 0)));
12763
12764 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12765 if (TREE_CODE (arg0) == BIT_AND_EXPR
12766 && TREE_CODE (arg1) == BIT_AND_EXPR)
12767 {
12768 tree arg00 = TREE_OPERAND (arg0, 0);
12769 tree arg01 = TREE_OPERAND (arg0, 1);
12770 tree arg10 = TREE_OPERAND (arg1, 0);
12771 tree arg11 = TREE_OPERAND (arg1, 1);
12772 tree itype = TREE_TYPE (arg0);
12773
12774 if (operand_equal_p (arg01, arg11, 0))
12775 return fold_build2_loc (loc, code, type,
12776 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12777 fold_build2_loc (loc,
12778 BIT_XOR_EXPR, itype,
12779 arg00, arg10),
12780 arg01),
12781 build_zero_cst (itype));
12782
12783 if (operand_equal_p (arg01, arg10, 0))
12784 return fold_build2_loc (loc, code, type,
12785 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12786 fold_build2_loc (loc,
12787 BIT_XOR_EXPR, itype,
12788 arg00, arg11),
12789 arg01),
12790 build_zero_cst (itype));
12791
12792 if (operand_equal_p (arg00, arg11, 0))
12793 return fold_build2_loc (loc, code, type,
12794 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12795 fold_build2_loc (loc,
12796 BIT_XOR_EXPR, itype,
12797 arg01, arg10),
12798 arg00),
12799 build_zero_cst (itype));
12800
12801 if (operand_equal_p (arg00, arg10, 0))
12802 return fold_build2_loc (loc, code, type,
12803 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12804 fold_build2_loc (loc,
12805 BIT_XOR_EXPR, itype,
12806 arg01, arg11),
12807 arg00),
12808 build_zero_cst (itype));
12809 }
12810
12811 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12812 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12813 {
12814 tree arg00 = TREE_OPERAND (arg0, 0);
12815 tree arg01 = TREE_OPERAND (arg0, 1);
12816 tree arg10 = TREE_OPERAND (arg1, 0);
12817 tree arg11 = TREE_OPERAND (arg1, 1);
12818 tree itype = TREE_TYPE (arg0);
12819
12820 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12821 operand_equal_p guarantees no side-effects so we don't need
12822 to use omit_one_operand on Z. */
12823 if (operand_equal_p (arg01, arg11, 0))
12824 return fold_build2_loc (loc, code, type, arg00,
12825 fold_convert_loc (loc, TREE_TYPE (arg00),
12826 arg10));
12827 if (operand_equal_p (arg01, arg10, 0))
12828 return fold_build2_loc (loc, code, type, arg00,
12829 fold_convert_loc (loc, TREE_TYPE (arg00),
12830 arg11));
12831 if (operand_equal_p (arg00, arg11, 0))
12832 return fold_build2_loc (loc, code, type, arg01,
12833 fold_convert_loc (loc, TREE_TYPE (arg01),
12834 arg10));
12835 if (operand_equal_p (arg00, arg10, 0))
12836 return fold_build2_loc (loc, code, type, arg01,
12837 fold_convert_loc (loc, TREE_TYPE (arg01),
12838 arg11));
12839
12840 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12841 if (TREE_CODE (arg01) == INTEGER_CST
12842 && TREE_CODE (arg11) == INTEGER_CST)
12843 {
12844 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12845 fold_convert_loc (loc, itype, arg11));
12846 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12847 return fold_build2_loc (loc, code, type, tem,
12848 fold_convert_loc (loc, itype, arg10));
12849 }
12850 }
12851
12852 /* Attempt to simplify equality/inequality comparisons of complex
12853 values. Only lower the comparison if the result is known or
12854 can be simplified to a single scalar comparison. */
12855 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12856 || TREE_CODE (arg0) == COMPLEX_CST)
12857 && (TREE_CODE (arg1) == COMPLEX_EXPR
12858 || TREE_CODE (arg1) == COMPLEX_CST))
12859 {
12860 tree real0, imag0, real1, imag1;
12861 tree rcond, icond;
12862
12863 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12864 {
12865 real0 = TREE_OPERAND (arg0, 0);
12866 imag0 = TREE_OPERAND (arg0, 1);
12867 }
12868 else
12869 {
12870 real0 = TREE_REALPART (arg0);
12871 imag0 = TREE_IMAGPART (arg0);
12872 }
12873
12874 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12875 {
12876 real1 = TREE_OPERAND (arg1, 0);
12877 imag1 = TREE_OPERAND (arg1, 1);
12878 }
12879 else
12880 {
12881 real1 = TREE_REALPART (arg1);
12882 imag1 = TREE_IMAGPART (arg1);
12883 }
12884
12885 rcond = fold_binary_loc (loc, code, type, real0, real1);
12886 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12887 {
12888 if (integer_zerop (rcond))
12889 {
12890 if (code == EQ_EXPR)
12891 return omit_two_operands_loc (loc, type, boolean_false_node,
12892 imag0, imag1);
12893 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12894 }
12895 else
12896 {
12897 if (code == NE_EXPR)
12898 return omit_two_operands_loc (loc, type, boolean_true_node,
12899 imag0, imag1);
12900 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12901 }
12902 }
12903
12904 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12905 if (icond && TREE_CODE (icond) == INTEGER_CST)
12906 {
12907 if (integer_zerop (icond))
12908 {
12909 if (code == EQ_EXPR)
12910 return omit_two_operands_loc (loc, type, boolean_false_node,
12911 real0, real1);
12912 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12913 }
12914 else
12915 {
12916 if (code == NE_EXPR)
12917 return omit_two_operands_loc (loc, type, boolean_true_node,
12918 real0, real1);
12919 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12920 }
12921 }
12922 }
12923
12924 return NULL_TREE;
12925
12926 case LT_EXPR:
12927 case GT_EXPR:
12928 case LE_EXPR:
12929 case GE_EXPR:
12930 tem = fold_comparison (loc, code, type, op0, op1);
12931 if (tem != NULL_TREE)
12932 return tem;
12933
12934 /* Transform comparisons of the form X +- C CMP X. */
12935 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12936 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12937 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12938 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12939 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12940 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12941 {
12942 tree arg01 = TREE_OPERAND (arg0, 1);
12943 enum tree_code code0 = TREE_CODE (arg0);
12944 int is_positive;
12945
12946 if (TREE_CODE (arg01) == REAL_CST)
12947 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12948 else
12949 is_positive = tree_int_cst_sgn (arg01);
12950
12951 /* (X - c) > X becomes false. */
12952 if (code == GT_EXPR
12953 && ((code0 == MINUS_EXPR && is_positive >= 0)
12954 || (code0 == PLUS_EXPR && is_positive <= 0)))
12955 {
12956 if (TREE_CODE (arg01) == INTEGER_CST
12957 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12958 fold_overflow_warning (("assuming signed overflow does not "
12959 "occur when assuming that (X - c) > X "
12960 "is always false"),
12961 WARN_STRICT_OVERFLOW_ALL);
12962 return constant_boolean_node (0, type);
12963 }
12964
12965 /* Likewise (X + c) < X becomes false. */
12966 if (code == LT_EXPR
12967 && ((code0 == PLUS_EXPR && is_positive >= 0)
12968 || (code0 == MINUS_EXPR && is_positive <= 0)))
12969 {
12970 if (TREE_CODE (arg01) == INTEGER_CST
12971 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12972 fold_overflow_warning (("assuming signed overflow does not "
12973 "occur when assuming that "
12974 "(X + c) < X is always false"),
12975 WARN_STRICT_OVERFLOW_ALL);
12976 return constant_boolean_node (0, type);
12977 }
12978
12979 /* Convert (X - c) <= X to true. */
12980 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12981 && code == LE_EXPR
12982 && ((code0 == MINUS_EXPR && is_positive >= 0)
12983 || (code0 == PLUS_EXPR && is_positive <= 0)))
12984 {
12985 if (TREE_CODE (arg01) == INTEGER_CST
12986 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12987 fold_overflow_warning (("assuming signed overflow does not "
12988 "occur when assuming that "
12989 "(X - c) <= X is always true"),
12990 WARN_STRICT_OVERFLOW_ALL);
12991 return constant_boolean_node (1, type);
12992 }
12993
12994 /* Convert (X + c) >= X to true. */
12995 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12996 && code == GE_EXPR
12997 && ((code0 == PLUS_EXPR && is_positive >= 0)
12998 || (code0 == MINUS_EXPR && is_positive <= 0)))
12999 {
13000 if (TREE_CODE (arg01) == INTEGER_CST
13001 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13002 fold_overflow_warning (("assuming signed overflow does not "
13003 "occur when assuming that "
13004 "(X + c) >= X is always true"),
13005 WARN_STRICT_OVERFLOW_ALL);
13006 return constant_boolean_node (1, type);
13007 }
13008
13009 if (TREE_CODE (arg01) == INTEGER_CST)
13010 {
13011 /* Convert X + c > X and X - c < X to true for integers. */
13012 if (code == GT_EXPR
13013 && ((code0 == PLUS_EXPR && is_positive > 0)
13014 || (code0 == MINUS_EXPR && is_positive < 0)))
13015 {
13016 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13017 fold_overflow_warning (("assuming signed overflow does "
13018 "not occur when assuming that "
13019 "(X + c) > X is always true"),
13020 WARN_STRICT_OVERFLOW_ALL);
13021 return constant_boolean_node (1, type);
13022 }
13023
13024 if (code == LT_EXPR
13025 && ((code0 == MINUS_EXPR && is_positive > 0)
13026 || (code0 == PLUS_EXPR && is_positive < 0)))
13027 {
13028 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13029 fold_overflow_warning (("assuming signed overflow does "
13030 "not occur when assuming that "
13031 "(X - c) < X is always true"),
13032 WARN_STRICT_OVERFLOW_ALL);
13033 return constant_boolean_node (1, type);
13034 }
13035
13036 /* Convert X + c <= X and X - c >= X to false for integers. */
13037 if (code == LE_EXPR
13038 && ((code0 == PLUS_EXPR && is_positive > 0)
13039 || (code0 == MINUS_EXPR && is_positive < 0)))
13040 {
13041 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13042 fold_overflow_warning (("assuming signed overflow does "
13043 "not occur when assuming that "
13044 "(X + c) <= X is always false"),
13045 WARN_STRICT_OVERFLOW_ALL);
13046 return constant_boolean_node (0, type);
13047 }
13048
13049 if (code == GE_EXPR
13050 && ((code0 == MINUS_EXPR && is_positive > 0)
13051 || (code0 == PLUS_EXPR && is_positive < 0)))
13052 {
13053 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13054 fold_overflow_warning (("assuming signed overflow does "
13055 "not occur when assuming that "
13056 "(X - c) >= X is always false"),
13057 WARN_STRICT_OVERFLOW_ALL);
13058 return constant_boolean_node (0, type);
13059 }
13060 }
13061 }
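/* Illustrative instances, assuming signed overflow is undefined:
   "x + 1 > x" folds to true and "x - 1 > x" folds to false.  */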
13062
13063 /* Comparisons with the highest or lowest possible integer of
13064 the specified precision will have known values. */
13065 {
13066 tree arg1_type = TREE_TYPE (arg1);
13067 unsigned int prec = TYPE_PRECISION (arg1_type);
13068
13069 if (TREE_CODE (arg1) == INTEGER_CST
13070 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13071 {
13072 wide_int max = wi::max_value (arg1_type);
13073 wide_int signed_max = wi::max_value (prec, SIGNED);
13074 wide_int min = wi::min_value (arg1_type);
13075
13076 if (wi::eq_p (arg1, max))
13077 switch (code)
13078 {
13079 case GT_EXPR:
13080 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13081
13082 case GE_EXPR:
13083 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13084
13085 case LE_EXPR:
13086 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13087
13088 case LT_EXPR:
13089 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13090
13091 /* The GE_EXPR and LT_EXPR cases above are not normally
13092 reached because of previous transformations. */
13093
13094 default:
13095 break;
13096 }
13097 else if (wi::eq_p (arg1, max - 1))
13098 switch (code)
13099 {
13100 case GT_EXPR:
13101 arg1 = const_binop (PLUS_EXPR, arg1,
13102 build_int_cst (TREE_TYPE (arg1), 1));
13103 return fold_build2_loc (loc, EQ_EXPR, type,
13104 fold_convert_loc (loc,
13105 TREE_TYPE (arg1), arg0),
13106 arg1);
13107 case LE_EXPR:
13108 arg1 = const_binop (PLUS_EXPR, arg1,
13109 build_int_cst (TREE_TYPE (arg1), 1));
13110 return fold_build2_loc (loc, NE_EXPR, type,
13111 fold_convert_loc (loc, TREE_TYPE (arg1),
13112 arg0),
13113 arg1);
13114 default:
13115 break;
13116 }
13117 else if (wi::eq_p (arg1, min))
13118 switch (code)
13119 {
13120 case LT_EXPR:
13121 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13122
13123 case LE_EXPR:
13124 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13125
13126 case GE_EXPR:
13127 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13128
13129 case GT_EXPR:
13130 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13131
13132 default:
13133 break;
13134 }
13135 else if (wi::eq_p (arg1, min + 1))
13136 switch (code)
13137 {
13138 case GE_EXPR:
13139 arg1 = const_binop (MINUS_EXPR, arg1,
13140 build_int_cst (TREE_TYPE (arg1), 1));
13141 return fold_build2_loc (loc, NE_EXPR, type,
13142 fold_convert_loc (loc,
13143 TREE_TYPE (arg1), arg0),
13144 arg1);
13145 case LT_EXPR:
13146 arg1 = const_binop (MINUS_EXPR, arg1,
13147 build_int_cst (TREE_TYPE (arg1), 1));
13148 return fold_build2_loc (loc, EQ_EXPR, type,
13149 fold_convert_loc (loc, TREE_TYPE (arg1),
13150 arg0),
13151 arg1);
13152 default:
13153 break;
13154 }
13155
13156 else if (wi::eq_p (arg1, signed_max)
13157 && TYPE_UNSIGNED (arg1_type)
13158 /* We will flip the signedness of the comparison operator
13159 associated with the mode of arg1, so the sign bit is
13160 specified by this mode. Check that arg1 is the signed
13161 max associated with this sign bit. */
13162 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13163 /* signed_type does not work on pointer types. */
13164 && INTEGRAL_TYPE_P (arg1_type))
13165 {
13166 /* The following case also applies to X < signed_max+1
13167 and X >= signed_max+1 because of previous transformations. */
13168 if (code == LE_EXPR || code == GT_EXPR)
13169 {
13170 tree st = signed_type_for (arg1_type);
13171 return fold_build2_loc (loc,
13172 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13173 type, fold_convert_loc (loc, st, arg0),
13174 build_int_cst (st, 0));
13175 }
13176 }
13177 }
13178 }
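/* E.g. (illustrative): for unsigned char x, "x > 255" folds to
   false, and "x <= 254" becomes "x != 255".  */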
13179
13180 /* If we are comparing an ABS_EXPR with a constant, we can
13181 convert all the cases into explicit comparisons, but they may
13182 well not be faster than doing the ABS and one comparison.
13183 But ABS (X) <= C is a range comparison, which becomes a subtraction
13184 and a comparison, and is probably faster. */
13185 if (code == LE_EXPR
13186 && TREE_CODE (arg1) == INTEGER_CST
13187 && TREE_CODE (arg0) == ABS_EXPR
13188 && ! TREE_SIDE_EFFECTS (arg0)
13189 && (0 != (tem = negate_expr (arg1)))
13190 && TREE_CODE (tem) == INTEGER_CST
13191 && !TREE_OVERFLOW (tem))
13192 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13193 build2 (GE_EXPR, type,
13194 TREE_OPERAND (arg0, 0), tem),
13195 build2 (LE_EXPR, type,
13196 TREE_OPERAND (arg0, 0), arg1));
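/* For instance (illustrative): "abs (x) <= 7" is rewritten as
   "x >= -7 && x <= 7".  */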
13197
13198 /* Convert ABS_EXPR<x> >= 0 to true. */
13199 strict_overflow_p = false;
13200 if (code == GE_EXPR
13201 && (integer_zerop (arg1)
13202 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13203 && real_zerop (arg1)))
13204 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13205 {
13206 if (strict_overflow_p)
13207 fold_overflow_warning (("assuming signed overflow does not occur "
13208 "when simplifying comparison of "
13209 "absolute value and zero"),
13210 WARN_STRICT_OVERFLOW_CONDITIONAL);
13211 return omit_one_operand_loc (loc, type,
13212 constant_boolean_node (true, type),
13213 arg0);
13214 }
13215
13216 /* Convert ABS_EXPR<x> < 0 to false. */
13217 strict_overflow_p = false;
13218 if (code == LT_EXPR
13219 && (integer_zerop (arg1) || real_zerop (arg1))
13220 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13221 {
13222 if (strict_overflow_p)
13223 fold_overflow_warning (("assuming signed overflow does not occur "
13224 "when simplifying comparison of "
13225 "absolute value and zero"),
13226 WARN_STRICT_OVERFLOW_CONDITIONAL);
13227 return omit_one_operand_loc (loc, type,
13228 constant_boolean_node (false, type),
13229 arg0);
13230 }
13231
13232 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13233 and similarly for >= into !=. */
13234 if ((code == LT_EXPR || code == GE_EXPR)
13235 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13236 && TREE_CODE (arg1) == LSHIFT_EXPR
13237 && integer_onep (TREE_OPERAND (arg1, 0)))
13238 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13239 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13240 TREE_OPERAND (arg1, 1)),
13241 build_zero_cst (TREE_TYPE (arg0)));
13242
13243 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13244 otherwise Y might be >= # of bits in X's type and thus e.g.
13245 (unsigned char) (1 << Y) for Y == 15 might be 0.
13246 If the cast is widening, then 1 << Y should have unsigned type,
13247 otherwise if Y is number of bits in the signed shift type minus 1,
13248 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13249 Y == 31 might be 0xffffffff80000000. */
13250 if ((code == LT_EXPR || code == GE_EXPR)
13251 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13252 && CONVERT_EXPR_P (arg1)
13253 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13254 && (TYPE_PRECISION (TREE_TYPE (arg1))
13255 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13256 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13257 || (TYPE_PRECISION (TREE_TYPE (arg1))
13258 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13259 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13260 {
13261 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13262 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13263 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13264 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13265 build_zero_cst (TREE_TYPE (arg0)));
13266 }
13267
13268 return NULL_TREE;
13269
13270 case UNORDERED_EXPR:
13271 case ORDERED_EXPR:
13272 case UNLT_EXPR:
13273 case UNLE_EXPR:
13274 case UNGT_EXPR:
13275 case UNGE_EXPR:
13276 case UNEQ_EXPR:
13277 case LTGT_EXPR:
13278 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13279 {
13280 t1 = fold_relational_const (code, type, arg0, arg1);
13281 if (t1 != NULL_TREE)
13282 return t1;
13283 }
13284
13285 /* If the first operand is NaN, the result is constant. */
13286 if (TREE_CODE (arg0) == REAL_CST
13287 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13288 && (code != LTGT_EXPR || ! flag_trapping_math))
13289 {
13290 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13291 ? integer_zero_node
13292 : integer_one_node;
13293 return omit_one_operand_loc (loc, type, t1, arg1);
13294 }
13295
13296 /* If the second operand is NaN, the result is constant. */
13297 if (TREE_CODE (arg1) == REAL_CST
13298 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13299 && (code != LTGT_EXPR || ! flag_trapping_math))
13300 {
13301 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13302 ? integer_zero_node
13303 : integer_one_node;
13304 return omit_one_operand_loc (loc, type, t1, arg0);
13305 }
13306
13307 /* Simplify unordered comparison of something with itself. */
13308 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13309 && operand_equal_p (arg0, arg1, 0))
13310 return constant_boolean_node (1, type);
13311
13312 if (code == LTGT_EXPR
13313 && !flag_trapping_math
13314 && operand_equal_p (arg0, arg1, 0))
13315 return constant_boolean_node (0, type);
13316
13317 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13318 {
13319 tree targ0 = strip_float_extensions (arg0);
13320 tree targ1 = strip_float_extensions (arg1);
13321 tree newtype = TREE_TYPE (targ0);
13322
13323 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13324 newtype = TREE_TYPE (targ1);
13325
13326 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13327 return fold_build2_loc (loc, code, type,
13328 fold_convert_loc (loc, newtype, targ0),
13329 fold_convert_loc (loc, newtype, targ1));
13330 }
13331
13332 return NULL_TREE;
13333
13334 case COMPOUND_EXPR:
13335 /* When pedantic, a compound expression can be neither an lvalue
13336 nor an integer constant expression. */
13337 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13338 return NULL_TREE;
13339 /* Don't let (0, 0) be a null pointer constant. */
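/* (Illustrative: in "int *p = (0, 0);" the initializer is not an
   integer constant expression, so it must not fold to a bare 0 that
   a front end would accept as a null pointer constant; the NOP_EXPR
   wrapper prevents that.)  */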
13340 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13341 : fold_convert_loc (loc, type, arg1);
13342 return pedantic_non_lvalue_loc (loc, tem);
13343
13344 case COMPLEX_EXPR:
13345 if ((TREE_CODE (arg0) == REAL_CST
13346 && TREE_CODE (arg1) == REAL_CST)
13347 || (TREE_CODE (arg0) == INTEGER_CST
13348 && TREE_CODE (arg1) == INTEGER_CST))
13349 return build_complex (type, arg0, arg1);
13350 return NULL_TREE;
13351
13352 case ASSERT_EXPR:
13353 /* An ASSERT_EXPR should never be passed to fold_binary. */
13354 gcc_unreachable ();
13355
13356 case VEC_PACK_TRUNC_EXPR:
13357 case VEC_PACK_FIX_TRUNC_EXPR:
13358 {
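/* E.g. (illustrative): VEC_PACK_TRUNC_EXPR of two constant V2DI
   vectors yields a constant V4SI vector, truncating each 64-bit
   element to 32 bits; the FIX_TRUNC variant converts floating-point
   elements to integers instead.  */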
13359 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13360 tree *elts;
13361
13362 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13363 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13364 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13365 return NULL_TREE;
13366
13367 elts = XALLOCAVEC (tree, nelts);
13368 if (!vec_cst_ctor_to_array (arg0, elts)
13369 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13370 return NULL_TREE;
13371
13372 for (i = 0; i < nelts; i++)
13373 {
13374 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13375 ? NOP_EXPR : FIX_TRUNC_EXPR,
13376 TREE_TYPE (type), elts[i]);
13377 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13378 return NULL_TREE;
13379 }
13380
13381 return build_vector (type, elts);
13382 }
13383
13384 case VEC_WIDEN_MULT_LO_EXPR:
13385 case VEC_WIDEN_MULT_HI_EXPR:
13386 case VEC_WIDEN_MULT_EVEN_EXPR:
13387 case VEC_WIDEN_MULT_ODD_EXPR:
13388 {
13389 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13390 unsigned int out, ofs, scale;
13391 tree *elts;
13392
13393 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13394 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13395 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13396 return NULL_TREE;
13397
13398 elts = XALLOCAVEC (tree, nelts * 4);
13399 if (!vec_cst_ctor_to_array (arg0, elts)
13400 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13401 return NULL_TREE;
13402
13403 if (code == VEC_WIDEN_MULT_LO_EXPR)
13404 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13405 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13406 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13407 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13408 scale = 1, ofs = 0;
13409 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13410 scale = 1, ofs = 1;
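/* E.g. (illustrative): multiplying two constant V8HI vectors into a
   V4SI result on a little-endian target: LO uses input elements
   0..3, HI elements 4..7, EVEN elements 0,2,4,6 and ODD elements
   1,3,5,7, with each product computed in the wider element type.  */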
13411
13412 for (out = 0; out < nelts; out++)
13413 {
13414 unsigned int in1 = (out << scale) + ofs;
13415 unsigned int in2 = in1 + nelts * 2;
13416 tree t1, t2;
13417
13418 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13419 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13420
13421 if (t1 == NULL_TREE || t2 == NULL_TREE)
13422 return NULL_TREE;
13423 elts[out] = const_binop (MULT_EXPR, t1, t2);
13424 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13425 return NULL_TREE;
13426 }
13427
13428 return build_vector (type, elts);
13429 }
13430
13431 default:
13432 return NULL_TREE;
13433 } /* switch (code) */
13434 }
13435
13436 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13437 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13438 of GOTO_EXPR. */
13439
13440 static tree
13441 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13442 {
13443 switch (TREE_CODE (*tp))
13444 {
13445 case LABEL_EXPR:
13446 return *tp;
13447
13448 case GOTO_EXPR:
13449 *walk_subtrees = 0;
13450
13451 /* ... fall through ... */
13452
13453 default:
13454 return NULL_TREE;
13455 }
13456 }
13457
13458 /* Return whether the sub-tree ST contains a label which is accessible from
13459 outside the sub-tree. */
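/* (Illustrative: this guards the constant COND_EXPR folding below,
   so that a dead arm still containing a reachable label, e.g. from a
   GNU statement expression, is not thrown away.)  */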
13460
13461 static bool
13462 contains_label_p (tree st)
13463 {
13464 return
13465 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13466 }
13467
13468 /* Fold a ternary expression of code CODE and type TYPE with operands
13469 OP0, OP1, and OP2. Return the folded expression if folding is
13470 successful. Otherwise, return NULL_TREE. */
13471
13472 tree
13473 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13474 tree op0, tree op1, tree op2)
13475 {
13476 tree tem;
13477 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13478 enum tree_code_class kind = TREE_CODE_CLASS (code);
13479
13480 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13481 && TREE_CODE_LENGTH (code) == 3);
13482
13483 /* If this is a commutative operation, and OP0 is a constant, move it
13484 to OP1 to reduce the number of tests below. */
13485 if (commutative_ternary_tree_code (code)
13486 && tree_swap_operands_p (op0, op1, true))
13487 return fold_build3_loc (loc, code, type, op1, op0, op2);
13488
13489 tem = generic_simplify (loc, code, type, op0, op1, op2);
13490 if (tem)
13491 return tem;
13492
13493 /* Strip any conversions that don't change the mode. This is safe
13494 for every expression, except for a comparison expression because
13495 its signedness is derived from its operands. So, in the latter
13496 case, only strip conversions that don't change the signedness.
13497
13498 Note that this is done as an internal manipulation within the
13499 constant folder, in order to find the simplest representation of
13500 the arguments so that their form can be studied. In any case,
13501 the appropriate type conversions should be put back in the tree
13502 that will get out of the constant folder. */
13503 if (op0)
13504 {
13505 arg0 = op0;
13506 STRIP_NOPS (arg0);
13507 }
13508
13509 if (op1)
13510 {
13511 arg1 = op1;
13512 STRIP_NOPS (arg1);
13513 }
13514
13515 if (op2)
13516 {
13517 arg2 = op2;
13518 STRIP_NOPS (arg2);
13519 }
13520
13521 switch (code)
13522 {
13523 case COMPONENT_REF:
13524 if (TREE_CODE (arg0) == CONSTRUCTOR
13525 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13526 {
13527 unsigned HOST_WIDE_INT idx;
13528 tree field, value;
13529 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13530 if (field == arg1)
13531 return value;
13532 }
13533 return NULL_TREE;
13534
13535 case COND_EXPR:
13536 case VEC_COND_EXPR:
13537 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13538 so all simple results must be passed through pedantic_non_lvalue. */
13539 if (TREE_CODE (arg0) == INTEGER_CST)
13540 {
13541 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13542 tem = integer_zerop (arg0) ? op2 : op1;
13543 /* Only optimize constant conditions when the selected branch
13544 has the same type as the COND_EXPR. This avoids optimizing
13545 away "c ? x : throw", where the throw has a void type.
13546 Avoid throwing away an operand that contains a label. */
13547 if ((!TREE_SIDE_EFFECTS (unused_op)
13548 || !contains_label_p (unused_op))
13549 && (! VOID_TYPE_P (TREE_TYPE (tem))
13550 || VOID_TYPE_P (type)))
13551 return pedantic_non_lvalue_loc (loc, tem);
13552 return NULL_TREE;
13553 }
13554 else if (TREE_CODE (arg0) == VECTOR_CST)
13555 {
13556 if (integer_all_onesp (arg0))
13557 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13558 if (integer_zerop (arg0))
13559 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13560
13561 if ((TREE_CODE (arg1) == VECTOR_CST
13562 || TREE_CODE (arg1) == CONSTRUCTOR)
13563 && (TREE_CODE (arg2) == VECTOR_CST
13564 || TREE_CODE (arg2) == CONSTRUCTOR))
13565 {
13566 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13567 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13568 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
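/* E.g. (illustrative): for V4SI with mask { -1, 0, -1, 0 }, sel
   becomes { 0, 5, 2, 7 }: elements 0 and 2 are taken from ARG1,
   elements 1 and 3 from ARG2.  */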
13569 for (i = 0; i < nelts; i++)
13570 {
13571 tree val = VECTOR_CST_ELT (arg0, i);
13572 if (integer_all_onesp (val))
13573 sel[i] = i;
13574 else if (integer_zerop (val))
13575 sel[i] = nelts + i;
13576 else /* Currently unreachable. */
13577 return NULL_TREE;
13578 }
13579 tree t = fold_vec_perm (type, arg1, arg2, sel);
13580 if (t != NULL_TREE)
13581 return t;
13582 }
13583 }
13584
13585 if (operand_equal_p (arg1, op2, 0))
13586 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13587
13588 /* If we have A op B ? A : C, we may be able to convert this to a
13589 simpler expression, depending on the operation and the values
13590 of B and C. Signed zeros prevent all of these transformations,
13591 for reasons given above each one.
13592
13593 Also try swapping the arguments and inverting the conditional. */
13594 if (COMPARISON_CLASS_P (arg0)
13595 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13596 arg1, TREE_OPERAND (arg0, 1))
13597 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13598 {
13599 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13600 if (tem)
13601 return tem;
13602 }
13603
13604 if (COMPARISON_CLASS_P (arg0)
13605 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13606 op2,
13607 TREE_OPERAND (arg0, 1))
13608 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13609 {
13610 location_t loc0 = expr_location_or (arg0, loc);
13611 tem = fold_invert_truthvalue (loc0, arg0);
13612 if (tem && COMPARISON_CLASS_P (tem))
13613 {
13614 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13615 if (tem)
13616 return tem;
13617 }
13618 }
13619
13620 /* If the second operand is simpler than the third, swap them
13621 since that produces better jump optimization results. */
13622 if (truth_value_p (TREE_CODE (arg0))
13623 && tree_swap_operands_p (op1, op2, false))
13624 {
13625 location_t loc0 = expr_location_or (arg0, loc);
13626 /* See if this can be inverted. If it can't, possibly because
13627 it was a floating-point inequality comparison, don't do
13628 anything. */
13629 tem = fold_invert_truthvalue (loc0, arg0);
13630 if (tem)
13631 return fold_build3_loc (loc, code, type, tem, op2, op1);
13632 }
13633
13634 /* Convert A ? 1 : 0 to simply A. */
13635 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13636 : (integer_onep (op1)
13637 && !VECTOR_TYPE_P (type)))
13638 && integer_zerop (op2)
13639 /* If we try to convert OP0 to our type, the
13640 call to fold will try to move the conversion inside
13641 a COND, which will recurse. In that case, the COND_EXPR
13642 is probably the best choice, so leave it alone. */
13643 && type == TREE_TYPE (arg0))
13644 return pedantic_non_lvalue_loc (loc, arg0);
13645
13646 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13647 over COND_EXPR in cases such as floating point comparisons. */
13648 if (integer_zerop (op1)
13649 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13650 : (integer_onep (op2)
13651 && !VECTOR_TYPE_P (type)))
13652 && truth_value_p (TREE_CODE (arg0)))
13653 return pedantic_non_lvalue_loc (loc,
13654 fold_convert_loc (loc, type,
13655 invert_truthvalue_loc (loc,
13656 arg0)));
13657
13658 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
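/* E.g. (illustrative): for 32-bit int A, "A < 0 ? INT_MIN : 0"
   folds to "A & INT_MIN", masking off everything but the sign
   bit.  */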
13659 if (TREE_CODE (arg0) == LT_EXPR
13660 && integer_zerop (TREE_OPERAND (arg0, 1))
13661 && integer_zerop (op2)
13662 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13663 {
13664 /* sign_bit_p looks through both zero and sign extensions,
13665 but for this optimization only sign extensions are
13666 usable. */
13667 tree tem2 = TREE_OPERAND (arg0, 0);
13668 while (tem != tem2)
13669 {
13670 if (TREE_CODE (tem2) != NOP_EXPR
13671 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13672 {
13673 tem = NULL_TREE;
13674 break;
13675 }
13676 tem2 = TREE_OPERAND (tem2, 0);
13677 }
13678 /* sign_bit_p only checks ARG1 bits within A's precision.
13679 If <sign bit of A> has wider type than A, bits outside
13680 of A's precision in <sign bit of A> need to be checked.
13681 If they are all 0, this optimization needs to be done
13682 in unsigned A's type; if they are all 1, in signed A's type;
13683 otherwise this can't be done. */
13684 if (tem
13685 && TYPE_PRECISION (TREE_TYPE (tem))
13686 < TYPE_PRECISION (TREE_TYPE (arg1))
13687 && TYPE_PRECISION (TREE_TYPE (tem))
13688 < TYPE_PRECISION (type))
13689 {
13690 int inner_width, outer_width;
13691 tree tem_type;
13692
13693 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13694 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13695 if (outer_width > TYPE_PRECISION (type))
13696 outer_width = TYPE_PRECISION (type);
13697
13698 wide_int mask = wi::shifted_mask
13699 (inner_width, outer_width - inner_width, false,
13700 TYPE_PRECISION (TREE_TYPE (arg1)));
13701
13702 wide_int common = mask & arg1;
13703 if (common == mask)
13704 {
13705 tem_type = signed_type_for (TREE_TYPE (tem));
13706 tem = fold_convert_loc (loc, tem_type, tem);
13707 }
13708 else if (common == 0)
13709 {
13710 tem_type = unsigned_type_for (TREE_TYPE (tem));
13711 tem = fold_convert_loc (loc, tem_type, tem);
13712 }
13713 else
13714 tem = NULL;
13715 }
13716
13717 if (tem)
13718 return
13719 fold_convert_loc (loc, type,
13720 fold_build2_loc (loc, BIT_AND_EXPR,
13721 TREE_TYPE (tem), tem,
13722 fold_convert_loc (loc,
13723 TREE_TYPE (tem),
13724 arg1)));
13725 }
13726
13727 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13728 already handled above. */
13729 if (TREE_CODE (arg0) == BIT_AND_EXPR
13730 && integer_onep (TREE_OPERAND (arg0, 1))
13731 && integer_zerop (op2)
13732 && integer_pow2p (arg1))
13733 {
13734 tree tem = TREE_OPERAND (arg0, 0);
13735 STRIP_NOPS (tem);
13736 if (TREE_CODE (tem) == RSHIFT_EXPR
13737 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13738 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13739 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13740 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13741 TREE_OPERAND (tem, 0), arg1);
13742 }
13743
13744 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13745 is probably obsolete because the first operand should be a
13746 truth value (that's why we have the two cases above), but let's
13747 leave it in until we can confirm this for all front-ends. */
13748 if (integer_zerop (op2)
13749 && TREE_CODE (arg0) == NE_EXPR
13750 && integer_zerop (TREE_OPERAND (arg0, 1))
13751 && integer_pow2p (arg1)
13752 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13753 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13754 arg1, OEP_ONLY_CONST))
13755 return pedantic_non_lvalue_loc (loc,
13756 fold_convert_loc (loc, type,
13757 TREE_OPERAND (arg0, 0)));
13758
13759 /* Disable the transformations below for vectors, since
13760 fold_binary_op_with_conditional_arg may undo them immediately,
13761 yielding an infinite loop. */
13762 if (code == VEC_COND_EXPR)
13763 return NULL_TREE;
13764
13765 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13766 if (integer_zerop (op2)
13767 && truth_value_p (TREE_CODE (arg0))
13768 && truth_value_p (TREE_CODE (arg1))
13769 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13770 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13771 : TRUTH_ANDIF_EXPR,
13772 type, fold_convert_loc (loc, type, arg0), arg1);
13773
13774 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13775 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13776 && truth_value_p (TREE_CODE (arg0))
13777 && truth_value_p (TREE_CODE (arg1))
13778 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13779 {
13780 location_t loc0 = expr_location_or (arg0, loc);
13781 /* Only perform transformation if ARG0 is easily inverted. */
13782 tem = fold_invert_truthvalue (loc0, arg0);
13783 if (tem)
13784 return fold_build2_loc (loc, code == VEC_COND_EXPR
13785 ? BIT_IOR_EXPR
13786 : TRUTH_ORIF_EXPR,
13787 type, fold_convert_loc (loc, type, tem),
13788 arg1);
13789 }
13790
13791 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13792 if (integer_zerop (arg1)
13793 && truth_value_p (TREE_CODE (arg0))
13794 && truth_value_p (TREE_CODE (op2))
13795 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13796 {
13797 location_t loc0 = expr_location_or (arg0, loc);
13798 /* Only perform transformation if ARG0 is easily inverted. */
13799 tem = fold_invert_truthvalue (loc0, arg0);
13800 if (tem)
13801 return fold_build2_loc (loc, code == VEC_COND_EXPR
13802 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13803 type, fold_convert_loc (loc, type, tem),
13804 op2);
13805 }
13806
13807 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13808 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13809 && truth_value_p (TREE_CODE (arg0))
13810 && truth_value_p (TREE_CODE (op2))
13811 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13812 return fold_build2_loc (loc, code == VEC_COND_EXPR
13813 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13814 type, fold_convert_loc (loc, type, arg0), op2);
13815
13816 return NULL_TREE;
13817
13818 case CALL_EXPR:
13819 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13820 of fold_ternary on them. */
13821 gcc_unreachable ();
13822
13823 case BIT_FIELD_REF:
13824 if ((TREE_CODE (arg0) == VECTOR_CST
13825 || (TREE_CODE (arg0) == CONSTRUCTOR
13826 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13827 && (type == TREE_TYPE (TREE_TYPE (arg0))
13828 || (TREE_CODE (type) == VECTOR_TYPE
13829 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13830 {
13831 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13832 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13833 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13834 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13835
13836 if (n != 0
13837 && (idx % width) == 0
13838 && (n % width) == 0
13839 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13840 {
13841 idx = idx / width;
13842 n = n / width;
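/* E.g. (illustrative): BIT_FIELD_REF <v, 64, 64> of a V4SI vector v
   has width 32, giving idx = 2 and n = 2, i.e. elements 2 and 3.  */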
13843
13844 if (TREE_CODE (arg0) == VECTOR_CST)
13845 {
13846 if (n == 1)
13847 return VECTOR_CST_ELT (arg0, idx);
13848
13849 tree *vals = XALLOCAVEC (tree, n);
13850 for (unsigned i = 0; i < n; ++i)
13851 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13852 return build_vector (type, vals);
13853 }
13854
13855 /* Constructor elements can be subvectors. */
13856 unsigned HOST_WIDE_INT k = 1;
13857 if (CONSTRUCTOR_NELTS (arg0) != 0)
13858 {
13859 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13860 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13861 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13862 }
13863
13864 /* We keep an exact subset of the constructor elements. */
13865 if ((idx % k) == 0 && (n % k) == 0)
13866 {
13867 if (CONSTRUCTOR_NELTS (arg0) == 0)
13868 return build_constructor (type, NULL);
13869 idx /= k;
13870 n /= k;
13871 if (n == 1)
13872 {
13873 if (idx < CONSTRUCTOR_NELTS (arg0))
13874 return CONSTRUCTOR_ELT (arg0, idx)->value;
13875 return build_zero_cst (type);
13876 }
13877
13878 vec<constructor_elt, va_gc> *vals;
13879 vec_alloc (vals, n);
13880 for (unsigned i = 0;
13881 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13882 ++i)
13883 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13884 CONSTRUCTOR_ELT
13885 (arg0, idx + i)->value);
13886 return build_constructor (type, vals);
13887 }
13888 /* The bitfield references a single constructor element. */
13889 else if (idx + n <= (idx / k + 1) * k)
13890 {
13891 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13892 return build_zero_cst (type);
13893 else if (n == k)
13894 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13895 else
13896 return fold_build3_loc (loc, code, type,
13897 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13898 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13899 }
13900 }
13901 }
13902
13903 /* A bit-field-ref that references the full argument can be stripped. */
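/* E.g. (illustrative): BIT_FIELD_REF <i, 32, 0> of a 32-bit integer
   i is just a conversion of i to TYPE.  */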
13904 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13905 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13906 && integer_zerop (op2))
13907 return fold_convert_loc (loc, type, arg0);
13908
13909 /* On constants we can use native encode/interpret to constant
13910 fold (nearly) all BIT_FIELD_REFs. */
13911 if (CONSTANT_CLASS_P (arg0)
13912 && can_native_interpret_type_p (type)
13913 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13914 /* This limitation should not be necessary; we just need to
13915 round this up to mode size. */
13916 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13917 /* Need bit-shifting of the buffer to relax the following. */
13918 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13919 {
13920 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13921 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13922 unsigned HOST_WIDE_INT clen;
13923 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13924 /* ??? We cannot tell native_encode_expr to start at
13925 some random byte only. So limit ourselves to a reasonable amount
13926 of work. */
13927 if (clen <= 4096)
13928 {
13929 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13930 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13931 if (len > 0
13932 && len * BITS_PER_UNIT >= bitpos + bitsize)
13933 {
13934 tree v = native_interpret_expr (type,
13935 b + bitpos / BITS_PER_UNIT,
13936 bitsize / BITS_PER_UNIT);
13937 if (v)
13938 return v;
13939 }
13940 }
13941 }
13942
13943 return NULL_TREE;
13944
13945 case FMA_EXPR:
13946 /* For integers we can decompose the FMA if possible. */
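/* E.g. (illustrative): FMA_EXPR <2, 3, c> becomes 6 + c, and
   FMA_EXPR <a, b, 0> becomes a * b.  */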
13947 if (TREE_CODE (arg0) == INTEGER_CST
13948 && TREE_CODE (arg1) == INTEGER_CST)
13949 return fold_build2_loc (loc, PLUS_EXPR, type,
13950 const_binop (MULT_EXPR, arg0, arg1), arg2);
13951 if (integer_zerop (arg2))
13952 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13953
13954 return fold_fma (loc, type, arg0, arg1, arg2);
13955
13956 case VEC_PERM_EXPR:
13957 if (TREE_CODE (arg2) == VECTOR_CST)
13958 {
13959 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13960 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13961 unsigned char *sel2 = sel + nelts;
13962 bool need_mask_canon = false;
13963 bool need_mask_canon2 = false;
13964 bool all_in_vec0 = true;
13965 bool all_in_vec1 = true;
13966 bool maybe_identity = true;
13967 bool single_arg = (op0 == op1);
13968 bool changed = false;
13969
13970 mask2 = 2 * nelts - 1;
13971 mask = single_arg ? (nelts - 1) : mask2;
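/* E.g. (illustrative): with nelts == 4, a mask element of 6 selects
   6 & 7 == 6, i.e. element 2 of the second vector; if both inputs
   are the same vector it is reduced to 6 & 3 == 2.  */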
13972 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13973 for (i = 0; i < nelts; i++)
13974 {
13975 tree val = VECTOR_CST_ELT (arg2, i);
13976 if (TREE_CODE (val) != INTEGER_CST)
13977 return NULL_TREE;
13978
13979 /* Make sure that the perm value is in an acceptable
13980 range. */
13981 wide_int t = val;
13982 need_mask_canon |= wi::gtu_p (t, mask);
13983 need_mask_canon2 |= wi::gtu_p (t, mask2);
13984 sel[i] = t.to_uhwi () & mask;
13985 sel2[i] = t.to_uhwi () & mask2;
13986
13987 if (sel[i] < nelts)
13988 all_in_vec1 = false;
13989 else
13990 all_in_vec0 = false;
13991
13992 if ((sel[i] & (nelts-1)) != i)
13993 maybe_identity = false;
13994 }
13995
13996 if (maybe_identity)
13997 {
13998 if (all_in_vec0)
13999 return op0;
14000 if (all_in_vec1)
14001 return op1;
14002 }
14003
14004 if (all_in_vec0)
14005 op1 = op0;
14006 else if (all_in_vec1)
14007 {
14008 op0 = op1;
14009 for (i = 0; i < nelts; i++)
14010 sel[i] -= nelts;
14011 need_mask_canon = true;
14012 }
14013
14014 if ((TREE_CODE (op0) == VECTOR_CST
14015 || TREE_CODE (op0) == CONSTRUCTOR)
14016 && (TREE_CODE (op1) == VECTOR_CST
14017 || TREE_CODE (op1) == CONSTRUCTOR))
14018 {
14019 tree t = fold_vec_perm (type, op0, op1, sel);
14020 if (t != NULL_TREE)
14021 return t;
14022 }
14023
14024 if (op0 == op1 && !single_arg)
14025 changed = true;
14026
14027 /* Some targets are deficient and fail to expand a single
14028 argument permutation while still allowing an equivalent
14029 2-argument version. */
14030 if (need_mask_canon && arg2 == op2
14031 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
14032 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
14033 {
14034 need_mask_canon = need_mask_canon2;
14035 sel = sel2;
14036 }
14037
14038 if (need_mask_canon && arg2 == op2)
14039 {
14040 tree *tsel = XALLOCAVEC (tree, nelts);
14041 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14042 for (i = 0; i < nelts; i++)
14043 tsel[i] = build_int_cst (eltype, sel[i]);
14044 op2 = build_vector (TREE_TYPE (arg2), tsel);
14045 changed = true;
14046 }
14047
14048 if (changed)
14049 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14050 }
14051 return NULL_TREE;
14052
14053 default:
14054 return NULL_TREE;
14055 } /* switch (code) */
14056 }
14057
14058 /* Perform constant folding and related simplification of EXPR.
14059 The related simplifications include x*1 => x, x*0 => 0, etc.,
14060 and application of the associative law.
14061 NOP_EXPR conversions may be removed freely (as long as we
14062 are careful not to change the type of the overall expression).
14063 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14064 but we can constant-fold them if they have constant operands. */
14065
14066 #ifdef ENABLE_FOLD_CHECKING
14067 # define fold(x) fold_1 (x)
14068 static tree fold_1 (tree);
14069 static
14070 #endif
14071 tree
14072 fold (tree expr)
14073 {
14074 const tree t = expr;
14075 enum tree_code code = TREE_CODE (t);
14076 enum tree_code_class kind = TREE_CODE_CLASS (code);
14077 tree tem;
14078 location_t loc = EXPR_LOCATION (expr);
14079
14080 /* Return right away if a constant. */
14081 if (kind == tcc_constant)
14082 return t;
14083
14084 /* CALL_EXPR-like objects with variable numbers of operands are
14085 treated specially. */
14086 if (kind == tcc_vl_exp)
14087 {
14088 if (code == CALL_EXPR)
14089 {
14090 tem = fold_call_expr (loc, expr, false);
14091 return tem ? tem : expr;
14092 }
14093 return expr;
14094 }
14095
14096 if (IS_EXPR_CODE_CLASS (kind))
14097 {
14098 tree type = TREE_TYPE (t);
14099 tree op0, op1, op2;
14100
14101 switch (TREE_CODE_LENGTH (code))
14102 {
14103 case 1:
14104 op0 = TREE_OPERAND (t, 0);
14105 tem = fold_unary_loc (loc, code, type, op0);
14106 return tem ? tem : expr;
14107 case 2:
14108 op0 = TREE_OPERAND (t, 0);
14109 op1 = TREE_OPERAND (t, 1);
14110 tem = fold_binary_loc (loc, code, type, op0, op1);
14111 return tem ? tem : expr;
14112 case 3:
14113 op0 = TREE_OPERAND (t, 0);
14114 op1 = TREE_OPERAND (t, 1);
14115 op2 = TREE_OPERAND (t, 2);
14116 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14117 return tem ? tem : expr;
14118 default:
14119 break;
14120 }
14121 }
14122
14123 switch (code)
14124 {
14125 case ARRAY_REF:
14126 {
14127 tree op0 = TREE_OPERAND (t, 0);
14128 tree op1 = TREE_OPERAND (t, 1);
14129
14130 if (TREE_CODE (op1) == INTEGER_CST
14131 && TREE_CODE (op0) == CONSTRUCTOR
14132 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14133 {
14134 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14135 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14136 unsigned HOST_WIDE_INT begin = 0;
14137
14138 /* Find a matching index by means of a binary search. */
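/* E.g. (illustrative): in { [0] = a, [2 ... 5] = b, [7] = c },
   looking up index 3 matches the RANGE_EXPR and yields b.  */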
14139 while (begin != end)
14140 {
14141 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14142 tree index = (*elts)[middle].index;
14143
14144 if (TREE_CODE (index) == INTEGER_CST
14145 && tree_int_cst_lt (index, op1))
14146 begin = middle + 1;
14147 else if (TREE_CODE (index) == INTEGER_CST
14148 && tree_int_cst_lt (op1, index))
14149 end = middle;
14150 else if (TREE_CODE (index) == RANGE_EXPR
14151 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14152 begin = middle + 1;
14153 else if (TREE_CODE (index) == RANGE_EXPR
14154 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14155 end = middle;
14156 else
14157 return (*elts)[middle].value;
14158 }
14159 }
14160
14161 return t;
14162 }
14163
14164 /* Return a VECTOR_CST if possible. */
14165 case CONSTRUCTOR:
14166 {
14167 tree type = TREE_TYPE (t);
14168 if (TREE_CODE (type) != VECTOR_TYPE)
14169 return t;
14170
14171 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14172 unsigned HOST_WIDE_INT idx, pos = 0;
14173 tree value;
14174
14175 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14176 {
14177 if (!CONSTANT_CLASS_P (value))
14178 return t;
14179 if (TREE_CODE (value) == VECTOR_CST)
14180 {
14181 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14182 vec[pos++] = VECTOR_CST_ELT (value, i);
14183 }
14184 else
14185 vec[pos++] = value;
14186 }
14187 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14188 vec[pos] = build_zero_cst (TREE_TYPE (type));
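/* E.g. (illustrative): a V4SI CONSTRUCTOR { 1, 2 } is completed to
   the VECTOR_CST { 1, 2, 0, 0 }.  */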
14189
14190 return build_vector (type, vec);
14191 }
14192
14193 case CONST_DECL:
14194 return fold (DECL_INITIAL (t));
14195
14196 default:
14197 return t;
14198 } /* switch (code) */
14199 }
14200
14201 #ifdef ENABLE_FOLD_CHECKING
14202 #undef fold
14203
14204 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14205 hash_table<pointer_hash<const tree_node> > *);
14206 static void fold_check_failed (const_tree, const_tree);
14207 void print_fold_checksum (const_tree);
14208
14209 /* When --enable-checking=fold, compute a digest of expr before
14210 and after the actual fold call to verify that fold did not
14211 accidentally change the original expr. */
14212
14213 tree
14214 fold (tree expr)
14215 {
14216 tree ret;
14217 struct md5_ctx ctx;
14218 unsigned char checksum_before[16], checksum_after[16];
14219 hash_table<pointer_hash<const tree_node> > ht (32);
14220
14221 md5_init_ctx (&ctx);
14222 fold_checksum_tree (expr, &ctx, &ht);
14223 md5_finish_ctx (&ctx, checksum_before);
14224 ht.empty ();
14225
14226 ret = fold_1 (expr);
14227
14228 md5_init_ctx (&ctx);
14229 fold_checksum_tree (expr, &ctx, &ht);
14230 md5_finish_ctx (&ctx, checksum_after);
14231
14232 if (memcmp (checksum_before, checksum_after, 16))
14233 fold_check_failed (expr, ret);
14234
14235 return ret;
14236 }
14237
14238 void
14239 print_fold_checksum (const_tree expr)
14240 {
14241 struct md5_ctx ctx;
14242 unsigned char checksum[16], cnt;
14243 hash_table<pointer_hash<const tree_node> > ht (32);
14244
14245 md5_init_ctx (&ctx);
14246 fold_checksum_tree (expr, &ctx, &ht);
14247 md5_finish_ctx (&ctx, checksum);
14248 for (cnt = 0; cnt < 16; ++cnt)
14249 fprintf (stderr, "%02x", checksum[cnt]);
14250 putc ('\n', stderr);
14251 }
14252
14253 static void
14254 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14255 {
14256 internal_error ("fold check: original tree changed by fold");
14257 }
14258
14259 static void
14260 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14261 hash_table<pointer_hash <const tree_node> > *ht)
14262 {
14263 const tree_node **slot;
14264 enum tree_code code;
14265 union tree_node buf;
14266 int i, len;
14267
14268 recursive_label:
14269 if (expr == NULL)
14270 return;
14271 slot = ht->find_slot (expr, INSERT);
14272 if (*slot != NULL)
14273 return;
14274 *slot = expr;
14275 code = TREE_CODE (expr);
14276 if (TREE_CODE_CLASS (code) == tcc_declaration
14277 && DECL_ASSEMBLER_NAME_SET_P (expr))
14278 {
14279 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14280 memcpy ((char *) &buf, expr, tree_size (expr));
14281 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14282 expr = (tree) &buf;
14283 }
14284 else if (TREE_CODE_CLASS (code) == tcc_type
14285 && (TYPE_POINTER_TO (expr)
14286 || TYPE_REFERENCE_TO (expr)
14287 || TYPE_CACHED_VALUES_P (expr)
14288 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14289 || TYPE_NEXT_VARIANT (expr)))
14290 {
14291 /* Allow these fields to be modified. */
14292 tree tmp;
14293 memcpy ((char *) &buf, expr, tree_size (expr));
14294 expr = tmp = (tree) &buf;
14295 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14296 TYPE_POINTER_TO (tmp) = NULL;
14297 TYPE_REFERENCE_TO (tmp) = NULL;
14298 TYPE_NEXT_VARIANT (tmp) = NULL;
14299 if (TYPE_CACHED_VALUES_P (tmp))
14300 {
14301 TYPE_CACHED_VALUES_P (tmp) = 0;
14302 TYPE_CACHED_VALUES (tmp) = NULL;
14303 }
14304 }
14305 md5_process_bytes (expr, tree_size (expr), ctx);
14306 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14307 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14308 if (TREE_CODE_CLASS (code) != tcc_type
14309 && TREE_CODE_CLASS (code) != tcc_declaration
14310 && code != TREE_LIST
14311 && code != SSA_NAME
14312 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14313 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14314 switch (TREE_CODE_CLASS (code))
14315 {
14316 case tcc_constant:
14317 switch (code)
14318 {
14319 case STRING_CST:
14320 md5_process_bytes (TREE_STRING_POINTER (expr),
14321 TREE_STRING_LENGTH (expr), ctx);
14322 break;
14323 case COMPLEX_CST:
14324 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14325 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14326 break;
14327 case VECTOR_CST:
14328 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14329 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14330 break;
14331 default:
14332 break;
14333 }
14334 break;
14335 case tcc_exceptional:
14336 switch (code)
14337 {
14338 case TREE_LIST:
14339 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14340 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14341 expr = TREE_CHAIN (expr);
14342 goto recursive_label;
14343 break;
14344 case TREE_VEC:
14345 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14346 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14347 break;
14348 default:
14349 break;
14350 }
14351 break;
14352 case tcc_expression:
14353 case tcc_reference:
14354 case tcc_comparison:
14355 case tcc_unary:
14356 case tcc_binary:
14357 case tcc_statement:
14358 case tcc_vl_exp:
14359 len = TREE_OPERAND_LENGTH (expr);
14360 for (i = 0; i < len; ++i)
14361 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14362 break;
14363 case tcc_declaration:
14364 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14365 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14366 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14367 {
14368 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14369 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14370 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14371 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14372 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14373 }
14374
14375 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14376 {
14377 if (TREE_CODE (expr) == FUNCTION_DECL)
14378 {
14379 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14380 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14381 }
14382 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14383 }
14384 break;
14385 case tcc_type:
14386 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14387 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14388 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14389 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14390 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14391 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14392 if (INTEGRAL_TYPE_P (expr)
14393 || SCALAR_FLOAT_TYPE_P (expr))
14394 {
14395 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14396 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14397 }
14398 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14399 if (TREE_CODE (expr) == RECORD_TYPE
14400 || TREE_CODE (expr) == UNION_TYPE
14401 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14402 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14403 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14404 break;
14405 default:
14406 break;
14407 }
14408 }
14409
14410 /* Helper function for outputting the checksum of a tree T. When
14411 debugging with gdb, you can "define mynext" to be "next" followed
14412 by "call debug_fold_checksum (op0)", then just trace down till the
14413 outputs differ. */
14414
14415 DEBUG_FUNCTION void
14416 debug_fold_checksum (const_tree t)
14417 {
14418 int i;
14419 unsigned char checksum[16];
14420 struct md5_ctx ctx;
14421 hash_table<pointer_hash<const tree_node> > ht (32);
14422
14423 md5_init_ctx (&ctx);
14424 fold_checksum_tree (t, &ctx, &ht);
14425 md5_finish_ctx (&ctx, checksum);
14426 ht.empty ();
14427
14428 for (i = 0; i < 16; i++)
14429 fprintf (stderr, "%d ", checksum[i]);
14430
14431 fprintf (stderr, "\n");
14432 }
14433
14434 #endif
14435
14436 /* Fold a unary tree expression with code CODE of type TYPE with an
14437 operand OP0. LOC is the location of the resulting expression.
14438 Return a folded expression if successful. Otherwise, return a tree
14439 expression with code CODE of type TYPE with an operand OP0. */
14440
14441 tree
14442 fold_build1_stat_loc (location_t loc,
14443 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14444 {
14445 tree tem;
14446 #ifdef ENABLE_FOLD_CHECKING
14447 unsigned char checksum_before[16], checksum_after[16];
14448 struct md5_ctx ctx;
14449 hash_table<pointer_hash<const tree_node> > ht (32);
14450
14451 md5_init_ctx (&ctx);
14452 fold_checksum_tree (op0, &ctx, &ht);
14453 md5_finish_ctx (&ctx, checksum_before);
14454 ht.empty ();
14455 #endif
14456
14457 tem = fold_unary_loc (loc, code, type, op0);
14458 if (!tem)
14459 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14460
14461 #ifdef ENABLE_FOLD_CHECKING
14462 md5_init_ctx (&ctx);
14463 fold_checksum_tree (op0, &ctx, &ht);
14464 md5_finish_ctx (&ctx, checksum_after);
14465
14466 if (memcmp (checksum_before, checksum_after, 16))
14467 fold_check_failed (op0, tem);
14468 #endif
14469 return tem;
14470 }
14471
14472 /* Fold a binary tree expression with code CODE of type TYPE with
14473 operands OP0 and OP1. LOC is the location of the resulting
14474 expression. Return a folded expression if successful. Otherwise,
14475 return a tree expression with code CODE of type TYPE with operands
14476 OP0 and OP1. */
14477
14478 tree
14479 fold_build2_stat_loc (location_t loc,
14480 enum tree_code code, tree type, tree op0, tree op1
14481 MEM_STAT_DECL)
14482 {
14483 tree tem;
14484 #ifdef ENABLE_FOLD_CHECKING
14485 unsigned char checksum_before_op0[16],
14486 checksum_before_op1[16],
14487 checksum_after_op0[16],
14488 checksum_after_op1[16];
14489 struct md5_ctx ctx;
14490 hash_table<pointer_hash<const tree_node> > ht (32);
14491
14492 md5_init_ctx (&ctx);
14493 fold_checksum_tree (op0, &ctx, &ht);
14494 md5_finish_ctx (&ctx, checksum_before_op0);
14495 ht.empty ();
14496
14497 md5_init_ctx (&ctx);
14498 fold_checksum_tree (op1, &ctx, &ht);
14499 md5_finish_ctx (&ctx, checksum_before_op1);
14500 ht.empty ();
14501 #endif
14502
14503 tem = fold_binary_loc (loc, code, type, op0, op1);
14504 if (!tem)
14505 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14506
14507 #ifdef ENABLE_FOLD_CHECKING
14508 md5_init_ctx (&ctx);
14509 fold_checksum_tree (op0, &ctx, &ht);
14510 md5_finish_ctx (&ctx, checksum_after_op0);
14511 ht.empty ();
14512
14513 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14514 fold_check_failed (op0, tem);
14515
14516 md5_init_ctx (&ctx);
14517 fold_checksum_tree (op1, &ctx, &ht);
14518 md5_finish_ctx (&ctx, checksum_after_op1);
14519
14520 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14521 fold_check_failed (op1, tem);
14522 #endif
14523 return tem;
14524 }
14525
14526 /* Fold a ternary tree expression with code CODE of type TYPE with
14527 operands OP0, OP1, and OP2. Return a folded expression if
14528 successful. Otherwise, return a tree expression with code CODE of
14529 type TYPE with operands OP0, OP1, and OP2. */
14530
14531 tree
14532 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14533 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14534 {
14535 tree tem;
14536 #ifdef ENABLE_FOLD_CHECKING
14537 unsigned char checksum_before_op0[16],
14538 checksum_before_op1[16],
14539 checksum_before_op2[16],
14540 checksum_after_op0[16],
14541 checksum_after_op1[16],
14542 checksum_after_op2[16];
14543 struct md5_ctx ctx;
14544 hash_table<pointer_hash<const tree_node> > ht (32);
14545
14546 md5_init_ctx (&ctx);
14547 fold_checksum_tree (op0, &ctx, &ht);
14548 md5_finish_ctx (&ctx, checksum_before_op0);
14549 ht.empty ();
14550
14551 md5_init_ctx (&ctx);
14552 fold_checksum_tree (op1, &ctx, &ht);
14553 md5_finish_ctx (&ctx, checksum_before_op1);
14554 ht.empty ();
14555
14556 md5_init_ctx (&ctx);
14557 fold_checksum_tree (op2, &ctx, &ht);
14558 md5_finish_ctx (&ctx, checksum_before_op2);
14559 ht.empty ();
14560 #endif
14561
14562 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14563 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14564 if (!tem)
14565 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14566
14567 #ifdef ENABLE_FOLD_CHECKING
14568 md5_init_ctx (&ctx);
14569 fold_checksum_tree (op0, &ctx, &ht);
14570 md5_finish_ctx (&ctx, checksum_after_op0);
14571 ht.empty ();
14572
14573 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14574 fold_check_failed (op0, tem);
14575
14576 md5_init_ctx (&ctx);
14577 fold_checksum_tree (op1, &ctx, &ht);
14578 md5_finish_ctx (&ctx, checksum_after_op1);
14579 ht.empty ();
14580
14581 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14582 fold_check_failed (op1, tem);
14583
14584 md5_init_ctx (&ctx);
14585 fold_checksum_tree (op2, &ctx, &ht);
14586 md5_finish_ctx (&ctx, checksum_after_op2);
14587
14588 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14589 fold_check_failed (op2, tem);
14590 #endif
14591 return tem;
14592 }
14593
14594 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14595 arguments in ARGARRAY, and a null static chain.
14596 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14597 of type TYPE from the given operands as constructed by build_call_array. */
14598
14599 tree
14600 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14601 int nargs, tree *argarray)
14602 {
14603 tree tem;
14604 #ifdef ENABLE_FOLD_CHECKING
14605 unsigned char checksum_before_fn[16],
14606 checksum_before_arglist[16],
14607 checksum_after_fn[16],
14608 checksum_after_arglist[16];
14609 struct md5_ctx ctx;
14610 hash_table<pointer_hash<const tree_node> > ht (32);
14611 int i;
14612
14613 md5_init_ctx (&ctx);
14614 fold_checksum_tree (fn, &ctx, &ht);
14615 md5_finish_ctx (&ctx, checksum_before_fn);
14616 ht.empty ();
14617
14618 md5_init_ctx (&ctx);
14619 for (i = 0; i < nargs; i++)
14620 fold_checksum_tree (argarray[i], &ctx, &ht);
14621 md5_finish_ctx (&ctx, checksum_before_arglist);
14622 ht.empty ();
14623 #endif
14624
14625 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14626
14627 #ifdef ENABLE_FOLD_CHECKING
14628 md5_init_ctx (&ctx);
14629 fold_checksum_tree (fn, &ctx, &ht);
14630 md5_finish_ctx (&ctx, checksum_after_fn);
14631 ht.empty ();
14632
14633 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14634 fold_check_failed (fn, tem);
14635
14636 md5_init_ctx (&ctx);
14637 for (i = 0; i < nargs; i++)
14638 fold_checksum_tree (argarray[i], &ctx, &ht);
14639 md5_finish_ctx (&ctx, checksum_after_arglist);
14640
14641 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14642 fold_check_failed (NULL_TREE, tem);
14643 #endif
14644 return tem;
14645 }
14646
14647 /* Perform constant folding and related simplification of initializer
14648 expression EXPR. These behave identically to "fold_buildN" but ignore
14649 potential run-time traps and exceptions that fold must preserve. */
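/* E.g. (illustrative): with -frounding-math, 1.0 / 3.0 is normally
   not folded because the run-time rounding mode is unknown, but a
   static initializer is evaluated once with the default mode, so
   these variants clear flag_rounding_math around the fold_buildN
   call.  */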
14650
14651 #define START_FOLD_INIT \
14652 int saved_signaling_nans = flag_signaling_nans;\
14653 int saved_trapping_math = flag_trapping_math;\
14654 int saved_rounding_math = flag_rounding_math;\
14655 int saved_trapv = flag_trapv;\
14656 int saved_folding_initializer = folding_initializer;\
14657 flag_signaling_nans = 0;\
14658 flag_trapping_math = 0;\
14659 flag_rounding_math = 0;\
14660 flag_trapv = 0;\
14661 folding_initializer = 1;
14662
14663 #define END_FOLD_INIT \
14664 flag_signaling_nans = saved_signaling_nans;\
14665 flag_trapping_math = saved_trapping_math;\
14666 flag_rounding_math = saved_rounding_math;\
14667 flag_trapv = saved_trapv;\
14668 folding_initializer = saved_folding_initializer;
14669
14670 tree
14671 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14672 tree type, tree op)
14673 {
14674 tree result;
14675 START_FOLD_INIT;
14676
14677 result = fold_build1_loc (loc, code, type, op);
14678
14679 END_FOLD_INIT;
14680 return result;
14681 }
14682
14683 tree
14684 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14685 tree type, tree op0, tree op1)
14686 {
14687 tree result;
14688 START_FOLD_INIT;
14689
14690 result = fold_build2_loc (loc, code, type, op0, op1);
14691
14692 END_FOLD_INIT;
14693 return result;
14694 }
14695
14696 tree
14697 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14698 int nargs, tree *argarray)
14699 {
14700 tree result;
14701 START_FOLD_INIT;
14702
14703 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14704
14705 END_FOLD_INIT;
14706 return result;
14707 }
14708
14709 #undef START_FOLD_INIT
14710 #undef END_FOLD_INIT
14711
14712 /* Determine if the first argument is a multiple of the second. Return 0 if
14713 it is not, or if we cannot easily determine it to be.
14714
14715 An example of the sort of thing we care about (at this point; this routine
14716 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14717 fold cases do now) is discovering that
14718
14719 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14720
14721 is a multiple of
14722
14723 SAVE_EXPR (J * 8)
14724
14725 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14726
14727 This code also handles discovering that
14728
14729 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14730
14731 is a multiple of 8 so we don't have to worry about dealing with a
14732 possible remainder.
14733
14734 Note that we *look* inside a SAVE_EXPR only to determine how it was
14735 calculated; it is not safe for fold to do much of anything else with the
14736 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14737 at run time. For example, the latter example above *cannot* be implemented
14738 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14739 evaluation time of the original SAVE_EXPR is not necessarily the same at
14740 the time the new expression is evaluated. The only optimization of this
14741 sort that would be valid is changing
14742
14743 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14744
14745 divided by 8 to
14746
14747 SAVE_EXPR (I) * SAVE_EXPR (J)
14748
14749 (where the same SAVE_EXPR (J) is used in the original and the
14750 transformed version). */
14751
14752 int
14753 multiple_of_p (tree type, const_tree top, const_tree bottom)
14754 {
14755 if (operand_equal_p (top, bottom, 0))
14756 return 1;
14757
14758 if (TREE_CODE (type) != INTEGER_TYPE)
14759 return 0;
14760
14761 switch (TREE_CODE (top))
14762 {
14763 case BIT_AND_EXPR:
14764 /* Bitwise and provides a power of two multiple. If the mask is
14765 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14766 if (!integer_pow2p (bottom))
14767 return 0;
14768 /* FALLTHRU */
14769
14770 case MULT_EXPR:
14771 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14772 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14773
14774 case PLUS_EXPR:
14775 case MINUS_EXPR:
14776 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14777 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14778
14779 case LSHIFT_EXPR:
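/* E.g. (illustrative): TOP = I << 3 is a multiple of BOTTOM = 8,
   because 1 << 3 == 8 is.  */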
14780 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14781 {
14782 tree op1, t1;
14783
14784 op1 = TREE_OPERAND (top, 1);
14785 /* const_binop may not detect overflow correctly,
14786 so check for it explicitly here. */
14787 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14788 && 0 != (t1 = fold_convert (type,
14789 const_binop (LSHIFT_EXPR,
14790 size_one_node,
14791 op1)))
14792 && !TREE_OVERFLOW (t1))
14793 return multiple_of_p (type, t1, bottom);
14794 }
14795 return 0;
14796
14797 case NOP_EXPR:
14798 /* Can't handle conversions from non-integral or wider integral type. */
14799 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14800 || (TYPE_PRECISION (type)
14801 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14802 return 0;
14803
14804 /* ... fall through ... */
14805
14806 case SAVE_EXPR:
14807 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14808
14809 case COND_EXPR:
14810 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14811 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14812
14813 case INTEGER_CST:
14814 if (TREE_CODE (bottom) != INTEGER_CST
14815 || integer_zerop (bottom)
14816 || (TYPE_UNSIGNED (type)
14817 && (tree_int_cst_sgn (top) < 0
14818 || tree_int_cst_sgn (bottom) < 0)))
14819 return 0;
14820 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14821 SIGNED);
14822
14823 default:
14824 return 0;
14825 }
14826 }
14827
14828 /* Return true if CODE or TYPE is known to be non-negative. */
14829
14830 static bool
14831 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14832 {
14833 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14834 && truth_value_p (code))
14835 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14836 have a signed:1 type (where the values are -1 and 0). */
14837 return true;
14838 return false;
14839 }
14840
14841 /* Return true if (CODE OP0) is known to be non-negative. If the return
14842 value is based on the assumption that signed overflow is undefined,
14843 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14844 *STRICT_OVERFLOW_P. */
14845
14846 bool
14847 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14848 bool *strict_overflow_p)
14849 {
14850 if (TYPE_UNSIGNED (type))
14851 return true;
14852
14853 switch (code)
14854 {
14855 case ABS_EXPR:
14856 /* We can't return 1 if flag_wrapv is set because
14857 ABS_EXPR<INT_MIN> = INT_MIN. */
14858 if (!INTEGRAL_TYPE_P (type))
14859 return true;
14860 if (TYPE_OVERFLOW_UNDEFINED (type))
14861 {
14862 *strict_overflow_p = true;
14863 return true;
14864 }
14865 break;
14866
14867 case NON_LVALUE_EXPR:
14868 case FLOAT_EXPR:
14869 case FIX_TRUNC_EXPR:
14870 return tree_expr_nonnegative_warnv_p (op0,
14871 strict_overflow_p);
14872
14873 case NOP_EXPR:
14874 {
14875 tree inner_type = TREE_TYPE (op0);
14876 tree outer_type = type;
14877
14878 if (TREE_CODE (outer_type) == REAL_TYPE)
14879 {
14880 if (TREE_CODE (inner_type) == REAL_TYPE)
14881 return tree_expr_nonnegative_warnv_p (op0,
14882 strict_overflow_p);
14883 if (INTEGRAL_TYPE_P (inner_type))
14884 {
14885 if (TYPE_UNSIGNED (inner_type))
14886 return true;
14887 return tree_expr_nonnegative_warnv_p (op0,
14888 strict_overflow_p);
14889 }
14890 }
14891 else if (INTEGRAL_TYPE_P (outer_type))
14892 {
14893 if (TREE_CODE (inner_type) == REAL_TYPE)
14894 return tree_expr_nonnegative_warnv_p (op0,
14895 strict_overflow_p);
14896 if (INTEGRAL_TYPE_P (inner_type))
14897 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14898 && TYPE_UNSIGNED (inner_type);
14899 }
14900 }
14901 break;
14902
14903 default:
14904 return tree_simple_nonnegative_warnv_p (code, type);
14905 }
14906
14907 /* We don't know sign of `t', so be conservative and return false. */
14908 return false;
14909 }
14910
14911 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14912 value is based on the assumption that signed overflow is undefined,
14913 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14914 *STRICT_OVERFLOW_P. */
14915
14916 bool
14917 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14918 tree op1, bool *strict_overflow_p)
14919 {
14920 if (TYPE_UNSIGNED (type))
14921 return true;
14922
14923 switch (code)
14924 {
14925 case POINTER_PLUS_EXPR:
14926 case PLUS_EXPR:
14927 if (FLOAT_TYPE_P (type))
14928 return (tree_expr_nonnegative_warnv_p (op0,
14929 strict_overflow_p)
14930 && tree_expr_nonnegative_warnv_p (op1,
14931 strict_overflow_p));
14932
14933 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14934 both unsigned and at least 2 bits shorter than the result. */
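/* E.g. (illustrative): adding two zero-extended 8-bit values in a
   32-bit int needs at most MAX (8, 8) + 1 == 9 bits, and 9 < 32, so
   the sum cannot be negative.  */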
14935 if (TREE_CODE (type) == INTEGER_TYPE
14936 && TREE_CODE (op0) == NOP_EXPR
14937 && TREE_CODE (op1) == NOP_EXPR)
14938 {
14939 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14940 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14941 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14942 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14943 {
14944 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14945 TYPE_PRECISION (inner2)) + 1;
14946 return prec < TYPE_PRECISION (type);
14947 }
14948 }
14949 break;
14950
14951 case MULT_EXPR:
14952 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14953 {
14954 /* x * x is always non-negative for floating point x
14955 or without overflow. */
14956 if (operand_equal_p (op0, op1, 0)
14957 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14958 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14959 {
14960 if (TYPE_OVERFLOW_UNDEFINED (type))
14961 *strict_overflow_p = true;
14962 return true;
14963 }
14964 }
14965
14966 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14967 both unsigned and their combined precision is less than that of the result. */
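/* E.g. (illustrative): multiplying two zero-extended 8-bit values
   in a 32-bit int needs at most 8 + 8 == 16 bits, and 16 < 32, so
   the product cannot be negative.  */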
14968 if (TREE_CODE (type) == INTEGER_TYPE
14969 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14970 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14971 {
14972 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14973 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14974 : TREE_TYPE (op0);
14975 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14976 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14977 : TREE_TYPE (op1);
14978
14979 bool unsigned0 = TYPE_UNSIGNED (inner0);
14980 bool unsigned1 = TYPE_UNSIGNED (inner1);
14981
14982 if (TREE_CODE (op0) == INTEGER_CST)
14983 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14984
14985 if (TREE_CODE (op1) == INTEGER_CST)
14986 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14987
14988 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14989 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14990 {
14991 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14992 ? tree_int_cst_min_precision (op0, UNSIGNED)
14993 : TYPE_PRECISION (inner0);
14994
14995 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14996 ? tree_int_cst_min_precision (op1, UNSIGNED)
14997 : TYPE_PRECISION (inner1);
14998
14999 return precision0 + precision1 < TYPE_PRECISION (type);
15000 }
15001 }
15002 return false;
15003
15004 case BIT_AND_EXPR:
15005 case MAX_EXPR:
15006 return (tree_expr_nonnegative_warnv_p (op0,
15007 strict_overflow_p)
15008 || tree_expr_nonnegative_warnv_p (op1,
15009 strict_overflow_p));
15010
15011 case BIT_IOR_EXPR:
15012 case BIT_XOR_EXPR:
15013 case MIN_EXPR:
15014 case RDIV_EXPR:
15015 case TRUNC_DIV_EXPR:
15016 case CEIL_DIV_EXPR:
15017 case FLOOR_DIV_EXPR:
15018 case ROUND_DIV_EXPR:
15019 return (tree_expr_nonnegative_warnv_p (op0,
15020 strict_overflow_p)
15021 && tree_expr_nonnegative_warnv_p (op1,
15022 strict_overflow_p));
15023
15024 case TRUNC_MOD_EXPR:
15025 case CEIL_MOD_EXPR:
15026 case FLOOR_MOD_EXPR:
15027 case ROUND_MOD_EXPR:
15028 return tree_expr_nonnegative_warnv_p (op0,
15029 strict_overflow_p);
15030 default:
15031 return tree_simple_nonnegative_warnv_p (code, type);
15032 }
15033
15034 /* We don't know sign of `t', so be conservative and return false. */
15035 return false;
15036 }
15037
15038 /* Return true if T is known to be non-negative. If the return
15039 value is based on the assumption that signed overflow is undefined,
15040 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15041 *STRICT_OVERFLOW_P. */
15042
15043 bool
15044 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15045 {
15046 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15047 return true;
15048
15049 switch (TREE_CODE (t))
15050 {
15051 case INTEGER_CST:
15052 return tree_int_cst_sgn (t) >= 0;
15053
15054 case REAL_CST:
15055 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15056
15057 case FIXED_CST:
15058 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15059
15060 case COND_EXPR:
15061 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15062 strict_overflow_p)
15063 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15064 strict_overflow_p));
15065 default:
15066 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15067 TREE_TYPE (t));
15068 }
15069 /* We don't know sign of `t', so be conservative and return false. */
15070 return false;
15071 }
15072
15073 /* Return true if T is known to be non-negative. If the return
15074 value is based on the assumption that signed overflow is undefined,
15075 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15076 *STRICT_OVERFLOW_P. */
15077
15078 bool
15079 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15080 tree arg0, tree arg1, bool *strict_overflow_p)
15081 {
15082 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15083 switch (DECL_FUNCTION_CODE (fndecl))
15084 {
15085 CASE_FLT_FN (BUILT_IN_ACOS):
15086 CASE_FLT_FN (BUILT_IN_ACOSH):
15087 CASE_FLT_FN (BUILT_IN_CABS):
15088 CASE_FLT_FN (BUILT_IN_COSH):
15089 CASE_FLT_FN (BUILT_IN_ERFC):
15090 CASE_FLT_FN (BUILT_IN_EXP):
15091 CASE_FLT_FN (BUILT_IN_EXP10):
15092 CASE_FLT_FN (BUILT_IN_EXP2):
15093 CASE_FLT_FN (BUILT_IN_FABS):
15094 CASE_FLT_FN (BUILT_IN_FDIM):
15095 CASE_FLT_FN (BUILT_IN_HYPOT):
15096 CASE_FLT_FN (BUILT_IN_POW10):
15097 CASE_INT_FN (BUILT_IN_FFS):
15098 CASE_INT_FN (BUILT_IN_PARITY):
15099 CASE_INT_FN (BUILT_IN_POPCOUNT):
15100 CASE_INT_FN (BUILT_IN_CLZ):
15101 CASE_INT_FN (BUILT_IN_CLRSB):
15102 case BUILT_IN_BSWAP32:
15103 case BUILT_IN_BSWAP64:
15104 /* Always true. */
15105 return true;
15106
15107 CASE_FLT_FN (BUILT_IN_SQRT):
15108 /* sqrt(-0.0) is -0.0. */
15109 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15110 return true;
15111 return tree_expr_nonnegative_warnv_p (arg0,
15112 strict_overflow_p);
15113
15114 CASE_FLT_FN (BUILT_IN_ASINH):
15115 CASE_FLT_FN (BUILT_IN_ATAN):
15116 CASE_FLT_FN (BUILT_IN_ATANH):
15117 CASE_FLT_FN (BUILT_IN_CBRT):
15118 CASE_FLT_FN (BUILT_IN_CEIL):
15119 CASE_FLT_FN (BUILT_IN_ERF):
15120 CASE_FLT_FN (BUILT_IN_EXPM1):
15121 CASE_FLT_FN (BUILT_IN_FLOOR):
15122 CASE_FLT_FN (BUILT_IN_FMOD):
15123 CASE_FLT_FN (BUILT_IN_FREXP):
15124 CASE_FLT_FN (BUILT_IN_ICEIL):
15125 CASE_FLT_FN (BUILT_IN_IFLOOR):
15126 CASE_FLT_FN (BUILT_IN_IRINT):
15127 CASE_FLT_FN (BUILT_IN_IROUND):
15128 CASE_FLT_FN (BUILT_IN_LCEIL):
15129 CASE_FLT_FN (BUILT_IN_LDEXP):
15130 CASE_FLT_FN (BUILT_IN_LFLOOR):
15131 CASE_FLT_FN (BUILT_IN_LLCEIL):
15132 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15133 CASE_FLT_FN (BUILT_IN_LLRINT):
15134 CASE_FLT_FN (BUILT_IN_LLROUND):
15135 CASE_FLT_FN (BUILT_IN_LRINT):
15136 CASE_FLT_FN (BUILT_IN_LROUND):
15137 CASE_FLT_FN (BUILT_IN_MODF):
15138 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15139 CASE_FLT_FN (BUILT_IN_RINT):
15140 CASE_FLT_FN (BUILT_IN_ROUND):
15141 CASE_FLT_FN (BUILT_IN_SCALB):
15142 CASE_FLT_FN (BUILT_IN_SCALBLN):
15143 CASE_FLT_FN (BUILT_IN_SCALBN):
15144 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15145 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15146 CASE_FLT_FN (BUILT_IN_SINH):
15147 CASE_FLT_FN (BUILT_IN_TANH):
15148 CASE_FLT_FN (BUILT_IN_TRUNC):
15149 /* True if the 1st argument is nonnegative. */
15150 return tree_expr_nonnegative_warnv_p (arg0,
15151 strict_overflow_p);
15152
15153 CASE_FLT_FN (BUILT_IN_FMAX):
15154 /* True if either the 1st or the 2nd argument is nonnegative. */
15155 return (tree_expr_nonnegative_warnv_p (arg0,
15156 strict_overflow_p)
15157 || (tree_expr_nonnegative_warnv_p (arg1,
15158 strict_overflow_p)));
15159
15160 CASE_FLT_FN (BUILT_IN_FMIN):
15161 /* True if both the 1st and the 2nd arguments are nonnegative. */
15162 return (tree_expr_nonnegative_warnv_p (arg0,
15163 strict_overflow_p)
15164 && (tree_expr_nonnegative_warnv_p (arg1,
15165 strict_overflow_p)));
15166
15167 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15168 /* True if the 2nd argument is nonnegative. */
15169 return tree_expr_nonnegative_warnv_p (arg1,
15170 strict_overflow_p);
15171
15172 CASE_FLT_FN (BUILT_IN_POWI):
15173 /* True if the 1st argument is nonnegative or the second
15174 argument is an even integer. */
15175 if (TREE_CODE (arg1) == INTEGER_CST
15176 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15177 return true;
15178 return tree_expr_nonnegative_warnv_p (arg0,
15179 strict_overflow_p);
15180
15181 CASE_FLT_FN (BUILT_IN_POW):
15182 /* True if the 1st argument is nonnegative or the second
15183 argument is an even integer valued real. */
15184 if (TREE_CODE (arg1) == REAL_CST)
15185 {
15186 REAL_VALUE_TYPE c;
15187 HOST_WIDE_INT n;
15188
15189 c = TREE_REAL_CST (arg1);
15190 n = real_to_integer (&c);
15191 if ((n & 1) == 0)
15192 {
15193 REAL_VALUE_TYPE cint;
15194 real_from_integer (&cint, VOIDmode, n, SIGNED);
15195 if (real_identical (&c, &cint))
15196 return true;
15197 }
15198 }
15199 return tree_expr_nonnegative_warnv_p (arg0,
15200 strict_overflow_p);
15201
15202 default:
15203 break;
15204 }
15205 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15206 type);
15207 }
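/* Editor's sketch, not part of GCC: the BUILT_IN_POW case above accepts
   pow (x, 2.0) for any x because it round-trips the REAL_CST exponent
   through an integer and tests its parity; 2.5 fails the round-trip and
   3.0 fails the parity test.  The same check on a plain double,
   assuming the exponent fits in a long long:  */
#if 0
static bool
example_is_even_integer_valued (double c)
{
  long long n = (long long) c;             /* Truncate toward zero.  */
  return (n & 1) == 0 && (double) n == c;  /* Even and exactly integral.  */
}
/* example_is_even_integer_valued (2.0) => true
   example_is_even_integer_valued (2.5) => false (not integral)
   example_is_even_integer_valued (3.0) => false (odd)  */
#endif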
15208
15209 /* Return true if T is known to be non-negative. If the return
15210 value is based on the assumption that signed overflow is undefined,
15211 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15212 *STRICT_OVERFLOW_P. */
15213
15214 static bool
15215 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15216 {
15217 enum tree_code code = TREE_CODE (t);
15218 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15219 return true;
15220
15221 switch (code)
15222 {
15223 case TARGET_EXPR:
15224 {
15225 tree temp = TARGET_EXPR_SLOT (t);
15226 t = TARGET_EXPR_INITIAL (t);
15227
15228 /* If the initializer is non-void, then it's a normal expression
15229 that will be assigned to the slot. */
15230 if (!VOID_TYPE_P (t))
15231 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15232
15233 /* Otherwise, the initializer sets the slot in some way. One common
15234 way is an assignment statement at the end of the initializer. */
15235 while (1)
15236 {
15237 if (TREE_CODE (t) == BIND_EXPR)
15238 t = expr_last (BIND_EXPR_BODY (t));
15239 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15240 || TREE_CODE (t) == TRY_CATCH_EXPR)
15241 t = expr_last (TREE_OPERAND (t, 0));
15242 else if (TREE_CODE (t) == STATEMENT_LIST)
15243 t = expr_last (t);
15244 else
15245 break;
15246 }
15247 if (TREE_CODE (t) == MODIFY_EXPR
15248 && TREE_OPERAND (t, 0) == temp)
15249 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15250 strict_overflow_p);
15251
15252 return false;
15253 }
15254
15255 case CALL_EXPR:
15256 {
15257 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15258 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15259
15260 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15261 get_callee_fndecl (t),
15262 arg0,
15263 arg1,
15264 strict_overflow_p);
15265 }
15266 case COMPOUND_EXPR:
15267 case MODIFY_EXPR:
15268 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15269 strict_overflow_p);
15270 case BIND_EXPR:
15271 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15272 strict_overflow_p);
15273 case SAVE_EXPR:
15274 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15275 strict_overflow_p);
15276
15277 default:
15278 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15279 TREE_TYPE (t));
15280 }
15281
15282 /* We don't know the sign of `t', so be conservative and return false. */
15283 return false;
15284 }
15285
15286 /* Return true if T is known to be non-negative. If the return
15287 value is based on the assumption that signed overflow is undefined,
15288 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15289 *STRICT_OVERFLOW_P. */
15290
15291 bool
15292 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15293 {
15294 enum tree_code code;
15295 if (t == error_mark_node)
15296 return false;
15297
15298 code = TREE_CODE (t);
15299 switch (TREE_CODE_CLASS (code))
15300 {
15301 case tcc_binary:
15302 case tcc_comparison:
15303 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15304 TREE_TYPE (t),
15305 TREE_OPERAND (t, 0),
15306 TREE_OPERAND (t, 1),
15307 strict_overflow_p);
15308
15309 case tcc_unary:
15310 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15311 TREE_TYPE (t),
15312 TREE_OPERAND (t, 0),
15313 strict_overflow_p);
15314
15315 case tcc_constant:
15316 case tcc_declaration:
15317 case tcc_reference:
15318 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15319
15320 default:
15321 break;
15322 }
15323
15324 switch (code)
15325 {
15326 case TRUTH_AND_EXPR:
15327 case TRUTH_OR_EXPR:
15328 case TRUTH_XOR_EXPR:
15329 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15330 TREE_TYPE (t),
15331 TREE_OPERAND (t, 0),
15332 TREE_OPERAND (t, 1),
15333 strict_overflow_p);
15334 case TRUTH_NOT_EXPR:
15335 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15336 TREE_TYPE (t),
15337 TREE_OPERAND (t, 0),
15338 strict_overflow_p);
15339
15340 case COND_EXPR:
15341 case CONSTRUCTOR:
15342 case OBJ_TYPE_REF:
15343 case ASSERT_EXPR:
15344 case ADDR_EXPR:
15345 case WITH_SIZE_EXPR:
15346 case SSA_NAME:
15347 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15348
15349 default:
15350 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15351 }
15352 }
15353
15354 /* Return true if `t' is known to be non-negative. Handle warnings
15355 about undefined signed overflow. */
15356
15357 bool
15358 tree_expr_nonnegative_p (tree t)
15359 {
15360 bool ret, strict_overflow_p;
15361
15362 strict_overflow_p = false;
15363 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15364 if (strict_overflow_p)
15365 fold_overflow_warning (("assuming signed overflow does not occur when "
15366 "determining that expression is always "
15367 "non-negative"),
15368 WARN_STRICT_OVERFLOW_MISC);
15369 return ret;
15370 }
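/* Editor's sketch, not part of GCC: a caller that wants the answer
   without the automatic -Wstrict-overflow diagnostic uses the _warnv_p
   entry point directly and inspects the flag itself.  */
#if 0
static bool
example_nonnegative_without_warning (tree expr)
{
  bool strict_overflow_p = false;
  bool nonneg = tree_expr_nonnegative_warnv_p (expr, &strict_overflow_p);
  /* strict_overflow_p is now true iff the answer relied on signed
     overflow being undefined.  */
  return nonneg && !strict_overflow_p;
}
#endif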
15371
15372
15373 /* Return true when the expression (CODE OP0) is known to be nonzero.
15374 For floating point we further ensure that it is not denormal.
15375 Similar logic is present in nonzero_address_p in rtlanal.c.
15376
15377 If the return value is based on the assumption that signed overflow
15378 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15379 change *STRICT_OVERFLOW_P. */
15380
15381 bool
15382 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15383 bool *strict_overflow_p)
15384 {
15385 switch (code)
15386 {
15387 case ABS_EXPR:
15388 return tree_expr_nonzero_warnv_p (op0,
15389 strict_overflow_p);
15390
15391 case NOP_EXPR:
15392 {
15393 tree inner_type = TREE_TYPE (op0);
15394 tree outer_type = type;
15395
15396 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15397 && tree_expr_nonzero_warnv_p (op0,
15398 strict_overflow_p));
15399 }
15400 break;
15401
15402 case NON_LVALUE_EXPR:
15403 return tree_expr_nonzero_warnv_p (op0,
15404 strict_overflow_p);
15405
15406 default:
15407 break;
15408 }
15409
15410 return false;
15411 }
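/* Editor's sketch, not part of GCC: why the NOP_EXPR case above insists
   that the conversion not narrow.  Widening a nonzero value keeps it
   nonzero, but narrowing can truncate it to zero:  */
#if 0
  unsigned int nonzero = 256;
  unsigned char narrowed = (unsigned char) nonzero; /* == 0: nonzero lost.  */
  unsigned long widened = (unsigned long) nonzero;  /* == 256: still nonzero.  */
#endif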
15412
15413 /* Return true when the expression (CODE OP0 OP1) is known to be nonzero.
15414 For floating point we further ensure that it is not denormal.
15415 Similar logic is present in nonzero_address_p in rtlanal.c.
15416
15417 If the return value is based on the assumption that signed overflow
15418 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15419 change *STRICT_OVERFLOW_P. */
15420
15421 bool
15422 tree_binary_nonzero_warnv_p (enum tree_code code,
15423 tree type,
15424 tree op0,
15425 tree op1, bool *strict_overflow_p)
15426 {
15427 bool sub_strict_overflow_p;
15428 switch (code)
15429 {
15430 case POINTER_PLUS_EXPR:
15431 case PLUS_EXPR:
15432 if (TYPE_OVERFLOW_UNDEFINED (type))
15433 {
15434 /* With the presence of negative values it is hard
15435 to say something. */
15436 sub_strict_overflow_p = false;
15437 if (!tree_expr_nonnegative_warnv_p (op0,
15438 &sub_strict_overflow_p)
15439 || !tree_expr_nonnegative_warnv_p (op1,
15440 &sub_strict_overflow_p))
15441 return false;
15442 /* One of the operands must be positive and the other non-negative. */
15443 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15444 overflows, on a two's-complement machine the sum of two
15445 nonnegative numbers, at least one of them nonzero, can never be zero. */
15446 return (tree_expr_nonzero_warnv_p (op0,
15447 strict_overflow_p)
15448 || tree_expr_nonzero_warnv_p (op1,
15449 strict_overflow_p));
15450 }
15451 break;
15452
15453 case MULT_EXPR:
15454 if (TYPE_OVERFLOW_UNDEFINED (type))
15455 {
15456 if (tree_expr_nonzero_warnv_p (op0,
15457 strict_overflow_p)
15458 && tree_expr_nonzero_warnv_p (op1,
15459 strict_overflow_p))
15460 {
15461 *strict_overflow_p = true;
15462 return true;
15463 }
15464 }
15465 break;
15466
15467 case MIN_EXPR:
15468 sub_strict_overflow_p = false;
15469 if (tree_expr_nonzero_warnv_p (op0,
15470 &sub_strict_overflow_p)
15471 && tree_expr_nonzero_warnv_p (op1,
15472 &sub_strict_overflow_p))
15473 {
15474 if (sub_strict_overflow_p)
15475 *strict_overflow_p = true;
/* The MIN of two nonzero values is one of them and hence nonzero. */
return true;
15476 }
15477 break;
15478
15479 case MAX_EXPR:
15480 sub_strict_overflow_p = false;
15481 if (tree_expr_nonzero_warnv_p (op0,
15482 &sub_strict_overflow_p))
15483 {
15484 if (sub_strict_overflow_p)
15485 *strict_overflow_p = true;
15486
15487 /* When both operands are nonzero, MAX must be too. */
15488 if (tree_expr_nonzero_warnv_p (op1,
15489 strict_overflow_p))
15490 return true;
15491
15492 /* MAX where operand 0 is positive is positive. */
15493 return tree_expr_nonnegative_warnv_p (op0,
15494 strict_overflow_p);
15495 }
15496 /* MAX where operand 1 is positive is positive. */
15497 else if (tree_expr_nonzero_warnv_p (op1,
15498 &sub_strict_overflow_p)
15499 && tree_expr_nonnegative_warnv_p (op1,
15500 &sub_strict_overflow_p))
15501 {
15502 if (sub_strict_overflow_p)
15503 *strict_overflow_p = true;
15504 return true;
15505 }
15506 break;
15507
15508 case BIT_IOR_EXPR:
15509 return (tree_expr_nonzero_warnv_p (op1,
15510 strict_overflow_p)
15511 || tree_expr_nonzero_warnv_p (op0,
15512 strict_overflow_p));
15513
15514 default:
15515 break;
15516 }
15517
15518 return false;
15519 }
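/* Editor's sketch, not part of GCC: the PLUS_EXPR argument above,
   checked exhaustively for 8-bit operands.  Two nonnegative n-bit
   values are each at most 2^(n-1) - 1, so their true sum is at most
   2^n - 2 and is congruent to zero mod 2^n only when both are zero.  */
#if 0
static int
example_check_plus_rule (void)
{
  for (int a = 0; a <= 127; a++)    /* All nonnegative signed chars.  */
    for (int b = 0; b <= 127; b++)
      if ((a != 0 || b != 0) && (unsigned char) (a + b) == 0)
        return 0;                   /* Counterexample found.  */
  return 1;                         /* The rule holds exhaustively.  */
}
#endif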
15520
15521 /* Return true when T is known to be nonzero.
15522 For floating point we further ensure that T is not denormal.
15523 Similar logic is present in nonzero_address_p in rtlanal.c.
15524
15525 If the return value is based on the assumption that signed overflow
15526 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15527 change *STRICT_OVERFLOW_P. */
15528
15529 bool
15530 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15531 {
15532 bool sub_strict_overflow_p;
15533 switch (TREE_CODE (t))
15534 {
15535 case INTEGER_CST:
15536 return !integer_zerop (t);
15537
15538 case ADDR_EXPR:
15539 {
15540 tree base = TREE_OPERAND (t, 0);
15541
15542 if (!DECL_P (base))
15543 base = get_base_address (base);
15544
15545 if (!base)
15546 return false;
15547
15548 /* For objects in the symbol table, check whether we know they are non-zero.
15549 Don't do anything for variables and functions before symtab is built;
15550 it is quite possible that they will be declared weak later. */
15551 if (DECL_P (base) && decl_in_symtab_p (base))
15552 {
15553 struct symtab_node *symbol;
15554
15555 symbol = symtab_node::get_create (base);
15556 if (symbol)
15557 return symbol->nonzero_address ();
15558 else
15559 return false;
15560 }
15561
15562 /* Function local objects are never NULL. */
15563 if (DECL_P (base)
15564 && (DECL_CONTEXT (base)
15565 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15566 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15567 return true;
15568
15569 /* Constants are never weak. */
15570 if (CONSTANT_CLASS_P (base))
15571 return true;
15572
15573 return false;
15574 }
15575
15576 case COND_EXPR:
15577 sub_strict_overflow_p = false;
15578 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15579 &sub_strict_overflow_p)
15580 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15581 &sub_strict_overflow_p))
15582 {
15583 if (sub_strict_overflow_p)
15584 *strict_overflow_p = true;
15585 return true;
15586 }
15587 break;
15588
15589 default:
15590 break;
15591 }
15592 return false;
15593 }
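/* Editor's sketch, not part of GCC: the ADDR_EXPR logic above at the
   source level.  The address of a function-local auto variable is
   always nonzero, while the address of a weak symbol may legitimately
   be null and so must go through the symbol table query.  */
#if 0
extern int weak_sym __attribute__ ((weak));
static int
example_address_nonzero (void)
{
  int local;
  int a = (&local != 0);     /* Foldable to 1: auto var in this fn.  */
  int b = (&weak_sym != 0);  /* Not foldable: weak_sym may be absent.  */
  return a + b;
}
#endif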
15594
15595 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15596 attempt to fold the expression to a constant without modifying TYPE,
15597 OP0 or OP1.
15598
15599 If the expression could be simplified to a constant, then return
15600 the constant. If the expression would not be simplified to a
15601 constant, then return NULL_TREE. */
15602
15603 tree
15604 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15605 {
15606 tree tem = fold_binary (code, type, op0, op1);
15607 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15608 }
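/* Editor's sketch, not part of GCC: fold_binary_to_constant yields
   either a fully constant tree or NULL_TREE, never a partially
   simplified expression.  */
#if 0
static void
example_fold_binary_to_constant (void)
{
  tree four = build_int_cst (integer_type_node, 4);
  tree five = build_int_cst (integer_type_node, 5);
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                      four, five);
  gcc_assert (sum != NULL_TREE && tree_to_shwi (sum) == 9);
}
#endif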
15609
15610 /* Given the components of a unary expression CODE, TYPE and OP0,
15611 attempt to fold the expression to a constant without modifying
15612 TYPE or OP0.
15613
15614 If the expression could be simplified to a constant, then return
15615 the constant. If the expression would not be simplified to a
15616 constant, then return NULL_TREE. */
15617
15618 tree
15619 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15620 {
15621 tree tem = fold_unary (code, type, op0);
15622 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15623 }
15624
15625 /* If EXP represents referencing an element in a constant string
15626 (either via pointer arithmetic or array indexing), return the
15627 tree representing the value accessed, otherwise return NULL. */
15628
15629 tree
15630 fold_read_from_constant_string (tree exp)
15631 {
15632 if ((TREE_CODE (exp) == INDIRECT_REF
15633 || TREE_CODE (exp) == ARRAY_REF)
15634 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15635 {
15636 tree exp1 = TREE_OPERAND (exp, 0);
15637 tree index;
15638 tree string;
15639 location_t loc = EXPR_LOCATION (exp);
15640
15641 if (TREE_CODE (exp) == INDIRECT_REF)
15642 string = string_constant (exp1, &index);
15643 else
15644 {
15645 tree low_bound = array_ref_low_bound (exp);
15646 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15647
15648 /* Optimize the special-case of a zero lower bound.
15649
15650 We convert the low_bound to sizetype to avoid some problems
15651 with constant folding. (E.g. suppose the lower bound is 1,
15652 and its mode is QI. Without the conversion, (ARRAY
15653 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15654 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15655 if (! integer_zerop (low_bound))
15656 index = size_diffop_loc (loc, index,
15657 fold_convert_loc (loc, sizetype, low_bound));
15658
15659 string = exp1;
15660 }
15661
15662 if (string
15663 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15664 && TREE_CODE (string) == STRING_CST
15665 && TREE_CODE (index) == INTEGER_CST
15666 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15667 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15668 == MODE_INT)
15669 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15670 return build_int_cst_type (TREE_TYPE (exp),
15671 (TREE_STRING_POINTER (string)
15672 [TREE_INT_CST_LOW (index)]));
15673 }
15674 return NULL;
15675 }
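/* Editor's sketch, not part of GCC: what the routine computes, at the
   source level.  For "abc"[1] the index is a constant below
   TREE_STRING_LENGTH and the element has a one-byte integer mode, so
   the reference folds to the character constant 'b'.  */
#if 0
  const char *s = "abc";
  char c = s[1];  /* The GENERIC tree for this folds to 'b'.  */
  /* s[i] with a non-constant index i is left alone (NULL returned).  */
#endif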
15676
15677 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15678 an integer, real, or fixed-point constant.
15679
15680 TYPE is the type of the result. */
15681
15682 static tree
15683 fold_negate_const (tree arg0, tree type)
15684 {
15685 tree t = NULL_TREE;
15686
15687 switch (TREE_CODE (arg0))
15688 {
15689 case INTEGER_CST:
15690 {
15691 bool overflow;
15692 wide_int val = wi::neg (arg0, &overflow);
15693 t = force_fit_type (type, val, 1,
15694 (overflow | TREE_OVERFLOW (arg0))
15695 && !TYPE_UNSIGNED (type));
15696 break;
15697 }
15698
15699 case REAL_CST:
15700 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15701 break;
15702
15703 case FIXED_CST:
15704 {
15705 FIXED_VALUE_TYPE f;
15706 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15707 &(TREE_FIXED_CST (arg0)), NULL,
15708 TYPE_SATURATING (type));
15709 t = build_fixed (type, f);
15710 /* Propagate overflow flags. */
15711 if (overflow_p | TREE_OVERFLOW (arg0))
15712 TREE_OVERFLOW (t) = 1;
15713 break;
15714 }
15715
15716 default:
15717 gcc_unreachable ();
15718 }
15719
15720 return t;
15721 }
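/* Editor's sketch, not part of GCC: negating the most negative
   INTEGER_CST overflows, and force_fit_type records that by setting
   TREE_OVERFLOW on the result of a signed negation.  */
#if 0
static void
example_negate_int_min (void)
{
  tree int_min = TYPE_MIN_VALUE (integer_type_node);
  tree neg = fold_negate_const (int_min, integer_type_node);
  /* -INT_MIN wraps back to INT_MIN; the overflow is recorded.  */
  gcc_assert (TREE_OVERFLOW (neg));
}
#endif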
15722
15723 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15724 an integer constant or real constant.
15725
15726 TYPE is the type of the result. */
15727
15728 tree
15729 fold_abs_const (tree arg0, tree type)
15730 {
15731 tree t = NULL_TREE;
15732
15733 switch (TREE_CODE (arg0))
15734 {
15735 case INTEGER_CST:
15736 {
15737 /* If the value is unsigned or non-negative, then the absolute value
15738 is the same as the ordinary value. */
15739 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15740 t = arg0;
15741
15742 /* If the value is negative, then the absolute value is
15743 its negation. */
15744 else
15745 {
15746 bool overflow;
15747 wide_int val = wi::neg (arg0, &overflow);
15748 t = force_fit_type (type, val, -1,
15749 overflow | TREE_OVERFLOW (arg0));
15750 }
15751 }
15752 break;
15753
15754 case REAL_CST:
15755 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15756 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15757 else
15758 t = arg0;
15759 break;
15760
15761 default:
15762 gcc_unreachable ();
15763 }
15764
15765 return t;
15766 }
15767
15768 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15769 constant. TYPE is the type of the result. */
15770
15771 static tree
15772 fold_not_const (const_tree arg0, tree type)
15773 {
15774 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15775
15776 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15777 }
15778
15779 /* Given CODE, a relational operator, the target type, TYPE and two
15780 constant operands OP0 and OP1, return the result of the
15781 relational operation. If the result is not a compile time
15782 constant, then return NULL_TREE. */
15783
15784 static tree
15785 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15786 {
15787 int result, invert;
15788
15789 /* From here on, the only cases we handle are when the result is
15790 known to be a constant. */
15791
15792 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15793 {
15794 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15795 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15796
15797 /* Handle the cases where either operand is a NaN. */
15798 if (real_isnan (c0) || real_isnan (c1))
15799 {
15800 switch (code)
15801 {
15802 case EQ_EXPR:
15803 case ORDERED_EXPR:
15804 result = 0;
15805 break;
15806
15807 case NE_EXPR:
15808 case UNORDERED_EXPR:
15809 case UNLT_EXPR:
15810 case UNLE_EXPR:
15811 case UNGT_EXPR:
15812 case UNGE_EXPR:
15813 case UNEQ_EXPR:
15814 result = 1;
15815 break;
15816
15817 case LT_EXPR:
15818 case LE_EXPR:
15819 case GT_EXPR:
15820 case GE_EXPR:
15821 case LTGT_EXPR:
15822 if (flag_trapping_math)
15823 return NULL_TREE;
15824 result = 0;
15825 break;
15826
15827 default:
15828 gcc_unreachable ();
15829 }
15830
15831 return constant_boolean_node (result, type);
15832 }
15833
15834 return constant_boolean_node (real_compare (code, c0, c1), type);
15835 }
15836
15837 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15838 {
15839 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15840 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15841 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15842 }
15843
15844 /* Handle equality/inequality of complex constants. */
15845 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15846 {
15847 tree rcond = fold_relational_const (code, type,
15848 TREE_REALPART (op0),
15849 TREE_REALPART (op1));
15850 tree icond = fold_relational_const (code, type,
15851 TREE_IMAGPART (op0),
15852 TREE_IMAGPART (op1));
15853 if (code == EQ_EXPR)
15854 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15855 else if (code == NE_EXPR)
15856 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15857 else
15858 return NULL_TREE;
15859 }
15860
15861 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15862 {
15863 unsigned count = VECTOR_CST_NELTS (op0);
15864 tree *elts = XALLOCAVEC (tree, count);
15865 gcc_assert (VECTOR_CST_NELTS (op1) == count
15866 && TYPE_VECTOR_SUBPARTS (type) == count);
15867
15868 for (unsigned i = 0; i < count; i++)
15869 {
15870 tree elem_type = TREE_TYPE (type);
15871 tree elem0 = VECTOR_CST_ELT (op0, i);
15872 tree elem1 = VECTOR_CST_ELT (op1, i);
15873
15874 tree tem = fold_relational_const (code, elem_type,
15875 elem0, elem1);
15876
15877 if (tem == NULL_TREE)
15878 return NULL_TREE;
15879
15880 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15881 }
15882
15883 return build_vector (type, elts);
15884 }
15885
15886 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15887
15888 To compute GT, swap the arguments and do LT.
15889 To compute GE, do LT and invert the result.
15890 To compute LE, swap the arguments, do LT and invert the result.
15891 To compute NE, do EQ and invert the result.
15892
15893 Therefore, the code below must handle only EQ and LT. */
15894
15895 if (code == LE_EXPR || code == GT_EXPR)
15896 {
15897 tree tem = op0;
15898 op0 = op1;
15899 op1 = tem;
15900 code = swap_tree_comparison (code);
15901 }
15902
15903 /* Note that it is safe to invert for real values here because we
15904 have already handled the one case where it matters. */
15905
15906 invert = 0;
15907 if (code == NE_EXPR || code == GE_EXPR)
15908 {
15909 invert = 1;
15910 code = invert_tree_comparison (code, false);
15911 }
15912
15913 /* Compute a result for LT or EQ if args permit;
15914 otherwise return NULL_TREE. */
15915 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15916 {
15917 if (code == EQ_EXPR)
15918 result = tree_int_cst_equal (op0, op1);
15919 else
15920 result = tree_int_cst_lt (op0, op1);
15921 }
15922 else
15923 return NULL_TREE;
15924
15925 if (invert)
15926 result ^= 1;
15927 return constant_boolean_node (result, type);
15928 }
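/* Editor's sketch, not part of GCC: the NaN handling above.  EQ folds
   to false and NE to true even with a NaN operand, while the signaling
   comparisons (LT and friends) are left unfolded under -ftrapping-math
   so the runtime comparison can raise the invalid-operation
   exception.  */
#if 0
static void
example_nan_compare (void)
{
  REAL_VALUE_TYPE rnan;
  real_nan (&rnan, "", 1, TYPE_MODE (double_type_node));  /* Quiet NaN.  */
  tree nan = build_real (double_type_node, rnan);
  tree zero = build_real (double_type_node, dconst0);

  gcc_assert (integer_zerop (fold_relational_const (EQ_EXPR,
                                                    boolean_type_node,
                                                    nan, zero)));
  gcc_assert (integer_onep (fold_relational_const (NE_EXPR,
                                                   boolean_type_node,
                                                   nan, zero)));
}
#endif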
15929
15930 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15931 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15932 itself. */
15933
15934 tree
15935 fold_build_cleanup_point_expr (tree type, tree expr)
15936 {
15937 /* If the expression does not have side effects then we don't have to wrap
15938 it with a cleanup point expression. */
15939 if (!TREE_SIDE_EFFECTS (expr))
15940 return expr;
15941
15942 /* If the expression is a return, check whether the expression inside the
15943 return, or the right-hand side of the modify expression inside the
15944 return, has side effects. If it doesn't, we don't need to wrap the
15945 expression in a cleanup point expression. Note we don't check the
15946 left-hand side of the modify because it should always be the return decl. */
15947 if (TREE_CODE (expr) == RETURN_EXPR)
15948 {
15949 tree op = TREE_OPERAND (expr, 0);
15950 if (!op || !TREE_SIDE_EFFECTS (op))
15951 return expr;
15952 op = TREE_OPERAND (op, 1);
15953 if (!TREE_SIDE_EFFECTS (op))
15954 return expr;
15955 }
15956
15957 return build1 (CLEANUP_POINT_EXPR, type, expr);
15958 }
15959
15960 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15961 of an indirection through OP0, or NULL_TREE if no simplification is
15962 possible. */
15963
15964 tree
15965 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15966 {
15967 tree sub = op0;
15968 tree subtype;
15969
15970 STRIP_NOPS (sub);
15971 subtype = TREE_TYPE (sub);
15972 if (!POINTER_TYPE_P (subtype))
15973 return NULL_TREE;
15974
15975 if (TREE_CODE (sub) == ADDR_EXPR)
15976 {
15977 tree op = TREE_OPERAND (sub, 0);
15978 tree optype = TREE_TYPE (op);
15979 /* *&CONST_DECL -> to the value of the const decl. */
15980 if (TREE_CODE (op) == CONST_DECL)
15981 return DECL_INITIAL (op);
15982 /* *&p => p; make sure to handle *&"str"[cst] here. */
15983 if (type == optype)
15984 {
15985 tree fop = fold_read_from_constant_string (op);
15986 if (fop)
15987 return fop;
15988 else
15989 return op;
15990 }
15991 /* *(foo *)&fooarray => fooarray[0] */
15992 else if (TREE_CODE (optype) == ARRAY_TYPE
15993 && type == TREE_TYPE (optype)
15994 && (!in_gimple_form
15995 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15996 {
15997 tree type_domain = TYPE_DOMAIN (optype);
15998 tree min_val = size_zero_node;
15999 if (type_domain && TYPE_MIN_VALUE (type_domain))
16000 min_val = TYPE_MIN_VALUE (type_domain);
16001 if (in_gimple_form
16002 && TREE_CODE (min_val) != INTEGER_CST)
16003 return NULL_TREE;
16004 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16005 NULL_TREE, NULL_TREE);
16006 }
16007 /* *(foo *)&complexfoo => __real__ complexfoo */
16008 else if (TREE_CODE (optype) == COMPLEX_TYPE
16009 && type == TREE_TYPE (optype))
16010 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16011 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16012 else if (TREE_CODE (optype) == VECTOR_TYPE
16013 && type == TREE_TYPE (optype))
16014 {
16015 tree part_width = TYPE_SIZE (type);
16016 tree index = bitsize_int (0);
16017 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16018 }
16019 }
16020
16021 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16022 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16023 {
16024 tree op00 = TREE_OPERAND (sub, 0);
16025 tree op01 = TREE_OPERAND (sub, 1);
16026
16027 STRIP_NOPS (op00);
16028 if (TREE_CODE (op00) == ADDR_EXPR)
16029 {
16030 tree op00type;
16031 op00 = TREE_OPERAND (op00, 0);
16032 op00type = TREE_TYPE (op00);
16033
16034 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16035 if (TREE_CODE (op00type) == VECTOR_TYPE
16036 && type == TREE_TYPE (op00type))
16037 {
16038 HOST_WIDE_INT offset = tree_to_shwi (op01);
16039 tree part_width = TYPE_SIZE (type);
16040 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
16041 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16042 tree index = bitsize_int (indexi);
16043
16044 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16045 return fold_build3_loc (loc,
16046 BIT_FIELD_REF, type, op00,
16047 part_width, index);
16048
16049 }
16050 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16051 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16052 && type == TREE_TYPE (op00type))
16053 {
16054 tree size = TYPE_SIZE_UNIT (type);
16055 if (tree_int_cst_equal (size, op01))
16056 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16057 }
16058 /* ((foo *)&fooarray)[1] => fooarray[1] */
16059 else if (TREE_CODE (op00type) == ARRAY_TYPE
16060 && type == TREE_TYPE (op00type))
16061 {
16062 tree type_domain = TYPE_DOMAIN (op00type);
16063 tree min_val = size_zero_node;
16064 if (type_domain && TYPE_MIN_VALUE (type_domain))
16065 min_val = TYPE_MIN_VALUE (type_domain);
16066 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16067 TYPE_SIZE_UNIT (type));
16068 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16069 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16070 NULL_TREE, NULL_TREE);
16071 }
16072 }
16073 }
16074
16075 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16076 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16077 && type == TREE_TYPE (TREE_TYPE (subtype))
16078 && (!in_gimple_form
16079 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16080 {
16081 tree type_domain;
16082 tree min_val = size_zero_node;
16083 sub = build_fold_indirect_ref_loc (loc, sub);
16084 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16085 if (type_domain && TYPE_MIN_VALUE (type_domain))
16086 min_val = TYPE_MIN_VALUE (type_domain);
16087 if (in_gimple_form
16088 && TREE_CODE (min_val) != INTEGER_CST)
16089 return NULL_TREE;
16090 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16091 NULL_TREE);
16092 }
16093
16094 return NULL_TREE;
16095 }
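/* Editor's sketch, not part of GCC: the transformations above at the
   source level, for double d[4] and _Complex double z:  */
#if 0
  /* *(double *) &d      => d[0]        (ARRAY_TYPE case)          */
  /* *(double *) &z      => __real__ z  (COMPLEX_TYPE case)        */
  /* ((double *) &z)[1]  => __imag__ z  (POINTER_PLUS_EXPR case)   */
#endif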
16096
16097 /* Builds an expression for an indirection through T, simplifying some
16098 cases. */
16099
16100 tree
16101 build_fold_indirect_ref_loc (location_t loc, tree t)
16102 {
16103 tree type = TREE_TYPE (TREE_TYPE (t));
16104 tree sub = fold_indirect_ref_1 (loc, type, t);
16105
16106 if (sub)
16107 return sub;
16108
16109 return build1_loc (loc, INDIRECT_REF, type, t);
16110 }
16111
16112 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16113
16114 tree
16115 fold_indirect_ref_loc (location_t loc, tree t)
16116 {
16117 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16118
16119 if (sub)
16120 return sub;
16121 else
16122 return t;
16123 }
16124
16125 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16126 whose result is ignored. The type of the returned tree need not be
16127 the same as that of the original expression. */
16128
16129 tree
16130 fold_ignored_result (tree t)
16131 {
16132 if (!TREE_SIDE_EFFECTS (t))
16133 return integer_zero_node;
16134
16135 for (;;)
16136 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16137 {
16138 case tcc_unary:
16139 t = TREE_OPERAND (t, 0);
16140 break;
16141
16142 case tcc_binary:
16143 case tcc_comparison:
16144 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16145 t = TREE_OPERAND (t, 0);
16146 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16147 t = TREE_OPERAND (t, 1);
16148 else
16149 return t;
16150 break;
16151
16152 case tcc_expression:
16153 switch (TREE_CODE (t))
16154 {
16155 case COMPOUND_EXPR:
16156 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16157 return t;
16158 t = TREE_OPERAND (t, 0);
16159 break;
16160
16161 case COND_EXPR:
16162 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16163 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16164 return t;
16165 t = TREE_OPERAND (t, 0);
16166 break;
16167
16168 default:
16169 return t;
16170 }
16171 break;
16172
16173 default:
16174 return t;
16175 }
16176 }
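/* Editor's sketch, not part of GCC: what fold_ignored_result does for
   a few source forms, with side-effect-free int x, y and an external
   function f:  */
#if 0
  /* x * y        => 0     (no side effects at all)                */
  /* x + f ()     => f ()  (side-effect-free operand dropped)      */
  /* f () ? x : y => f ()  (both arms side-effect free)            */
  /* f () + g ()  => unchanged (both operands have side effects)   */
#endif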
16177
16178 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16179
16180 tree
16181 round_up_loc (location_t loc, tree value, unsigned int divisor)
16182 {
16183 tree div = NULL_TREE;
16184
16185 if (divisor == 1)
16186 return value;
16187
16188 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16189 have to do anything. Only do this when we are not given a constant,
16190 because for a constant this check is more expensive than simply
16191 performing the rounding. */
16192 if (TREE_CODE (value) != INTEGER_CST)
16193 {
16194 div = build_int_cst (TREE_TYPE (value), divisor);
16195
16196 if (multiple_of_p (TREE_TYPE (value), value, div))
16197 return value;
16198 }
16199
16200 /* If divisor is a power of two, simplify this to bit manipulation. */
16201 if (divisor == (divisor & -divisor))
16202 {
16203 if (TREE_CODE (value) == INTEGER_CST)
16204 {
16205 wide_int val = value;
16206 bool overflow_p;
16207
16208 if ((val & (divisor - 1)) == 0)
16209 return value;
16210
16211 overflow_p = TREE_OVERFLOW (value);
16212 val &= ~(divisor - 1);
16213 val += divisor;
16214 if (val == 0)
16215 overflow_p = true;
16216
16217 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16218 }
16219 else
16220 {
16221 tree t;
16222
16223 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16224 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16225 t = build_int_cst (TREE_TYPE (value), -divisor);
16226 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16227 }
16228 }
16229 else
16230 {
16231 if (!div)
16232 div = build_int_cst (TREE_TYPE (value), divisor);
16233 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16234 value = size_binop_loc (loc, MULT_EXPR, value, div);
16235 }
16236
16237 return value;
16238 }
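/* Editor's sketch, not part of GCC: for a power-of-two divisor the
   code above reduces rounding up to (value + divisor - 1) & -divisor.
   E.g. rounding 37 up to a multiple of 8: (37 + 7) & ~7 == 40.  In
   plain C:  */
#if 0
static unsigned int
example_round_up_pow2 (unsigned int value, unsigned int divisor)
{
  /* Assumes divisor is a nonzero power of two.  */
  return (value + divisor - 1) & -divisor;
}
#endif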
16239
16240 /* Likewise, but round down. */
16241
16242 tree
16243 round_down_loc (location_t loc, tree value, int divisor)
16244 {
16245 tree div = NULL_TREE;
16246
16247 gcc_assert (divisor > 0);
16248 if (divisor == 1)
16249 return value;
16250
16251 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16252 have to do anything. Only do this when we are not given a constant,
16253 because for a constant this check is more expensive than simply
16254 performing the rounding. */
16255 if (TREE_CODE (value) != INTEGER_CST)
16256 {
16257 div = build_int_cst (TREE_TYPE (value), divisor);
16258
16259 if (multiple_of_p (TREE_TYPE (value), value, div))
16260 return value;
16261 }
16262
16263 /* If divisor is a power of two, simplify this to bit manipulation. */
16264 if (divisor == (divisor & -divisor))
16265 {
16266 tree t;
16267
16268 t = build_int_cst (TREE_TYPE (value), -divisor);
16269 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16270 }
16271 else
16272 {
16273 if (!div)
16274 div = build_int_cst (TREE_TYPE (value), divisor);
16275 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16276 value = size_binop_loc (loc, MULT_EXPR, value, div);
16277 }
16278
16279 return value;
16280 }
16281
16282 /* Returns the pointer to the base of the object addressed by EXP and
16283 extracts the information about the offset of the access, storing it
16284 in *PBITPOS and *POFFSET. */
16285
16286 static tree
16287 split_address_to_core_and_offset (tree exp,
16288 HOST_WIDE_INT *pbitpos, tree *poffset)
16289 {
16290 tree core;
16291 machine_mode mode;
16292 int unsignedp, volatilep;
16293 HOST_WIDE_INT bitsize;
16294 location_t loc = EXPR_LOCATION (exp);
16295
16296 if (TREE_CODE (exp) == ADDR_EXPR)
16297 {
16298 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16299 poffset, &mode, &unsignedp, &volatilep,
16300 false);
16301 core = build_fold_addr_expr_loc (loc, core);
16302 }
16303 else
16304 {
16305 core = exp;
16306 *pbitpos = 0;
16307 *poffset = NULL_TREE;
16308 }
16309
16310 return core;
16311 }
16312
16313 /* Returns true if addresses of E1 and E2 differ by a constant, false
16314 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16315
16316 bool
16317 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16318 {
16319 tree core1, core2;
16320 HOST_WIDE_INT bitpos1, bitpos2;
16321 tree toffset1, toffset2, tdiff, type;
16322
16323 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16324 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16325
16326 if (bitpos1 % BITS_PER_UNIT != 0
16327 || bitpos2 % BITS_PER_UNIT != 0
16328 || !operand_equal_p (core1, core2, 0))
16329 return false;
16330
16331 if (toffset1 && toffset2)
16332 {
16333 type = TREE_TYPE (toffset1);
16334 if (type != TREE_TYPE (toffset2))
16335 toffset2 = fold_convert (type, toffset2);
16336
16337 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16338 if (!cst_and_fits_in_hwi (tdiff))
16339 return false;
16340
16341 *diff = int_cst_value (tdiff);
16342 }
16343 else if (toffset1 || toffset2)
16344 {
16345 /* If only one of the offsets is non-constant, the difference cannot
16346 be a constant. */
16347 return false;
16348 }
16349 else
16350 *diff = 0;
16351
16352 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16353 return true;
16354 }
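/* Editor's sketch, not part of GCC: for hypothetical ADDR_EXPRs
   e1 = &a[3] and e2 = &a[1] over the same array of int, the cores
   compare equal, both offsets are constant, and *DIFF receives the
   difference in bytes.  */
#if 0
  HOST_WIDE_INT diff;
  if (ptr_difference_const (e1, e2, &diff))
    /* &a[3] - &a[1] is two elements' worth of bytes.  */
    gcc_assert (diff == 2 * (HOST_WIDE_INT) sizeof (int));
#endif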
16355
16356 /* Simplify the floating point expression EXP when the sign of the
16357 result is not significant. Return NULL_TREE if no simplification
16358 is possible. */
16359
16360 tree
16361 fold_strip_sign_ops (tree exp)
16362 {
16363 tree arg0, arg1;
16364 location_t loc = EXPR_LOCATION (exp);
16365
16366 switch (TREE_CODE (exp))
16367 {
16368 case ABS_EXPR:
16369 case NEGATE_EXPR:
16370 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16371 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16372
16373 case MULT_EXPR:
16374 case RDIV_EXPR:
16375 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16376 return NULL_TREE;
16377 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16378 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16379 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16380 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16381 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16382 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16383 break;
16384
16385 case COMPOUND_EXPR:
16386 arg0 = TREE_OPERAND (exp, 0);
16387 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16388 if (arg1)
16389 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16390 break;
16391
16392 case COND_EXPR:
16393 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16394 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16395 if (arg0 || arg1)
16396 return fold_build3_loc (loc,
16397 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16398 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16399 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16400 break;
16401
16402 case CALL_EXPR:
16403 {
16404 const enum built_in_function fcode = builtin_mathfn_code (exp);
16405 switch (fcode)
16406 {
16407 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16408 /* Strip copysign function call, return the 1st argument. */
16409 arg0 = CALL_EXPR_ARG (exp, 0);
16410 arg1 = CALL_EXPR_ARG (exp, 1);
16411 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16412
16413 default:
16414 /* Strip sign ops from the argument of "odd" math functions. */
16415 if (negate_mathfn_p (fcode))
16416 {
16417 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16418 if (arg0)
16419 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16420 }
16421 break;
16422 }
16423 }
16424 break;
16425
16426 default:
16427 break;
16428 }
16429 return NULL_TREE;
16430 }
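/* Editor's sketch, not part of GCC: fold_strip_sign_ops at the source
   level, for contexts where only the magnitude of the result matters
   (e.g. under fabs):  */
#if 0
  /* -x * y          => x * y    (NEGATE_EXPR stripped from a factor)  */
  /* copysign (x, y) => x        (call dropped; y kept only for its
                                  side effects via omit_one_operand)   */
  /* sin (-x)        => sin (x)  (sin is odd, so the sign op inside
                                  its argument is stripped)            */
#endif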