match.pd: Implement patterns from associate_plusminus and factor in differences from...
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
  24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "predict.h"
62 #include "vec.h"
63 #include "hashtab.h"
64 #include "hash-set.h"
65 #include "machmode.h"
66 #include "hard-reg-set.h"
67 #include "input.h"
68 #include "function.h"
69 #include "basic-block.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
72 #include "tree-eh.h"
73 #include "gimple-expr.h"
74 #include "is-a.h"
75 #include "gimple.h"
76 #include "gimplify.h"
77 #include "tree-dfa.h"
78 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
79 #include "builtins.h"
80 #include "hash-map.h"
81 #include "plugin-api.h"
82 #include "ipa-ref.h"
83 #include "cgraph.h"
84 #include "generic-match.h"
85 #include "optabs.h"
86
87 /* Nonzero if we are folding constants inside an initializer; zero
88 otherwise. */
89 int folding_initializer = 0;
90
91 /* The following constants represent a bit based encoding of GCC's
92 comparison operators. This encoding simplifies transformations
93 on relational comparison operators, such as AND and OR. */
94 enum comparison_code {
95 COMPCODE_FALSE = 0,
96 COMPCODE_LT = 1,
97 COMPCODE_EQ = 2,
98 COMPCODE_LE = 3,
99 COMPCODE_GT = 4,
100 COMPCODE_LTGT = 5,
101 COMPCODE_GE = 6,
102 COMPCODE_ORD = 7,
103 COMPCODE_UNORD = 8,
104 COMPCODE_UNLT = 9,
105 COMPCODE_UNEQ = 10,
106 COMPCODE_UNLE = 11,
107 COMPCODE_UNGT = 12,
108 COMPCODE_NE = 13,
109 COMPCODE_UNGE = 14,
110 COMPCODE_TRUE = 15
111 };
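/* Annotation (not in the original source): a worked example of the
   encoding.  The primitive bits are LT = 1, EQ = 2, GT = 4 and
   UNORD = 8; every other code is their bitwise OR, e.g.
   LE = LT|EQ = 3, GE = EQ|GT = 6, LTGT = LT|GT = 5,
   ORD = LT|EQ|GT = 7, NE = LT|GT|UNORD = 13.  Combining two
   comparisons of the same operands with && or || therefore reduces
   to bitwise AND or OR of their codes:
   (a < b) || (a == b) has code 1 | 2 = 3 == COMPCODE_LE.  */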
112
113 static bool negate_mathfn_p (enum built_in_function);
114 static bool negate_expr_p (tree);
115 static tree negate_expr (tree);
116 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
117 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
118 static tree const_binop (enum tree_code, tree, tree);
119 static enum comparison_code comparison_to_compcode (enum tree_code);
120 static enum tree_code compcode_to_comparison (enum comparison_code);
121 static int operand_equal_for_comparison_p (tree, tree, tree);
122 static int twoval_comparison_p (tree, tree *, tree *, int *);
123 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
124 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
125 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
126 static tree make_bit_field_ref (location_t, tree, tree,
127 HOST_WIDE_INT, HOST_WIDE_INT, int);
128 static tree optimize_bit_field_compare (location_t, enum tree_code,
129 tree, tree, tree);
130 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
131 HOST_WIDE_INT *,
132 machine_mode *, int *, int *,
133 tree *, tree *);
134 static tree sign_bit_p (tree, const_tree);
135 static int simple_operand_p (const_tree);
136 static bool simple_operand_p_2 (tree);
137 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
138 static tree range_predecessor (tree);
139 static tree range_successor (tree);
140 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
141 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
142 static tree unextend (tree, int, int, tree);
143 static tree optimize_minmax_comparison (location_t, enum tree_code,
144 tree, tree, tree);
145 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
146 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
147 static tree fold_binary_op_with_conditional_arg (location_t,
148 enum tree_code, tree,
149 tree, tree,
150 tree, tree, int);
151 static tree fold_mathfn_compare (location_t,
152 enum built_in_function, enum tree_code,
153 tree, tree, tree);
154 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
155 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
156 static bool reorder_operands_p (const_tree, const_tree);
157 static tree fold_negate_const (tree, tree);
158 static tree fold_not_const (const_tree, tree);
159 static tree fold_relational_const (enum tree_code, tree, tree, tree);
160 static tree fold_convert_const (enum tree_code, tree, tree);
161
162 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
163 Otherwise, return LOC. */
164
165 static location_t
166 expr_location_or (tree t, location_t loc)
167 {
168 location_t tloc = EXPR_LOCATION (t);
169 return tloc == UNKNOWN_LOCATION ? loc : tloc;
170 }
171
 172 /* Similar to protected_set_expr_location, but never modify x in place;
 173    if the location can and needs to be set, unshare it. */
174
175 static inline tree
176 protected_set_expr_location_unshare (tree x, location_t loc)
177 {
178 if (CAN_HAVE_LOCATION_P (x)
179 && EXPR_LOCATION (x) != loc
180 && !(TREE_CODE (x) == SAVE_EXPR
181 || TREE_CODE (x) == TARGET_EXPR
182 || TREE_CODE (x) == BIND_EXPR))
183 {
184 x = copy_node (x);
185 SET_EXPR_LOCATION (x, loc);
186 }
187 return x;
188 }
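/* Annotation (not in the original source): SAVE_EXPR, TARGET_EXPR and
   BIND_EXPR are excluded above because such nodes rely on being shared;
   copying a SAVE_EXPR, for instance, would lose the evaluate-once
   guarantee that the sharing provides.  */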
189 \f
190 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
191 division and returns the quotient. Otherwise returns
192 NULL_TREE. */
193
194 tree
195 div_if_zero_remainder (const_tree arg1, const_tree arg2)
196 {
197 widest_int quo;
198
199 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
200 SIGNED, &quo))
201 return wide_int_to_tree (TREE_TYPE (arg1), quo);
202
203 return NULL_TREE;
204 }
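/* Annotation (not in the original source): for example,
   div_if_zero_remainder (12, 4) divides exactly and yields the
   INTEGER_CST 3, while div_if_zero_remainder (13, 4) leaves a
   remainder and yields NULL_TREE.  (The arguments are INTEGER_CST
   trees, written here as plain numbers for brevity.)  */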
205 \f
206 /* This is nonzero if we should defer warnings about undefined
207 overflow. This facility exists because these warnings are a
208 special case. The code to estimate loop iterations does not want
209 to issue any warnings, since it works with expressions which do not
210 occur in user code. Various bits of cleanup code call fold(), but
211 only use the result if it has certain characteristics (e.g., is a
212 constant); that code only wants to issue a warning if the result is
213 used. */
214
215 static int fold_deferring_overflow_warnings;
216
217 /* If a warning about undefined overflow is deferred, this is the
218 warning. Note that this may cause us to turn two warnings into
219 one, but that is fine since it is sufficient to only give one
220 warning per expression. */
221
222 static const char* fold_deferred_overflow_warning;
223
224 /* If a warning about undefined overflow is deferred, this is the
225 level at which the warning should be emitted. */
226
227 static enum warn_strict_overflow_code fold_deferred_overflow_code;
228
229 /* Start deferring overflow warnings. We could use a stack here to
230 permit nested calls, but at present it is not necessary. */
231
232 void
233 fold_defer_overflow_warnings (void)
234 {
235 ++fold_deferring_overflow_warnings;
236 }
237
238 /* Stop deferring overflow warnings. If there is a pending warning,
239 and ISSUE is true, then issue the warning if appropriate. STMT is
240 the statement with which the warning should be associated (used for
241 location information); STMT may be NULL. CODE is the level of the
242 warning--a warn_strict_overflow_code value. This function will use
243 the smaller of CODE and the deferred code when deciding whether to
244 issue the warning. CODE may be zero to mean to always use the
245 deferred code. */
246
247 void
248 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
249 {
250 const char *warnmsg;
251 location_t locus;
252
253 gcc_assert (fold_deferring_overflow_warnings > 0);
254 --fold_deferring_overflow_warnings;
255 if (fold_deferring_overflow_warnings > 0)
256 {
257 if (fold_deferred_overflow_warning != NULL
258 && code != 0
259 && code < (int) fold_deferred_overflow_code)
260 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
261 return;
262 }
263
264 warnmsg = fold_deferred_overflow_warning;
265 fold_deferred_overflow_warning = NULL;
266
267 if (!issue || warnmsg == NULL)
268 return;
269
270 if (gimple_no_warning_p (stmt))
271 return;
272
273 /* Use the smallest code level when deciding to issue the
274 warning. */
275 if (code == 0 || code > (int) fold_deferred_overflow_code)
276 code = fold_deferred_overflow_code;
277
278 if (!issue_strict_overflow_warning (code))
279 return;
280
281 if (stmt == NULL)
282 locus = input_location;
283 else
284 locus = gimple_location (stmt);
285 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
286 }
287
288 /* Stop deferring overflow warnings, ignoring any deferred
289 warnings. */
290
291 void
292 fold_undefer_and_ignore_overflow_warnings (void)
293 {
294 fold_undefer_overflow_warnings (false, NULL, 0);
295 }
296
297 /* Whether we are deferring overflow warnings. */
298
299 bool
300 fold_deferring_overflow_warnings_p (void)
301 {
302 return fold_deferring_overflow_warnings > 0;
303 }
304
305 /* This is called when we fold something based on the fact that signed
306 overflow is undefined. */
307
308 static void
309 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
310 {
311 if (fold_deferring_overflow_warnings > 0)
312 {
313 if (fold_deferred_overflow_warning == NULL
314 || wc < fold_deferred_overflow_code)
315 {
316 fold_deferred_overflow_warning = gmsgid;
317 fold_deferred_overflow_code = wc;
318 }
319 }
320 else if (issue_strict_overflow_warning (wc))
321 warning (OPT_Wstrict_overflow, gmsgid);
322 }
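/* Annotation (not in the original source): a minimal sketch of how the
   deferral machinery above is meant to be paired around a speculative
   fold whose result may be discarded.  The helper name
   try_fold_to_int_cst is hypothetical.  */
#if 0
static tree
try_fold_to_int_cst (tree expr, gimple stmt)
{
  fold_defer_overflow_warnings ();
  tree res = fold (expr);
  bool keep = TREE_CODE (res) == INTEGER_CST;
  /* Issue any deferred -Wstrict-overflow warning only if the folded
     result is actually used; CODE 0 means "use the deferred level".  */
  fold_undefer_overflow_warnings (keep, stmt, 0);
  return keep ? res : NULL_TREE;
}
#endif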
323 \f
324 /* Return true if the built-in mathematical function specified by CODE
325 is odd, i.e. -f(x) == f(-x). */
326
327 static bool
328 negate_mathfn_p (enum built_in_function code)
329 {
330 switch (code)
331 {
332 CASE_FLT_FN (BUILT_IN_ASIN):
333 CASE_FLT_FN (BUILT_IN_ASINH):
334 CASE_FLT_FN (BUILT_IN_ATAN):
335 CASE_FLT_FN (BUILT_IN_ATANH):
336 CASE_FLT_FN (BUILT_IN_CASIN):
337 CASE_FLT_FN (BUILT_IN_CASINH):
338 CASE_FLT_FN (BUILT_IN_CATAN):
339 CASE_FLT_FN (BUILT_IN_CATANH):
340 CASE_FLT_FN (BUILT_IN_CBRT):
341 CASE_FLT_FN (BUILT_IN_CPROJ):
342 CASE_FLT_FN (BUILT_IN_CSIN):
343 CASE_FLT_FN (BUILT_IN_CSINH):
344 CASE_FLT_FN (BUILT_IN_CTAN):
345 CASE_FLT_FN (BUILT_IN_CTANH):
346 CASE_FLT_FN (BUILT_IN_ERF):
347 CASE_FLT_FN (BUILT_IN_LLROUND):
348 CASE_FLT_FN (BUILT_IN_LROUND):
349 CASE_FLT_FN (BUILT_IN_ROUND):
350 CASE_FLT_FN (BUILT_IN_SIN):
351 CASE_FLT_FN (BUILT_IN_SINH):
352 CASE_FLT_FN (BUILT_IN_TAN):
353 CASE_FLT_FN (BUILT_IN_TANH):
354 CASE_FLT_FN (BUILT_IN_TRUNC):
355 return true;
356
357 CASE_FLT_FN (BUILT_IN_LLRINT):
358 CASE_FLT_FN (BUILT_IN_LRINT):
359 CASE_FLT_FN (BUILT_IN_NEARBYINT):
360 CASE_FLT_FN (BUILT_IN_RINT):
361 return !flag_rounding_math;
362
363 default:
364 break;
365 }
366 return false;
367 }
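/* Annotation (not in the original source): sin is odd, so -sin(x) may
   be folded to sin(-x).  The rint family is odd only when the rounding
   mode is known: under round-toward-+infinity, rint(2.5) == 3 but
   rint(-2.5) == -2, so -rint(2.5) != rint(-2.5); hence the
   !flag_rounding_math guard above.  */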
368
369 /* Check whether we may negate an integer constant T without causing
370 overflow. */
371
372 bool
373 may_negate_without_overflow_p (const_tree t)
374 {
375 tree type;
376
377 gcc_assert (TREE_CODE (t) == INTEGER_CST);
378
379 type = TREE_TYPE (t);
380 if (TYPE_UNSIGNED (type))
381 return false;
382
383 return !wi::only_sign_bit_p (t);
384 }
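/* Annotation (not in the original source): for a 32-bit int, INT_MIN
   (-2147483648) is the only value whose negation overflows, and it is
   exactly the value with only the sign bit set, which is what
   wi::only_sign_bit_p detects.  */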
385
386 /* Determine whether an expression T can be cheaply negated using
387 the function negate_expr without introducing undefined overflow. */
388
389 static bool
390 negate_expr_p (tree t)
391 {
392 tree type;
393
394 if (t == 0)
395 return false;
396
397 type = TREE_TYPE (t);
398
399 STRIP_SIGN_NOPS (t);
400 switch (TREE_CODE (t))
401 {
402 case INTEGER_CST:
403 if (TYPE_OVERFLOW_WRAPS (type))
404 return true;
405
406 /* Check that -CST will not overflow type. */
407 return may_negate_without_overflow_p (t);
408 case BIT_NOT_EXPR:
409 return (INTEGRAL_TYPE_P (type)
410 && TYPE_OVERFLOW_WRAPS (type));
411
412 case FIXED_CST:
413 case NEGATE_EXPR:
414 return true;
415
416 case REAL_CST:
417 /* We want to canonicalize to positive real constants. Pretend
418 that only negative ones can be easily negated. */
419 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
420
421 case COMPLEX_CST:
422 return negate_expr_p (TREE_REALPART (t))
423 && negate_expr_p (TREE_IMAGPART (t));
424
425 case VECTOR_CST:
426 {
427 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
428 return true;
429
430 int count = TYPE_VECTOR_SUBPARTS (type), i;
431
432 for (i = 0; i < count; i++)
433 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
434 return false;
435
436 return true;
437 }
438
439 case COMPLEX_EXPR:
440 return negate_expr_p (TREE_OPERAND (t, 0))
441 && negate_expr_p (TREE_OPERAND (t, 1));
442
443 case CONJ_EXPR:
444 return negate_expr_p (TREE_OPERAND (t, 0));
445
446 case PLUS_EXPR:
447 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
448 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
449 return false;
450 /* -(A + B) -> (-B) - A. */
451 if (negate_expr_p (TREE_OPERAND (t, 1))
452 && reorder_operands_p (TREE_OPERAND (t, 0),
453 TREE_OPERAND (t, 1)))
454 return true;
455 /* -(A + B) -> (-A) - B. */
456 return negate_expr_p (TREE_OPERAND (t, 0));
457
458 case MINUS_EXPR:
459 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
460 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
461 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
462 && reorder_operands_p (TREE_OPERAND (t, 0),
463 TREE_OPERAND (t, 1));
464
465 case MULT_EXPR:
466 if (TYPE_UNSIGNED (TREE_TYPE (t)))
467 break;
468
469 /* Fall through. */
470
471 case RDIV_EXPR:
472 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
473 return negate_expr_p (TREE_OPERAND (t, 1))
474 || negate_expr_p (TREE_OPERAND (t, 0));
475 break;
476
477 case TRUNC_DIV_EXPR:
478 case ROUND_DIV_EXPR:
479 case EXACT_DIV_EXPR:
480 /* In general we can't negate A / B, because if A is INT_MIN and
481 B is 1, we may turn this into INT_MIN / -1 which is undefined
482 and actually traps on some architectures. But if overflow is
483 undefined, we can negate, because - (INT_MIN / 1) is an
484 overflow. */
485 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
486 {
487 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
488 break;
489 /* If overflow is undefined then we have to be careful because
490 we ask whether it's ok to associate the negate with the
491 division which is not ok for example for
492 -((a - b) / c) where (-(a - b)) / c may invoke undefined
493 overflow because of negating INT_MIN. So do not use
494 negate_expr_p here but open-code the two important cases. */
495 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
496 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
497 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
498 return true;
499 }
500 else if (negate_expr_p (TREE_OPERAND (t, 0)))
501 return true;
502 return negate_expr_p (TREE_OPERAND (t, 1));
503
504 case NOP_EXPR:
505 /* Negate -((double)float) as (double)(-float). */
506 if (TREE_CODE (type) == REAL_TYPE)
507 {
508 tree tem = strip_float_extensions (t);
509 if (tem != t)
510 return negate_expr_p (tem);
511 }
512 break;
513
514 case CALL_EXPR:
515 /* Negate -f(x) as f(-x). */
516 if (negate_mathfn_p (builtin_mathfn_code (t)))
517 return negate_expr_p (CALL_EXPR_ARG (t, 0));
518 break;
519
520 case RSHIFT_EXPR:
521 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
522 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
523 {
524 tree op1 = TREE_OPERAND (t, 1);
525 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
526 return true;
527 }
528 break;
529
530 default:
531 break;
532 }
533 return false;
534 }
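/* Annotation (not in the original source): a worked example for the
   RSHIFT_EXPR case above.  For a 32-bit int, (int) x >> 31 is 0 when
   x >= 0 and -1 when x < 0, so its negation is 0 or 1 -- exactly the
   value of (unsigned) x >> 31.  */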
535
 536 /* Given T, an expression, return a folded tree for -T or NULL_TREE if no
537 simplification is possible.
538 If negate_expr_p would return true for T, NULL_TREE will never be
539 returned. */
540
541 static tree
542 fold_negate_expr (location_t loc, tree t)
543 {
544 tree type = TREE_TYPE (t);
545 tree tem;
546
547 switch (TREE_CODE (t))
548 {
549 /* Convert - (~A) to A + 1. */
550 case BIT_NOT_EXPR:
551 if (INTEGRAL_TYPE_P (type))
552 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
553 build_one_cst (type));
554 break;
555
556 case INTEGER_CST:
557 tem = fold_negate_const (t, type);
558 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
559 || !TYPE_OVERFLOW_TRAPS (type))
560 return tem;
561 break;
562
563 case REAL_CST:
564 tem = fold_negate_const (t, type);
565 /* Two's complement FP formats, such as c4x, may overflow. */
566 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
567 return tem;
568 break;
569
570 case FIXED_CST:
571 tem = fold_negate_const (t, type);
572 return tem;
573
574 case COMPLEX_CST:
575 {
576 tree rpart = negate_expr (TREE_REALPART (t));
577 tree ipart = negate_expr (TREE_IMAGPART (t));
578
579 if ((TREE_CODE (rpart) == REAL_CST
580 && TREE_CODE (ipart) == REAL_CST)
581 || (TREE_CODE (rpart) == INTEGER_CST
582 && TREE_CODE (ipart) == INTEGER_CST))
583 return build_complex (type, rpart, ipart);
584 }
585 break;
586
587 case VECTOR_CST:
588 {
589 int count = TYPE_VECTOR_SUBPARTS (type), i;
590 tree *elts = XALLOCAVEC (tree, count);
591
592 for (i = 0; i < count; i++)
593 {
594 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
595 if (elts[i] == NULL_TREE)
596 return NULL_TREE;
597 }
598
599 return build_vector (type, elts);
600 }
601
602 case COMPLEX_EXPR:
603 if (negate_expr_p (t))
604 return fold_build2_loc (loc, COMPLEX_EXPR, type,
605 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
606 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
607 break;
608
609 case CONJ_EXPR:
610 if (negate_expr_p (t))
611 return fold_build1_loc (loc, CONJ_EXPR, type,
612 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
613 break;
614
615 case NEGATE_EXPR:
616 return TREE_OPERAND (t, 0);
617
618 case PLUS_EXPR:
619 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
620 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
621 {
622 /* -(A + B) -> (-B) - A. */
623 if (negate_expr_p (TREE_OPERAND (t, 1))
624 && reorder_operands_p (TREE_OPERAND (t, 0),
625 TREE_OPERAND (t, 1)))
626 {
627 tem = negate_expr (TREE_OPERAND (t, 1));
628 return fold_build2_loc (loc, MINUS_EXPR, type,
629 tem, TREE_OPERAND (t, 0));
630 }
631
632 /* -(A + B) -> (-A) - B. */
633 if (negate_expr_p (TREE_OPERAND (t, 0)))
634 {
635 tem = negate_expr (TREE_OPERAND (t, 0));
636 return fold_build2_loc (loc, MINUS_EXPR, type,
637 tem, TREE_OPERAND (t, 1));
638 }
639 }
640 break;
641
642 case MINUS_EXPR:
643 /* - (A - B) -> B - A */
644 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
645 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
646 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
647 return fold_build2_loc (loc, MINUS_EXPR, type,
648 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
649 break;
650
651 case MULT_EXPR:
652 if (TYPE_UNSIGNED (type))
653 break;
654
655 /* Fall through. */
656
657 case RDIV_EXPR:
658 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
659 {
660 tem = TREE_OPERAND (t, 1);
661 if (negate_expr_p (tem))
662 return fold_build2_loc (loc, TREE_CODE (t), type,
663 TREE_OPERAND (t, 0), negate_expr (tem));
664 tem = TREE_OPERAND (t, 0);
665 if (negate_expr_p (tem))
666 return fold_build2_loc (loc, TREE_CODE (t), type,
667 negate_expr (tem), TREE_OPERAND (t, 1));
668 }
669 break;
670
671 case TRUNC_DIV_EXPR:
672 case ROUND_DIV_EXPR:
673 case EXACT_DIV_EXPR:
674 /* In general we can't negate A / B, because if A is INT_MIN and
675 B is 1, we may turn this into INT_MIN / -1 which is undefined
676 and actually traps on some architectures. But if overflow is
677 undefined, we can negate, because - (INT_MIN / 1) is an
678 overflow. */
679 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
680 {
681 const char * const warnmsg = G_("assuming signed overflow does not "
682 "occur when negating a division");
683 tem = TREE_OPERAND (t, 1);
684 if (negate_expr_p (tem))
685 {
686 if (INTEGRAL_TYPE_P (type)
687 && (TREE_CODE (tem) != INTEGER_CST
688 || integer_onep (tem)))
689 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
690 return fold_build2_loc (loc, TREE_CODE (t), type,
691 TREE_OPERAND (t, 0), negate_expr (tem));
692 }
693 /* If overflow is undefined then we have to be careful because
694 we ask whether it's ok to associate the negate with the
695 division which is not ok for example for
696 -((a - b) / c) where (-(a - b)) / c may invoke undefined
697 overflow because of negating INT_MIN. So do not use
698 negate_expr_p here but open-code the two important cases. */
699 tem = TREE_OPERAND (t, 0);
700 if ((INTEGRAL_TYPE_P (type)
701 && (TREE_CODE (tem) == NEGATE_EXPR
702 || (TREE_CODE (tem) == INTEGER_CST
703 && may_negate_without_overflow_p (tem))))
704 || !INTEGRAL_TYPE_P (type))
705 return fold_build2_loc (loc, TREE_CODE (t), type,
706 negate_expr (tem), TREE_OPERAND (t, 1));
707 }
708 break;
709
710 case NOP_EXPR:
711 /* Convert -((double)float) into (double)(-float). */
712 if (TREE_CODE (type) == REAL_TYPE)
713 {
714 tem = strip_float_extensions (t);
715 if (tem != t && negate_expr_p (tem))
716 return fold_convert_loc (loc, type, negate_expr (tem));
717 }
718 break;
719
720 case CALL_EXPR:
721 /* Negate -f(x) as f(-x). */
722 if (negate_mathfn_p (builtin_mathfn_code (t))
723 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
724 {
725 tree fndecl, arg;
726
727 fndecl = get_callee_fndecl (t);
728 arg = negate_expr (CALL_EXPR_ARG (t, 0));
729 return build_call_expr_loc (loc, fndecl, 1, arg);
730 }
731 break;
732
733 case RSHIFT_EXPR:
734 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
735 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
736 {
737 tree op1 = TREE_OPERAND (t, 1);
738 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
739 {
740 tree ntype = TYPE_UNSIGNED (type)
741 ? signed_type_for (type)
742 : unsigned_type_for (type);
743 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
744 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
745 return fold_convert_loc (loc, type, temp);
746 }
747 }
748 break;
749
750 default:
751 break;
752 }
753
754 return NULL_TREE;
755 }
756
 757 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
758 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
759 return NULL_TREE. */
760
761 static tree
762 negate_expr (tree t)
763 {
764 tree type, tem;
765 location_t loc;
766
767 if (t == NULL_TREE)
768 return NULL_TREE;
769
770 loc = EXPR_LOCATION (t);
771 type = TREE_TYPE (t);
772 STRIP_SIGN_NOPS (t);
773
774 tem = fold_negate_expr (loc, t);
775 if (!tem)
776 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
777 return fold_convert_loc (loc, type, tem);
778 }
779 \f
 780 /* Split a tree IN into constant, literal and variable parts that could be
781 combined with CODE to make IN. "constant" means an expression with
782 TREE_CONSTANT but that isn't an actual constant. CODE must be a
783 commutative arithmetic operation. Store the constant part into *CONP,
784 the literal in *LITP and return the variable part. If a part isn't
785 present, set it to null. If the tree does not decompose in this way,
786 return the entire tree as the variable part and the other parts as null.
787
788 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
789 case, we negate an operand that was subtracted. Except if it is a
790 literal for which we use *MINUS_LITP instead.
791
792 If NEGATE_P is true, we are negating all of IN, again except a literal
793 for which we use *MINUS_LITP instead.
794
795 If IN is itself a literal or constant, return it as appropriate.
796
797 Note that we do not guarantee that any of the three values will be the
798 same type as IN, but they will have the same signedness and mode. */
799
800 static tree
801 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
802 tree *minus_litp, int negate_p)
803 {
804 tree var = 0;
805
806 *conp = 0;
807 *litp = 0;
808 *minus_litp = 0;
809
810 /* Strip any conversions that don't change the machine mode or signedness. */
811 STRIP_SIGN_NOPS (in);
812
813 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
814 || TREE_CODE (in) == FIXED_CST)
815 *litp = in;
816 else if (TREE_CODE (in) == code
817 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
818 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
819 /* We can associate addition and subtraction together (even
820 though the C standard doesn't say so) for integers because
821 the value is not affected. For reals, the value might be
822 affected, so we can't. */
823 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
824 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
825 {
826 tree op0 = TREE_OPERAND (in, 0);
827 tree op1 = TREE_OPERAND (in, 1);
828 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
829 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
830
831 /* First see if either of the operands is a literal, then a constant. */
832 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
833 || TREE_CODE (op0) == FIXED_CST)
834 *litp = op0, op0 = 0;
835 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
836 || TREE_CODE (op1) == FIXED_CST)
837 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
838
839 if (op0 != 0 && TREE_CONSTANT (op0))
840 *conp = op0, op0 = 0;
841 else if (op1 != 0 && TREE_CONSTANT (op1))
842 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
843
844 /* If we haven't dealt with either operand, this is not a case we can
845 decompose. Otherwise, VAR is either of the ones remaining, if any. */
846 if (op0 != 0 && op1 != 0)
847 var = in;
848 else if (op0 != 0)
849 var = op0;
850 else
851 var = op1, neg_var_p = neg1_p;
852
853 /* Now do any needed negations. */
854 if (neg_litp_p)
855 *minus_litp = *litp, *litp = 0;
856 if (neg_conp_p)
857 *conp = negate_expr (*conp);
858 if (neg_var_p)
859 var = negate_expr (var);
860 }
861 else if (TREE_CODE (in) == BIT_NOT_EXPR
862 && code == PLUS_EXPR)
863 {
864 /* -X - 1 is folded to ~X, undo that here. */
865 *minus_litp = build_one_cst (TREE_TYPE (in));
866 var = negate_expr (TREE_OPERAND (in, 0));
867 }
868 else if (TREE_CONSTANT (in))
869 *conp = in;
870 else
871 var = in;
872
873 if (negate_p)
874 {
875 if (*litp)
876 *minus_litp = *litp, *litp = 0;
877 else if (*minus_litp)
878 *litp = *minus_litp, *minus_litp = 0;
879 *conp = negate_expr (*conp);
880 var = negate_expr (var);
881 }
882
883 return var;
884 }
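/* Annotation (not in the original source): examples of the
   decomposition with CODE == PLUS_EXPR:
     IN = x + 7  =>  *litp = 7, variable part x;
     IN = x - 7  =>  *minus_litp = 7, variable part x;
     IN = ~x     =>  *minus_litp = 1, variable part -x
   (the last because ~x == -x - 1).  */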
885
886 /* Re-associate trees split by the above function. T1 and T2 are
887 either expressions to associate or null. Return the new
888 expression, if any. LOC is the location of the new expression. If
889 we build an operation, do it in TYPE and with CODE. */
890
891 static tree
892 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
893 {
894 if (t1 == 0)
895 return t2;
896 else if (t2 == 0)
897 return t1;
898
899 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
900 try to fold this since we will have infinite recursion. But do
901 deal with any NEGATE_EXPRs. */
902 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
903 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
904 {
905 if (code == PLUS_EXPR)
906 {
907 if (TREE_CODE (t1) == NEGATE_EXPR)
908 return build2_loc (loc, MINUS_EXPR, type,
909 fold_convert_loc (loc, type, t2),
910 fold_convert_loc (loc, type,
911 TREE_OPERAND (t1, 0)));
912 else if (TREE_CODE (t2) == NEGATE_EXPR)
913 return build2_loc (loc, MINUS_EXPR, type,
914 fold_convert_loc (loc, type, t1),
915 fold_convert_loc (loc, type,
916 TREE_OPERAND (t2, 0)));
917 else if (integer_zerop (t2))
918 return fold_convert_loc (loc, type, t1);
919 }
920 else if (code == MINUS_EXPR)
921 {
922 if (integer_zerop (t2))
923 return fold_convert_loc (loc, type, t1);
924 }
925
926 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
927 fold_convert_loc (loc, type, t2));
928 }
929
930 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
931 fold_convert_loc (loc, type, t2));
932 }
933 \f
934 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
935 for use in int_const_binop, size_binop and size_diffop. */
936
937 static bool
938 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
939 {
940 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
941 return false;
942 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
943 return false;
944
945 switch (code)
946 {
947 case LSHIFT_EXPR:
948 case RSHIFT_EXPR:
949 case LROTATE_EXPR:
950 case RROTATE_EXPR:
951 return true;
952
953 default:
954 break;
955 }
956
957 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
958 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
959 && TYPE_MODE (type1) == TYPE_MODE (type2);
960 }
961
962
963 /* Combine two integer constants ARG1 and ARG2 under operation CODE
964 to produce a new constant. Return NULL_TREE if we don't know how
965 to evaluate CODE at compile-time. */
966
967 static tree
968 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
969 int overflowable)
970 {
971 wide_int res;
972 tree t;
973 tree type = TREE_TYPE (arg1);
974 signop sign = TYPE_SIGN (type);
975 bool overflow = false;
976
977 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
978 TYPE_SIGN (TREE_TYPE (parg2)));
979
980 switch (code)
981 {
982 case BIT_IOR_EXPR:
983 res = wi::bit_or (arg1, arg2);
984 break;
985
986 case BIT_XOR_EXPR:
987 res = wi::bit_xor (arg1, arg2);
988 break;
989
990 case BIT_AND_EXPR:
991 res = wi::bit_and (arg1, arg2);
992 break;
993
994 case RSHIFT_EXPR:
995 case LSHIFT_EXPR:
996 if (wi::neg_p (arg2))
997 {
998 arg2 = -arg2;
999 if (code == RSHIFT_EXPR)
1000 code = LSHIFT_EXPR;
1001 else
1002 code = RSHIFT_EXPR;
1003 }
1004
1005 if (code == RSHIFT_EXPR)
1006 /* It's unclear from the C standard whether shifts can overflow.
1007 The following code ignores overflow; perhaps a C standard
1008 interpretation ruling is needed. */
1009 res = wi::rshift (arg1, arg2, sign);
1010 else
1011 res = wi::lshift (arg1, arg2);
1012 break;
1013
1014 case RROTATE_EXPR:
1015 case LROTATE_EXPR:
1016 if (wi::neg_p (arg2))
1017 {
1018 arg2 = -arg2;
1019 if (code == RROTATE_EXPR)
1020 code = LROTATE_EXPR;
1021 else
1022 code = RROTATE_EXPR;
1023 }
1024
1025 if (code == RROTATE_EXPR)
1026 res = wi::rrotate (arg1, arg2);
1027 else
1028 res = wi::lrotate (arg1, arg2);
1029 break;
1030
1031 case PLUS_EXPR:
1032 res = wi::add (arg1, arg2, sign, &overflow);
1033 break;
1034
1035 case MINUS_EXPR:
1036 res = wi::sub (arg1, arg2, sign, &overflow);
1037 break;
1038
1039 case MULT_EXPR:
1040 res = wi::mul (arg1, arg2, sign, &overflow);
1041 break;
1042
1043 case MULT_HIGHPART_EXPR:
1044 res = wi::mul_high (arg1, arg2, sign);
1045 break;
1046
1047 case TRUNC_DIV_EXPR:
1048 case EXACT_DIV_EXPR:
1049 if (arg2 == 0)
1050 return NULL_TREE;
1051 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1052 break;
1053
1054 case FLOOR_DIV_EXPR:
1055 if (arg2 == 0)
1056 return NULL_TREE;
1057 res = wi::div_floor (arg1, arg2, sign, &overflow);
1058 break;
1059
1060 case CEIL_DIV_EXPR:
1061 if (arg2 == 0)
1062 return NULL_TREE;
1063 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1064 break;
1065
1066 case ROUND_DIV_EXPR:
1067 if (arg2 == 0)
1068 return NULL_TREE;
1069 res = wi::div_round (arg1, arg2, sign, &overflow);
1070 break;
1071
1072 case TRUNC_MOD_EXPR:
1073 if (arg2 == 0)
1074 return NULL_TREE;
1075 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1076 break;
1077
1078 case FLOOR_MOD_EXPR:
1079 if (arg2 == 0)
1080 return NULL_TREE;
1081 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1082 break;
1083
1084 case CEIL_MOD_EXPR:
1085 if (arg2 == 0)
1086 return NULL_TREE;
1087 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1088 break;
1089
1090 case ROUND_MOD_EXPR:
1091 if (arg2 == 0)
1092 return NULL_TREE;
1093 res = wi::mod_round (arg1, arg2, sign, &overflow);
1094 break;
1095
1096 case MIN_EXPR:
1097 res = wi::min (arg1, arg2, sign);
1098 break;
1099
1100 case MAX_EXPR:
1101 res = wi::max (arg1, arg2, sign);
1102 break;
1103
1104 default:
1105 return NULL_TREE;
1106 }
1107
1108 t = force_fit_type (type, res, overflowable,
1109 (((sign == SIGNED || overflowable == -1)
1110 && overflow)
1111 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1112
1113 return t;
1114 }
1115
1116 tree
1117 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1118 {
1119 return int_const_binop_1 (code, arg1, arg2, 1);
1120 }
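/* Annotation (not in the original source): a minimal usage sketch of
   int_const_binop, folding 2 + 3 into one INTEGER_CST:

     tree a = build_int_cst (integer_type_node, 2);
     tree b = build_int_cst (integer_type_node, 3);
     tree sum = int_const_binop (PLUS_EXPR, a, b);  /- INTEGER_CST 5 -/

   The division and modulus codes return NULL_TREE for a zero divisor
   instead of folding.  */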
1121
1122 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1123 constant. We assume ARG1 and ARG2 have the same data type, or at least
1124 are the same kind of constant and the same machine mode. Return zero if
1125 combining the constants is not allowed in the current operating mode. */
1126
1127 static tree
1128 const_binop (enum tree_code code, tree arg1, tree arg2)
1129 {
1130 /* Sanity check for the recursive cases. */
1131 if (!arg1 || !arg2)
1132 return NULL_TREE;
1133
1134 STRIP_NOPS (arg1);
1135 STRIP_NOPS (arg2);
1136
1137 if (TREE_CODE (arg1) == INTEGER_CST)
1138 return int_const_binop (code, arg1, arg2);
1139
1140 if (TREE_CODE (arg1) == REAL_CST)
1141 {
1142 machine_mode mode;
1143 REAL_VALUE_TYPE d1;
1144 REAL_VALUE_TYPE d2;
1145 REAL_VALUE_TYPE value;
1146 REAL_VALUE_TYPE result;
1147 bool inexact;
1148 tree t, type;
1149
1150 /* The following codes are handled by real_arithmetic. */
1151 switch (code)
1152 {
1153 case PLUS_EXPR:
1154 case MINUS_EXPR:
1155 case MULT_EXPR:
1156 case RDIV_EXPR:
1157 case MIN_EXPR:
1158 case MAX_EXPR:
1159 break;
1160
1161 default:
1162 return NULL_TREE;
1163 }
1164
1165 d1 = TREE_REAL_CST (arg1);
1166 d2 = TREE_REAL_CST (arg2);
1167
1168 type = TREE_TYPE (arg1);
1169 mode = TYPE_MODE (type);
1170
1171 /* Don't perform operation if we honor signaling NaNs and
1172 either operand is a NaN. */
1173 if (HONOR_SNANS (mode)
1174 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1175 return NULL_TREE;
1176
1177 /* Don't perform operation if it would raise a division
1178 by zero exception. */
1179 if (code == RDIV_EXPR
1180 && REAL_VALUES_EQUAL (d2, dconst0)
1181 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1182 return NULL_TREE;
1183
1184 /* If either operand is a NaN, just return it. Otherwise, set up
1185 for floating-point trap; we return an overflow. */
1186 if (REAL_VALUE_ISNAN (d1))
1187 return arg1;
1188 else if (REAL_VALUE_ISNAN (d2))
1189 return arg2;
1190
1191 inexact = real_arithmetic (&value, code, &d1, &d2);
1192 real_convert (&result, mode, &value);
1193
1194 /* Don't constant fold this floating point operation if
1195 the result has overflowed and flag_trapping_math. */
1196 if (flag_trapping_math
1197 && MODE_HAS_INFINITIES (mode)
1198 && REAL_VALUE_ISINF (result)
1199 && !REAL_VALUE_ISINF (d1)
1200 && !REAL_VALUE_ISINF (d2))
1201 return NULL_TREE;
1202
1203 /* Don't constant fold this floating point operation if the
1204 	 result may depend upon the run-time rounding mode and
1205 flag_rounding_math is set, or if GCC's software emulation
1206 is unable to accurately represent the result. */
1207 if ((flag_rounding_math
1208 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1209 && (inexact || !real_identical (&result, &value)))
1210 return NULL_TREE;
1211
1212 t = build_real (type, result);
1213
1214 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1215 return t;
1216 }
1217
1218 if (TREE_CODE (arg1) == FIXED_CST)
1219 {
1220 FIXED_VALUE_TYPE f1;
1221 FIXED_VALUE_TYPE f2;
1222 FIXED_VALUE_TYPE result;
1223 tree t, type;
1224 int sat_p;
1225 bool overflow_p;
1226
1227 /* The following codes are handled by fixed_arithmetic. */
1228 switch (code)
1229 {
1230 case PLUS_EXPR:
1231 case MINUS_EXPR:
1232 case MULT_EXPR:
1233 case TRUNC_DIV_EXPR:
1234 f2 = TREE_FIXED_CST (arg2);
1235 break;
1236
1237 case LSHIFT_EXPR:
1238 case RSHIFT_EXPR:
1239 {
1240 wide_int w2 = arg2;
1241 f2.data.high = w2.elt (1);
1242 f2.data.low = w2.elt (0);
1243 f2.mode = SImode;
1244 }
1245 break;
1246
1247 default:
1248 return NULL_TREE;
1249 }
1250
1251 f1 = TREE_FIXED_CST (arg1);
1252 type = TREE_TYPE (arg1);
1253 sat_p = TYPE_SATURATING (type);
1254 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1255 t = build_fixed (type, result);
1256 /* Propagate overflow flags. */
1257 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1258 TREE_OVERFLOW (t) = 1;
1259 return t;
1260 }
1261
1262 if (TREE_CODE (arg1) == COMPLEX_CST)
1263 {
1264 tree type = TREE_TYPE (arg1);
1265 tree r1 = TREE_REALPART (arg1);
1266 tree i1 = TREE_IMAGPART (arg1);
1267 tree r2 = TREE_REALPART (arg2);
1268 tree i2 = TREE_IMAGPART (arg2);
1269 tree real, imag;
1270
1271 switch (code)
1272 {
1273 case PLUS_EXPR:
1274 case MINUS_EXPR:
1275 real = const_binop (code, r1, r2);
1276 imag = const_binop (code, i1, i2);
1277 break;
1278
1279 case MULT_EXPR:
1280 if (COMPLEX_FLOAT_TYPE_P (type))
1281 return do_mpc_arg2 (arg1, arg2, type,
1282 /* do_nonfinite= */ folding_initializer,
1283 mpc_mul);
1284
1285 real = const_binop (MINUS_EXPR,
1286 const_binop (MULT_EXPR, r1, r2),
1287 const_binop (MULT_EXPR, i1, i2));
1288 imag = const_binop (PLUS_EXPR,
1289 const_binop (MULT_EXPR, r1, i2),
1290 const_binop (MULT_EXPR, i1, r2));
1291 break;
1292
1293 case RDIV_EXPR:
1294 if (COMPLEX_FLOAT_TYPE_P (type))
1295 return do_mpc_arg2 (arg1, arg2, type,
1296 /* do_nonfinite= */ folding_initializer,
1297 mpc_div);
1298 /* Fallthru ... */
1299 case TRUNC_DIV_EXPR:
1300 case CEIL_DIV_EXPR:
1301 case FLOOR_DIV_EXPR:
1302 case ROUND_DIV_EXPR:
1303 if (flag_complex_method == 0)
1304 {
1305 /* Keep this algorithm in sync with
1306 tree-complex.c:expand_complex_div_straight().
1307
1308 Expand complex division to scalars, straightforward algorithm.
1309 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1310 t = br*br + bi*bi
1311 */
1312 tree magsquared
1313 = const_binop (PLUS_EXPR,
1314 const_binop (MULT_EXPR, r2, r2),
1315 const_binop (MULT_EXPR, i2, i2));
1316 tree t1
1317 = const_binop (PLUS_EXPR,
1318 const_binop (MULT_EXPR, r1, r2),
1319 const_binop (MULT_EXPR, i1, i2));
1320 tree t2
1321 = const_binop (MINUS_EXPR,
1322 const_binop (MULT_EXPR, i1, r2),
1323 const_binop (MULT_EXPR, r1, i2));
1324
1325 real = const_binop (code, t1, magsquared);
1326 imag = const_binop (code, t2, magsquared);
1327 }
1328 else
1329 {
1330 /* Keep this algorithm in sync with
1331 tree-complex.c:expand_complex_div_wide().
1332
1333 Expand complex division to scalars, modified algorithm to minimize
1334 overflow with wide input ranges. */
1335 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1336 fold_abs_const (r2, TREE_TYPE (type)),
1337 fold_abs_const (i2, TREE_TYPE (type)));
1338
1339 if (integer_nonzerop (compare))
1340 {
1341 /* In the TRUE branch, we compute
1342 ratio = br/bi;
1343 div = (br * ratio) + bi;
1344 tr = (ar * ratio) + ai;
1345 ti = (ai * ratio) - ar;
1346 tr = tr / div;
1347 ti = ti / div; */
1348 tree ratio = const_binop (code, r2, i2);
1349 tree div = const_binop (PLUS_EXPR, i2,
1350 const_binop (MULT_EXPR, r2, ratio));
1351 real = const_binop (MULT_EXPR, r1, ratio);
1352 real = const_binop (PLUS_EXPR, real, i1);
1353 real = const_binop (code, real, div);
1354
1355 imag = const_binop (MULT_EXPR, i1, ratio);
1356 imag = const_binop (MINUS_EXPR, imag, r1);
1357 imag = const_binop (code, imag, div);
1358 }
1359 else
1360 {
1361 /* In the FALSE branch, we compute
1362 ratio = d/c;
1363 		 div = (d * ratio) + c;
1364 tr = (b * ratio) + a;
1365 ti = b - (a * ratio);
1366 tr = tr / div;
1367 ti = ti / div; */
1368 tree ratio = const_binop (code, i2, r2);
1369 tree div = const_binop (PLUS_EXPR, r2,
1370 const_binop (MULT_EXPR, i2, ratio));
1371
1372 real = const_binop (MULT_EXPR, i1, ratio);
1373 real = const_binop (PLUS_EXPR, real, r1);
1374 real = const_binop (code, real, div);
1375
1376 imag = const_binop (MULT_EXPR, r1, ratio);
1377 imag = const_binop (MINUS_EXPR, i1, imag);
1378 imag = const_binop (code, imag, div);
1379 }
1380 }
1381 break;
1382
1383 default:
1384 return NULL_TREE;
1385 }
1386
1387 if (real && imag)
1388 return build_complex (type, real, imag);
1389 }
1390
1391 if (TREE_CODE (arg1) == VECTOR_CST
1392 && TREE_CODE (arg2) == VECTOR_CST)
1393 {
1394 tree type = TREE_TYPE (arg1);
1395 int count = TYPE_VECTOR_SUBPARTS (type), i;
1396 tree *elts = XALLOCAVEC (tree, count);
1397
1398 for (i = 0; i < count; i++)
1399 {
1400 tree elem1 = VECTOR_CST_ELT (arg1, i);
1401 tree elem2 = VECTOR_CST_ELT (arg2, i);
1402
1403 elts[i] = const_binop (code, elem1, elem2);
1404
1405 /* It is possible that const_binop cannot handle the given
1406 	 code and will return NULL_TREE. */
1407 if (elts[i] == NULL_TREE)
1408 return NULL_TREE;
1409 }
1410
1411 return build_vector (type, elts);
1412 }
1413
1414 /* Shifts allow a scalar offset for a vector. */
1415 if (TREE_CODE (arg1) == VECTOR_CST
1416 && TREE_CODE (arg2) == INTEGER_CST)
1417 {
1418 tree type = TREE_TYPE (arg1);
1419 int count = TYPE_VECTOR_SUBPARTS (type), i;
1420 tree *elts = XALLOCAVEC (tree, count);
1421
1422 if (code == VEC_RSHIFT_EXPR)
1423 {
1424 if (!tree_fits_uhwi_p (arg2))
1425 return NULL_TREE;
1426
1427 unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
1428 unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
1429 unsigned HOST_WIDE_INT innerc
1430 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
1431 if (shiftc >= outerc || (shiftc % innerc) != 0)
1432 return NULL_TREE;
1433 int offset = shiftc / innerc;
1434 /* The direction of VEC_RSHIFT_EXPR is endian dependent.
1435 	     For reductions, if !BYTES_BIG_ENDIAN then the compiler picks the
1436 	     first vector element, but the last element if BYTES_BIG_ENDIAN. */
1437 if (BYTES_BIG_ENDIAN)
1438 offset = -offset;
1439 tree zero = build_zero_cst (TREE_TYPE (type));
1440 for (i = 0; i < count; i++)
1441 {
1442 if (i + offset < 0 || i + offset >= count)
1443 elts[i] = zero;
1444 else
1445 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1446 }
1447 }
1448 else
1449 for (i = 0; i < count; i++)
1450 {
1451 tree elem1 = VECTOR_CST_ELT (arg1, i);
1452
1453 elts[i] = const_binop (code, elem1, arg2);
1454
1455 /* It is possible that const_binop cannot handle the given
1456 	     code and will return NULL_TREE. */
1457 if (elts[i] == NULL_TREE)
1458 return NULL_TREE;
1459 }
1460
1461 return build_vector (type, elts);
1462 }
1463 return NULL_TREE;
1464 }
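/* Annotation (not in the original source): a worked example of the
   flag_complex_method == 0 division path in const_binop above:
   (1 + 2i) / (3 + 4i) gives t = 3*3 + 4*4 = 25,
   real = (1*3 + 2*4) / 25 = 0.44 and imag = (2*3 - 1*4) / 25 = 0.08,
   i.e. 0.44 + 0.08i.  */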
1465
1466 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1467 indicates which particular sizetype to create. */
1468
1469 tree
1470 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1471 {
1472 return build_int_cst (sizetype_tab[(int) kind], number);
1473 }
1474 \f
1475 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1476 is a tree code. The type of the result is taken from the operands.
1477 Both must be equivalent integer types, ala int_binop_types_match_p.
1478 If the operands are constant, so is the result. */
1479
1480 tree
1481 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1482 {
1483 tree type = TREE_TYPE (arg0);
1484
1485 if (arg0 == error_mark_node || arg1 == error_mark_node)
1486 return error_mark_node;
1487
1488 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1489 TREE_TYPE (arg1)));
1490
1491 /* Handle the special case of two integer constants faster. */
1492 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1493 {
1494 /* And some specific cases even faster than that. */
1495 if (code == PLUS_EXPR)
1496 {
1497 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1498 return arg1;
1499 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1500 return arg0;
1501 }
1502 else if (code == MINUS_EXPR)
1503 {
1504 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1505 return arg0;
1506 }
1507 else if (code == MULT_EXPR)
1508 {
1509 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1510 return arg1;
1511 }
1512
1513 /* Handle general case of two integer constants. For sizetype
1514 constant calculations we always want to know about overflow,
1515 even in the unsigned case. */
1516 return int_const_binop_1 (code, arg0, arg1, -1);
1517 }
1518
1519 return fold_build2_loc (loc, code, type, arg0, arg1);
1520 }
1521
1522 /* Given two values, either both of sizetype or both of bitsizetype,
1523 compute the difference between the two values. Return the value
1524 in signed type corresponding to the type of the operands. */
1525
1526 tree
1527 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1528 {
1529 tree type = TREE_TYPE (arg0);
1530 tree ctype;
1531
1532 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1533 TREE_TYPE (arg1)));
1534
1535 /* If the type is already signed, just do the simple thing. */
1536 if (!TYPE_UNSIGNED (type))
1537 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1538
1539 if (type == sizetype)
1540 ctype = ssizetype;
1541 else if (type == bitsizetype)
1542 ctype = sbitsizetype;
1543 else
1544 ctype = signed_type_for (type);
1545
1546 /* If either operand is not a constant, do the conversions to the signed
1547 type and subtract. The hardware will do the right thing with any
1548 overflow in the subtraction. */
1549 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1550 return size_binop_loc (loc, MINUS_EXPR,
1551 fold_convert_loc (loc, ctype, arg0),
1552 fold_convert_loc (loc, ctype, arg1));
1553
1554 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1555 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1556 overflow) and negate (which can't either). Special-case a result
1557 of zero while we're here. */
1558 if (tree_int_cst_equal (arg0, arg1))
1559 return build_int_cst (ctype, 0);
1560 else if (tree_int_cst_lt (arg1, arg0))
1561 return fold_convert_loc (loc, ctype,
1562 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1563 else
1564 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1565 fold_convert_loc (loc, ctype,
1566 size_binop_loc (loc,
1567 MINUS_EXPR,
1568 arg1, arg0)));
1569 }
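/* Annotation (not in the original source): sizetype is unsigned, so
   for arg0 = 5 and arg1 = 7 the code above computes 7 - 5 = 2 in
   sizetype, converts it to the signed ssizetype and negates, yielding
   -2 rather than the huge wrapped-around unsigned difference.  */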
1570 \f
1571 /* A subroutine of fold_convert_const handling conversions of an
1572 INTEGER_CST to another integer type. */
1573
1574 static tree
1575 fold_convert_const_int_from_int (tree type, const_tree arg1)
1576 {
1577 /* Given an integer constant, make new constant with new type,
1578 appropriately sign-extended or truncated. Use widest_int
1579      so that any extension is done according to ARG1's type. */
1580 return force_fit_type (type, wi::to_widest (arg1),
1581 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1582 TREE_OVERFLOW (arg1));
1583 }
1584
1585 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1586 to an integer type. */
1587
1588 static tree
1589 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1590 {
1591 bool overflow = false;
1592 tree t;
1593
1594 /* The following code implements the floating point to integer
1595 conversion rules required by the Java Language Specification,
1596 that IEEE NaNs are mapped to zero and values that overflow
1597 the target precision saturate, i.e. values greater than
1598 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1599 are mapped to INT_MIN. These semantics are allowed by the
1600 C and C++ standards that simply state that the behavior of
1601 FP-to-integer conversion is unspecified upon overflow. */
1602
1603 wide_int val;
1604 REAL_VALUE_TYPE r;
1605 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1606
1607 switch (code)
1608 {
1609 case FIX_TRUNC_EXPR:
1610 real_trunc (&r, VOIDmode, &x);
1611 break;
1612
1613 default:
1614 gcc_unreachable ();
1615 }
1616
1617 /* If R is NaN, return zero and show we have an overflow. */
1618 if (REAL_VALUE_ISNAN (r))
1619 {
1620 overflow = true;
1621 val = wi::zero (TYPE_PRECISION (type));
1622 }
1623
1624 /* See if R is less than the lower bound or greater than the
1625 upper bound. */
1626
1627 if (! overflow)
1628 {
1629 tree lt = TYPE_MIN_VALUE (type);
1630 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1631 if (REAL_VALUES_LESS (r, l))
1632 {
1633 overflow = true;
1634 val = lt;
1635 }
1636 }
1637
1638 if (! overflow)
1639 {
1640 tree ut = TYPE_MAX_VALUE (type);
1641 if (ut)
1642 {
1643 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1644 if (REAL_VALUES_LESS (u, r))
1645 {
1646 overflow = true;
1647 val = ut;
1648 }
1649 }
1650 }
1651
1652 if (! overflow)
1653 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1654
1655 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1656 return t;
1657 }
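/* Annotation (not in the original source): examples of the saturating
   semantics described above, for a 32-bit int target type: converting
   1.0e10 yields INT_MAX (2147483647) with TREE_OVERFLOW set,
   -1.0e10 yields INT_MIN, and a NaN yields 0, also with
   TREE_OVERFLOW set.  */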
1658
1659 /* A subroutine of fold_convert_const handling conversions of a
1660 FIXED_CST to an integer type. */
1661
1662 static tree
1663 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1664 {
1665 tree t;
1666 double_int temp, temp_trunc;
1667 unsigned int mode;
1668
1669 /* Right shift FIXED_CST to temp by fbit. */
1670 temp = TREE_FIXED_CST (arg1).data;
1671 mode = TREE_FIXED_CST (arg1).mode;
1672 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1673 {
1674 temp = temp.rshift (GET_MODE_FBIT (mode),
1675 HOST_BITS_PER_DOUBLE_INT,
1676 SIGNED_FIXED_POINT_MODE_P (mode));
1677
1678 /* Left shift temp to temp_trunc by fbit. */
1679 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1680 HOST_BITS_PER_DOUBLE_INT,
1681 SIGNED_FIXED_POINT_MODE_P (mode));
1682 }
1683 else
1684 {
1685 temp = double_int_zero;
1686 temp_trunc = double_int_zero;
1687 }
1688
1689 /* If FIXED_CST is negative, we need to round the value toward 0.
1690      We do this by adding 1 to temp when the discarded fractional
          bits are nonzero. */
1691 if (SIGNED_FIXED_POINT_MODE_P (mode)
1692 && temp_trunc.is_negative ()
1693 && TREE_FIXED_CST (arg1).data != temp_trunc)
1694 temp += double_int_one;
1695
1696 /* Given a fixed-point constant, make new constant with new type,
1697 appropriately sign-extended or truncated. */
1698 t = force_fit_type (type, temp, -1,
1699 (temp.is_negative ()
1700 && (TYPE_UNSIGNED (type)
1701 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1702 | TREE_OVERFLOW (arg1));
1703
1704 return t;
1705 }
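/* Annotation (not in the original source): converting the fixed-point
   value -2.5 illustrates the rounding above.  The arithmetic right
   shift alone yields floor(-2.5) = -3; because the discarded fraction
   bits are nonzero and the value is negative, 1 is added back, giving
   -2, i.e. rounding toward zero.  */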
1706
1707 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1708 to another floating point type. */
1709
1710 static tree
1711 fold_convert_const_real_from_real (tree type, const_tree arg1)
1712 {
1713 REAL_VALUE_TYPE value;
1714 tree t;
1715
1716 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1717 t = build_real (type, value);
1718
1719 /* If converting an infinity or NAN to a representation that doesn't
1720 have one, set the overflow bit so that we can produce some kind of
1721 error message at the appropriate point if necessary. It's not the
1722 most user-friendly message, but it's better than nothing. */
1723 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1724 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1725 TREE_OVERFLOW (t) = 1;
1726 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1727 && !MODE_HAS_NANS (TYPE_MODE (type)))
1728 TREE_OVERFLOW (t) = 1;
1729   /* Regular overflow: the conversion produced an infinity in a mode
1730      that can't represent infinities. */
1731 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1732 && REAL_VALUE_ISINF (value)
1733 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1734 TREE_OVERFLOW (t) = 1;
1735 else
1736 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1737 return t;
1738 }
1739
1740 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1741 to a floating point type. */
1742
1743 static tree
1744 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1745 {
1746 REAL_VALUE_TYPE value;
1747 tree t;
1748
1749 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1750 t = build_real (type, value);
1751
1752 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1753 return t;
1754 }
1755
1756 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1757 to another fixed-point type. */
1758
1759 static tree
1760 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1761 {
1762 FIXED_VALUE_TYPE value;
1763 tree t;
1764 bool overflow_p;
1765
1766 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1767 TYPE_SATURATING (type));
1768 t = build_fixed (type, value);
1769
1770 /* Propagate overflow flags. */
1771 if (overflow_p | TREE_OVERFLOW (arg1))
1772 TREE_OVERFLOW (t) = 1;
1773 return t;
1774 }
1775
1776 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1777 to a fixed-point type. */
1778
1779 static tree
1780 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1781 {
1782 FIXED_VALUE_TYPE value;
1783 tree t;
1784 bool overflow_p;
1785 double_int di;
1786
1787 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
1788
1789 di.low = TREE_INT_CST_ELT (arg1, 0);
1790 if (TREE_INT_CST_NUNITS (arg1) == 1)
1791 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
1792 else
1793 di.high = TREE_INT_CST_ELT (arg1, 1);
1794
1795 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
1796 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1797 TYPE_SATURATING (type));
1798 t = build_fixed (type, value);
1799
1800 /* Propagate overflow flags. */
1801 if (overflow_p | TREE_OVERFLOW (arg1))
1802 TREE_OVERFLOW (t) = 1;
1803 return t;
1804 }
1805
1806 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1807 to a fixed-point type. */
1808
1809 static tree
1810 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1811 {
1812 FIXED_VALUE_TYPE value;
1813 tree t;
1814 bool overflow_p;
1815
1816 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1817 &TREE_REAL_CST (arg1),
1818 TYPE_SATURATING (type));
1819 t = build_fixed (type, value);
1820
1821 /* Propagate overflow flags. */
1822 if (overflow_p | TREE_OVERFLOW (arg1))
1823 TREE_OVERFLOW (t) = 1;
1824 return t;
1825 }
1826
1827 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1828 type TYPE. If no simplification can be done return NULL_TREE. */
1829
1830 static tree
1831 fold_convert_const (enum tree_code code, tree type, tree arg1)
1832 {
1833 if (TREE_TYPE (arg1) == type)
1834 return arg1;
1835
1836 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1837 || TREE_CODE (type) == OFFSET_TYPE)
1838 {
1839 if (TREE_CODE (arg1) == INTEGER_CST)
1840 return fold_convert_const_int_from_int (type, arg1);
1841 else if (TREE_CODE (arg1) == REAL_CST)
1842 return fold_convert_const_int_from_real (code, type, arg1);
1843 else if (TREE_CODE (arg1) == FIXED_CST)
1844 return fold_convert_const_int_from_fixed (type, arg1);
1845 }
1846 else if (TREE_CODE (type) == REAL_TYPE)
1847 {
1848 if (TREE_CODE (arg1) == INTEGER_CST)
1849 return build_real_from_int_cst (type, arg1);
1850 else if (TREE_CODE (arg1) == REAL_CST)
1851 return fold_convert_const_real_from_real (type, arg1);
1852 else if (TREE_CODE (arg1) == FIXED_CST)
1853 return fold_convert_const_real_from_fixed (type, arg1);
1854 }
1855 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1856 {
1857 if (TREE_CODE (arg1) == FIXED_CST)
1858 return fold_convert_const_fixed_from_fixed (type, arg1);
1859 else if (TREE_CODE (arg1) == INTEGER_CST)
1860 return fold_convert_const_fixed_from_int (type, arg1);
1861 else if (TREE_CODE (arg1) == REAL_CST)
1862 return fold_convert_const_fixed_from_real (type, arg1);
1863 }
1864 return NULL_TREE;
1865 }
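
/* Illustrative sketch (not part of this file): the conversions folded
   by fold_convert_const mirror C's own conversion semantics.  The
   hypothetical standalone program below shows the REAL_CST -> INTEGER_CST
   direction, which truncates toward zero as a FIX_TRUNC_EXPR does.  */
#if 0
#include <assert.h>

int
main (void)
{
  /* Truncation toward zero, for both signs.  */
  assert ((int) 1.9 == 1);
  assert ((int) -1.9 == -1);
  /* INTEGER_CST -> REAL_CST is exact while the value fits the
     significand.  */
  assert ((double) 16777216 == 16777216.0);
  return 0;
}
#endif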
1866
1867 /* Construct a vector of zero elements of vector type TYPE. */
1868
1869 static tree
1870 build_zero_vector (tree type)
1871 {
1872 tree t;
1873
1874 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1875 return build_vector_from_val (type, t);
1876 }
1877
1878 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1879
1880 bool
1881 fold_convertible_p (const_tree type, const_tree arg)
1882 {
1883 tree orig = TREE_TYPE (arg);
1884
1885 if (type == orig)
1886 return true;
1887
1888 if (TREE_CODE (arg) == ERROR_MARK
1889 || TREE_CODE (type) == ERROR_MARK
1890 || TREE_CODE (orig) == ERROR_MARK)
1891 return false;
1892
1893 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1894 return true;
1895
1896 switch (TREE_CODE (type))
1897 {
1898 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1899 case POINTER_TYPE: case REFERENCE_TYPE:
1900 case OFFSET_TYPE:
1901 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1902 || TREE_CODE (orig) == OFFSET_TYPE)
1903 return true;
1904 return (TREE_CODE (orig) == VECTOR_TYPE
1905 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1906
1907 case REAL_TYPE:
1908 case FIXED_POINT_TYPE:
1909 case COMPLEX_TYPE:
1910 case VECTOR_TYPE:
1911 case VOID_TYPE:
1912 return TREE_CODE (type) == TREE_CODE (orig);
1913
1914 default:
1915 return false;
1916 }
1917 }
1918
1919 /* Convert expression ARG to type TYPE. Used by the middle-end for
1920 simple conversions in preference to calling the front-end's convert. */
1921
1922 tree
1923 fold_convert_loc (location_t loc, tree type, tree arg)
1924 {
1925 tree orig = TREE_TYPE (arg);
1926 tree tem;
1927
1928 if (type == orig)
1929 return arg;
1930
1931 if (TREE_CODE (arg) == ERROR_MARK
1932 || TREE_CODE (type) == ERROR_MARK
1933 || TREE_CODE (orig) == ERROR_MARK)
1934 return error_mark_node;
1935
1936 switch (TREE_CODE (type))
1937 {
1938 case POINTER_TYPE:
1939 case REFERENCE_TYPE:
1940 /* Handle conversions between pointers to different address spaces. */
1941 if (POINTER_TYPE_P (orig)
1942 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1943 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1944 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1945 /* fall through */
1946
1947 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1948 case OFFSET_TYPE:
1949 if (TREE_CODE (arg) == INTEGER_CST)
1950 {
1951 tem = fold_convert_const (NOP_EXPR, type, arg);
1952 if (tem != NULL_TREE)
1953 return tem;
1954 }
1955 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1956 || TREE_CODE (orig) == OFFSET_TYPE)
1957 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1958 if (TREE_CODE (orig) == COMPLEX_TYPE)
1959 return fold_convert_loc (loc, type,
1960 fold_build1_loc (loc, REALPART_EXPR,
1961 TREE_TYPE (orig), arg));
1962 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1963 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1964 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1965
1966 case REAL_TYPE:
1967 if (TREE_CODE (arg) == INTEGER_CST)
1968 {
1969 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1970 if (tem != NULL_TREE)
1971 return tem;
1972 }
1973 else if (TREE_CODE (arg) == REAL_CST)
1974 {
1975 tem = fold_convert_const (NOP_EXPR, type, arg);
1976 if (tem != NULL_TREE)
1977 return tem;
1978 }
1979 else if (TREE_CODE (arg) == FIXED_CST)
1980 {
1981 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1982 if (tem != NULL_TREE)
1983 return tem;
1984 }
1985
1986 switch (TREE_CODE (orig))
1987 {
1988 case INTEGER_TYPE:
1989 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1990 case POINTER_TYPE: case REFERENCE_TYPE:
1991 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1992
1993 case REAL_TYPE:
1994 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1995
1996 case FIXED_POINT_TYPE:
1997 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1998
1999 case COMPLEX_TYPE:
2000 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2001 return fold_convert_loc (loc, type, tem);
2002
2003 default:
2004 gcc_unreachable ();
2005 }
2006
2007 case FIXED_POINT_TYPE:
2008 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2009 || TREE_CODE (arg) == REAL_CST)
2010 {
2011 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2012 if (tem != NULL_TREE)
2013 goto fold_convert_exit;
2014 }
2015
2016 switch (TREE_CODE (orig))
2017 {
2018 case FIXED_POINT_TYPE:
2019 case INTEGER_TYPE:
2020 case ENUMERAL_TYPE:
2021 case BOOLEAN_TYPE:
2022 case REAL_TYPE:
2023 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2024
2025 case COMPLEX_TYPE:
2026 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2027 return fold_convert_loc (loc, type, tem);
2028
2029 default:
2030 gcc_unreachable ();
2031 }
2032
2033 case COMPLEX_TYPE:
2034 switch (TREE_CODE (orig))
2035 {
2036 case INTEGER_TYPE:
2037 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2038 case POINTER_TYPE: case REFERENCE_TYPE:
2039 case REAL_TYPE:
2040 case FIXED_POINT_TYPE:
2041 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2042 fold_convert_loc (loc, TREE_TYPE (type), arg),
2043 fold_convert_loc (loc, TREE_TYPE (type),
2044 integer_zero_node));
2045 case COMPLEX_TYPE:
2046 {
2047 tree rpart, ipart;
2048
2049 if (TREE_CODE (arg) == COMPLEX_EXPR)
2050 {
2051 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2052 TREE_OPERAND (arg, 0));
2053 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2054 TREE_OPERAND (arg, 1));
2055 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2056 }
2057
2058 arg = save_expr (arg);
2059 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2060 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2061 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2062 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2063 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2064 }
2065
2066 default:
2067 gcc_unreachable ();
2068 }
2069
2070 case VECTOR_TYPE:
2071 if (integer_zerop (arg))
2072 return build_zero_vector (type);
2073 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2074 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2075 || TREE_CODE (orig) == VECTOR_TYPE);
2076 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2077
2078 case VOID_TYPE:
2079 tem = fold_ignored_result (arg);
2080 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2081
2082 default:
2083 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2084 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2085 gcc_unreachable ();
2086 }
2087 fold_convert_exit:
2088 protected_set_expr_location_unshare (tem, loc);
2089 return tem;
2090 }
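
/* Illustrative sketch (not part of this file): the COMPLEX_TYPE cases
   above convert through REALPART_EXPR because converting a complex
   value to a scalar type keeps only its real part, matching C99.  A
   hypothetical standalone check:  */
#if 0
#include <assert.h>
#include <complex.h>

int
main (void)
{
  double _Complex z = 3.0 + 4.0 * I;
  /* The imaginary part is simply discarded.  */
  assert ((double) z == 3.0);
  assert ((int) z == 3);
  return 0;
}
#endif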
2091 \f
2092 /* Return false if expr can be assumed not to be an lvalue, true
2093 otherwise. */
2094
2095 static bool
2096 maybe_lvalue_p (const_tree x)
2097 {
2098 /* We only need to wrap lvalue tree codes. */
2099 switch (TREE_CODE (x))
2100 {
2101 case VAR_DECL:
2102 case PARM_DECL:
2103 case RESULT_DECL:
2104 case LABEL_DECL:
2105 case FUNCTION_DECL:
2106 case SSA_NAME:
2107
2108 case COMPONENT_REF:
2109 case MEM_REF:
2110 case INDIRECT_REF:
2111 case ARRAY_REF:
2112 case ARRAY_RANGE_REF:
2113 case BIT_FIELD_REF:
2114 case OBJ_TYPE_REF:
2115
2116 case REALPART_EXPR:
2117 case IMAGPART_EXPR:
2118 case PREINCREMENT_EXPR:
2119 case PREDECREMENT_EXPR:
2120 case SAVE_EXPR:
2121 case TRY_CATCH_EXPR:
2122 case WITH_CLEANUP_EXPR:
2123 case COMPOUND_EXPR:
2124 case MODIFY_EXPR:
2125 case TARGET_EXPR:
2126 case COND_EXPR:
2127 case BIND_EXPR:
2128 break;
2129
2130 default:
2131 /* Assume the worst for front-end tree codes. */
2132 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2133 break;
2134 return false;
2135 }
2136
2137 return true;
2138 }
2139
2140 /* Return an expr equal to X but certainly not valid as an lvalue. */
2141
2142 tree
2143 non_lvalue_loc (location_t loc, tree x)
2144 {
2145 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2146 us. */
2147 if (in_gimple_form)
2148 return x;
2149
2150 if (! maybe_lvalue_p (x))
2151 return x;
2152 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2153 }
2154
2155 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2156 Zero means allow extended lvalues. */
2157
2158 int pedantic_lvalues;
2159
2160 /* When pedantic, return an expr equal to X but certainly not valid as a
2161 pedantic lvalue. Otherwise, return X. */
2162
2163 static tree
2164 pedantic_non_lvalue_loc (location_t loc, tree x)
2165 {
2166 if (pedantic_lvalues)
2167 return non_lvalue_loc (loc, x);
2168
2169 return protected_set_expr_location_unshare (x, loc);
2170 }
2171 \f
2172 /* Given a tree comparison code, return the code that is the logical inverse.
2173 It is generally not safe to do this for floating-point comparisons, except
2174 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2175 ERROR_MARK in this case. */
2176
2177 enum tree_code
2178 invert_tree_comparison (enum tree_code code, bool honor_nans)
2179 {
2180 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2181 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2182 return ERROR_MARK;
2183
2184 switch (code)
2185 {
2186 case EQ_EXPR:
2187 return NE_EXPR;
2188 case NE_EXPR:
2189 return EQ_EXPR;
2190 case GT_EXPR:
2191 return honor_nans ? UNLE_EXPR : LE_EXPR;
2192 case GE_EXPR:
2193 return honor_nans ? UNLT_EXPR : LT_EXPR;
2194 case LT_EXPR:
2195 return honor_nans ? UNGE_EXPR : GE_EXPR;
2196 case LE_EXPR:
2197 return honor_nans ? UNGT_EXPR : GT_EXPR;
2198 case LTGT_EXPR:
2199 return UNEQ_EXPR;
2200 case UNEQ_EXPR:
2201 return LTGT_EXPR;
2202 case UNGT_EXPR:
2203 return LE_EXPR;
2204 case UNGE_EXPR:
2205 return LT_EXPR;
2206 case UNLT_EXPR:
2207 return GE_EXPR;
2208 case UNLE_EXPR:
2209 return GT_EXPR;
2210 case ORDERED_EXPR:
2211 return UNORDERED_EXPR;
2212 case UNORDERED_EXPR:
2213 return ORDERED_EXPR;
2214 default:
2215 gcc_unreachable ();
2216 }
2217 }
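
/* Illustrative sketch (not part of this file): why naively returning
   GE_EXPR as the inverse of LT_EXPR is wrong once NaNs are honored.
   With a NaN operand both x < y and x >= y are false, so the inverse
   of LT_EXPR must be UNGE_EXPR as returned above.  A hypothetical
   standalone demonstration:  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = NAN, y = 1.0;
  assert (!(x < y));                       /* LT is false when unordered...  */
  assert (!(x >= y));                      /* ...but so is GE.  */
  assert (isunordered (x, y) || x >= y);   /* UNGE really inverts LT.  */
  return 0;
}
#endif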
2218
2219 /* Similar, but return the comparison that results if the operands are
2220 swapped. This is safe for floating-point. */
2221
2222 enum tree_code
2223 swap_tree_comparison (enum tree_code code)
2224 {
2225 switch (code)
2226 {
2227 case EQ_EXPR:
2228 case NE_EXPR:
2229 case ORDERED_EXPR:
2230 case UNORDERED_EXPR:
2231 case LTGT_EXPR:
2232 case UNEQ_EXPR:
2233 return code;
2234 case GT_EXPR:
2235 return LT_EXPR;
2236 case GE_EXPR:
2237 return LE_EXPR;
2238 case LT_EXPR:
2239 return GT_EXPR;
2240 case LE_EXPR:
2241 return GE_EXPR;
2242 case UNGT_EXPR:
2243 return UNLT_EXPR;
2244 case UNGE_EXPR:
2245 return UNLE_EXPR;
2246 case UNLT_EXPR:
2247 return UNGT_EXPR;
2248 case UNLE_EXPR:
2249 return UNGE_EXPR;
2250 default:
2251 gcc_unreachable ();
2252 }
2253 }
2254
2255
2256 /* Convert a comparison tree code from an enum tree_code representation
2257 into a compcode bit-based encoding. This function is the inverse of
2258 compcode_to_comparison. */
2259
2260 static enum comparison_code
2261 comparison_to_compcode (enum tree_code code)
2262 {
2263 switch (code)
2264 {
2265 case LT_EXPR:
2266 return COMPCODE_LT;
2267 case EQ_EXPR:
2268 return COMPCODE_EQ;
2269 case LE_EXPR:
2270 return COMPCODE_LE;
2271 case GT_EXPR:
2272 return COMPCODE_GT;
2273 case NE_EXPR:
2274 return COMPCODE_NE;
2275 case GE_EXPR:
2276 return COMPCODE_GE;
2277 case ORDERED_EXPR:
2278 return COMPCODE_ORD;
2279 case UNORDERED_EXPR:
2280 return COMPCODE_UNORD;
2281 case UNLT_EXPR:
2282 return COMPCODE_UNLT;
2283 case UNEQ_EXPR:
2284 return COMPCODE_UNEQ;
2285 case UNLE_EXPR:
2286 return COMPCODE_UNLE;
2287 case UNGT_EXPR:
2288 return COMPCODE_UNGT;
2289 case LTGT_EXPR:
2290 return COMPCODE_LTGT;
2291 case UNGE_EXPR:
2292 return COMPCODE_UNGE;
2293 default:
2294 gcc_unreachable ();
2295 }
2296 }
2297
2298 /* Convert a compcode bit-based encoding of a comparison operator back
2299 to GCC's enum tree_code representation. This function is the
2300 inverse of comparison_to_compcode. */
2301
2302 static enum tree_code
2303 compcode_to_comparison (enum comparison_code code)
2304 {
2305 switch (code)
2306 {
2307 case COMPCODE_LT:
2308 return LT_EXPR;
2309 case COMPCODE_EQ:
2310 return EQ_EXPR;
2311 case COMPCODE_LE:
2312 return LE_EXPR;
2313 case COMPCODE_GT:
2314 return GT_EXPR;
2315 case COMPCODE_NE:
2316 return NE_EXPR;
2317 case COMPCODE_GE:
2318 return GE_EXPR;
2319 case COMPCODE_ORD:
2320 return ORDERED_EXPR;
2321 case COMPCODE_UNORD:
2322 return UNORDERED_EXPR;
2323 case COMPCODE_UNLT:
2324 return UNLT_EXPR;
2325 case COMPCODE_UNEQ:
2326 return UNEQ_EXPR;
2327 case COMPCODE_UNLE:
2328 return UNLE_EXPR;
2329 case COMPCODE_UNGT:
2330 return UNGT_EXPR;
2331 case COMPCODE_LTGT:
2332 return LTGT_EXPR;
2333 case COMPCODE_UNGE:
2334 return UNGE_EXPR;
2335 default:
2336 gcc_unreachable ();
2337 }
2338 }
2339
2340 /* Return a tree for the comparison which is the combination of
2341 doing the AND or OR (depending on CODE) of the two operations LCODE
2342 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2343 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2344 if this makes the transformation invalid. */
2345
2346 tree
2347 combine_comparisons (location_t loc,
2348 enum tree_code code, enum tree_code lcode,
2349 enum tree_code rcode, tree truth_type,
2350 tree ll_arg, tree lr_arg)
2351 {
2352 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2353 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2354 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2355 int compcode;
2356
2357 switch (code)
2358 {
2359 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2360 compcode = lcompcode & rcompcode;
2361 break;
2362
2363 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2364 compcode = lcompcode | rcompcode;
2365 break;
2366
2367 default:
2368 return NULL_TREE;
2369 }
2370
2371 if (!honor_nans)
2372 {
2373 /* Eliminate unordered comparisons, as well as LTGT and ORD
2374 which are not used unless the mode has NaNs. */
2375 compcode &= ~COMPCODE_UNORD;
2376 if (compcode == COMPCODE_LTGT)
2377 compcode = COMPCODE_NE;
2378 else if (compcode == COMPCODE_ORD)
2379 compcode = COMPCODE_TRUE;
2380 }
2381 else if (flag_trapping_math)
2382 {
2383 /* Check that the original operation and the optimized ones will trap
2384 under the same condition. */
2385 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2386 && (lcompcode != COMPCODE_EQ)
2387 && (lcompcode != COMPCODE_ORD);
2388 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2389 && (rcompcode != COMPCODE_EQ)
2390 && (rcompcode != COMPCODE_ORD);
2391 bool trap = (compcode & COMPCODE_UNORD) == 0
2392 && (compcode != COMPCODE_EQ)
2393 && (compcode != COMPCODE_ORD);
2394
2395 /* In a short-circuited boolean expression the LHS might be
2396 such that the RHS, if evaluated, will never trap. For
2397 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2398 if neither x nor y is NaN. (This is a mixed blessing: for
2399 example, the expression above will never trap, hence
2400 optimizing it to x < y would be invalid). */
2401 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2402 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2403 rtrap = false;
2404
2405 /* If the comparison was short-circuited, and only the RHS
2406 trapped, we may now generate a spurious trap. */
2407 if (rtrap && !ltrap
2408 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2409 return NULL_TREE;
2410
2411 /* If we changed the conditions that cause a trap, we lose. */
2412 if ((ltrap || rtrap) != trap)
2413 return NULL_TREE;
2414 }
2415
2416 if (compcode == COMPCODE_TRUE)
2417 return constant_boolean_node (true, truth_type);
2418 else if (compcode == COMPCODE_FALSE)
2419 return constant_boolean_node (false, truth_type);
2420 else
2421 {
2422 enum tree_code tcode;
2423
2424 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2425 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2426 }
2427 }
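
/* Illustrative sketch (not part of this file): the compcode encoding
   makes combining two comparisons of the same operands a plain bitwise
   AND or OR, as computed above.  With LT = 1, EQ = 2 and GT = 4 as in
   enum comparison_code, a hypothetical standalone check:  */
#if 0
#include <assert.h>

enum { CC_FALSE = 0, CC_LT = 1, CC_EQ = 2, CC_GT = 4 };

int
main (void)
{
  int cc_le = CC_LT | CC_EQ;   /* a <= b */
  int cc_ne = CC_LT | CC_GT;   /* a != b */

  /* (a <= b) && (a != b) simplifies to a < b.  */
  assert ((cc_le & cc_ne) == CC_LT);
  /* (a != b) && (a == b) is always false.  */
  assert ((cc_ne & CC_EQ) == CC_FALSE);
  /* (a <= b) || (a > b) is always true (ignoring NaNs).  */
  assert ((cc_le | CC_GT) == (CC_LT | CC_EQ | CC_GT));
  return 0;
}
#endif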
2428 \f
2429 /* Return nonzero if two operands (typically of the same tree node)
2430 are necessarily equal. If either argument has side-effects this
2431 function returns zero. FLAGS modifies behavior as follows:
2432
2433 If OEP_ONLY_CONST is set, only return nonzero for constants.
2434 This function tests whether the operands are indistinguishable;
2435 it does not test whether they are equal using C's == operation.
2436 The distinction is important for IEEE floating point, because
2437 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2438 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2439
2440 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2441 even though it may hold multiple values during a function.
2442 This is because a GCC tree node guarantees that nothing else is
2443 executed between the evaluation of its "operands" (which may often
2444 be evaluated in arbitrary order). Hence if the operands themselves
2445 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2446 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2447 unset means assuming isochronic (or instantaneous) tree equivalence.
2448 Unless comparing arbitrary expression trees, such as from different
2449 statements, this flag can usually be left unset.
2450
2451 If OEP_PURE_SAME is set, then pure functions with identical arguments
2452 are considered the same. It is used when the caller has other ways
2453 to ensure that global memory is unchanged in between. */
2454
2455 int
2456 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2457 {
2458 /* If either is ERROR_MARK, they aren't equal. */
2459 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2460 || TREE_TYPE (arg0) == error_mark_node
2461 || TREE_TYPE (arg1) == error_mark_node)
2462 return 0;
2463
2464 /* Similar, if either does not have a type (like a released SSA name),
2465 they aren't equal. */
2466 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2467 return 0;
2468
2469 /* Check equality of integer constants before bailing out due to
2470 precision differences. */
2471 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2472 return tree_int_cst_equal (arg0, arg1);
2473
2474 /* If the two types don't have the same signedness, then we can't consider
2475 them equal. We must check this before the STRIP_NOPS calls
2476 because they may change the signedness of the arguments. As pointers
2477 strictly don't have a signedness, require either two pointers or
2478 two non-pointers as well. */
2479 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2480 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2481 return 0;
2482
2483 /* We cannot consider pointers to different address space equal. */
2484 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2485 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2486 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2487 return 0;
2488
2489 /* If the two types don't have the same precision, then it is not safe
2490 to strip NOPs. */
2491 if (element_precision (TREE_TYPE (arg0))
2492 != element_precision (TREE_TYPE (arg1)))
2493 return 0;
2494
2495 STRIP_NOPS (arg0);
2496 STRIP_NOPS (arg1);
2497
2498 /* In case both args are comparisons but with different comparison
2499 code, try to swap the comparison operands of one arg to produce
2500 a match and compare that variant. */
2501 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2502 && COMPARISON_CLASS_P (arg0)
2503 && COMPARISON_CLASS_P (arg1))
2504 {
2505 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2506
2507 if (TREE_CODE (arg0) == swap_code)
2508 return operand_equal_p (TREE_OPERAND (arg0, 0),
2509 TREE_OPERAND (arg1, 1), flags)
2510 && operand_equal_p (TREE_OPERAND (arg0, 1),
2511 TREE_OPERAND (arg1, 0), flags);
2512 }
2513
2514 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2515 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2516 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2517 return 0;
2518
2519 /* This is needed for conversions and for COMPONENT_REF.
2520 Might as well play it safe and always test this. */
2521 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2522 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2523 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2524 return 0;
2525
2526 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2527 We don't care about side effects in that case because the SAVE_EXPR
2528 takes care of that for us. In all other cases, two expressions are
2529 equal if they have no side effects. If we have two identical
2530 expressions with side effects that should be treated the same due
2531 to the only side effects being identical SAVE_EXPR's, that will
2532 be detected in the recursive calls below.
2533 If we are taking an invariant address of two identical objects
2534 they are necessarily equal as well. */
2535 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2536 && (TREE_CODE (arg0) == SAVE_EXPR
2537 || (flags & OEP_CONSTANT_ADDRESS_OF)
2538 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2539 return 1;
2540
2541 /* Next handle constant cases, those for which we can return 1 even
2542 if ONLY_CONST is set. */
2543 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2544 switch (TREE_CODE (arg0))
2545 {
2546 case INTEGER_CST:
2547 return tree_int_cst_equal (arg0, arg1);
2548
2549 case FIXED_CST:
2550 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2551 TREE_FIXED_CST (arg1));
2552
2553 case REAL_CST:
2554 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2555 TREE_REAL_CST (arg1)))
2556 return 1;
2557
2558
2559 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2560 {
2561 /* If we do not distinguish between signed and unsigned zero,
2562 consider them equal. */
2563 if (real_zerop (arg0) && real_zerop (arg1))
2564 return 1;
2565 }
2566 return 0;
2567
2568 case VECTOR_CST:
2569 {
2570 unsigned i;
2571
2572 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2573 return 0;
2574
2575 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2576 {
2577 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2578 VECTOR_CST_ELT (arg1, i), flags))
2579 return 0;
2580 }
2581 return 1;
2582 }
2583
2584 case COMPLEX_CST:
2585 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2586 flags)
2587 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2588 flags));
2589
2590 case STRING_CST:
2591 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2592 && ! memcmp (TREE_STRING_POINTER (arg0),
2593 TREE_STRING_POINTER (arg1),
2594 TREE_STRING_LENGTH (arg0)));
2595
2596 case ADDR_EXPR:
2597 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2598 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2599 ? OEP_CONSTANT_ADDRESS_OF : 0);
2600 default:
2601 break;
2602 }
2603
2604 if (flags & OEP_ONLY_CONST)
2605 return 0;
2606
2607 /* Define macros to test an operand from arg0 and arg1 for equality and a
2608 variant that allows null and views null as being different from any
2609 non-null value. In the latter case, if either is null, they both
2610 must be; otherwise, do the normal comparison. */
2611 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2612 TREE_OPERAND (arg1, N), flags)
2613
2614 #define OP_SAME_WITH_NULL(N) \
2615 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2616 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2617
2618 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2619 {
2620 case tcc_unary:
2621 /* Two conversions are equal only if signedness and modes match. */
2622 switch (TREE_CODE (arg0))
2623 {
2624 CASE_CONVERT:
2625 case FIX_TRUNC_EXPR:
2626 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2627 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2628 return 0;
2629 break;
2630 default:
2631 break;
2632 }
2633
2634 return OP_SAME (0);
2635
2636
2637 case tcc_comparison:
2638 case tcc_binary:
2639 if (OP_SAME (0) && OP_SAME (1))
2640 return 1;
2641
2642 /* For commutative ops, allow the other order. */
2643 return (commutative_tree_code (TREE_CODE (arg0))
2644 && operand_equal_p (TREE_OPERAND (arg0, 0),
2645 TREE_OPERAND (arg1, 1), flags)
2646 && operand_equal_p (TREE_OPERAND (arg0, 1),
2647 TREE_OPERAND (arg1, 0), flags));
2648
2649 case tcc_reference:
2650 /* If either of the pointer (or reference) expressions we are
2651 dereferencing contain a side effect, these cannot be equal,
2652 but their addresses can be. */
2653 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2654 && (TREE_SIDE_EFFECTS (arg0)
2655 || TREE_SIDE_EFFECTS (arg1)))
2656 return 0;
2657
2658 switch (TREE_CODE (arg0))
2659 {
2660 case INDIRECT_REF:
2661 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2662 return OP_SAME (0);
2663
2664 case REALPART_EXPR:
2665 case IMAGPART_EXPR:
2666 return OP_SAME (0);
2667
2668 case TARGET_MEM_REF:
2669 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2670 /* Require equal extra operands and then fall through to MEM_REF
2671 handling of the two common operands. */
2672 if (!OP_SAME_WITH_NULL (2)
2673 || !OP_SAME_WITH_NULL (3)
2674 || !OP_SAME_WITH_NULL (4))
2675 return 0;
2676 /* Fallthru. */
2677 case MEM_REF:
2678 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2679 /* Require equal access sizes, and similar pointer types.
2680 We can have incomplete types for array references of
2681 variable-sized arrays from the Fortran frontend
2682 though. Also verify the types are compatible. */
2683 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2684 || (TYPE_SIZE (TREE_TYPE (arg0))
2685 && TYPE_SIZE (TREE_TYPE (arg1))
2686 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2687 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2688 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2689 && alias_ptr_types_compatible_p
2690 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2691 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2692 && OP_SAME (0) && OP_SAME (1));
2693
2694 case ARRAY_REF:
2695 case ARRAY_RANGE_REF:
2696 /* Operands 2 and 3 may be null.
2697 Compare the array index by value if it is constant first as we
2698 may have different types but same value here. */
2699 if (!OP_SAME (0))
2700 return 0;
2701 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2702 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2703 TREE_OPERAND (arg1, 1))
2704 || OP_SAME (1))
2705 && OP_SAME_WITH_NULL (2)
2706 && OP_SAME_WITH_NULL (3));
2707
2708 case COMPONENT_REF:
2709 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2710 may be NULL when we're called to compare MEM_EXPRs. */
2711 if (!OP_SAME_WITH_NULL (0)
2712 || !OP_SAME (1))
2713 return 0;
2714 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2715 return OP_SAME_WITH_NULL (2);
2716
2717 case BIT_FIELD_REF:
2718 if (!OP_SAME (0))
2719 return 0;
2720 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2721 return OP_SAME (1) && OP_SAME (2);
2722
2723 default:
2724 return 0;
2725 }
2726
2727 case tcc_expression:
2728 switch (TREE_CODE (arg0))
2729 {
2730 case ADDR_EXPR:
2731 case TRUTH_NOT_EXPR:
2732 return OP_SAME (0);
2733
2734 case TRUTH_ANDIF_EXPR:
2735 case TRUTH_ORIF_EXPR:
2736 return OP_SAME (0) && OP_SAME (1);
2737
2738 case FMA_EXPR:
2739 case WIDEN_MULT_PLUS_EXPR:
2740 case WIDEN_MULT_MINUS_EXPR:
2741 if (!OP_SAME (2))
2742 return 0;
2743 /* The multiplication operands are commutative. */
2744 /* FALLTHRU */
2745
2746 case TRUTH_AND_EXPR:
2747 case TRUTH_OR_EXPR:
2748 case TRUTH_XOR_EXPR:
2749 if (OP_SAME (0) && OP_SAME (1))
2750 return 1;
2751
2752 /* Otherwise take into account this is a commutative operation. */
2753 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2754 TREE_OPERAND (arg1, 1), flags)
2755 && operand_equal_p (TREE_OPERAND (arg0, 1),
2756 TREE_OPERAND (arg1, 0), flags));
2757
2758 case COND_EXPR:
2759 case VEC_COND_EXPR:
2760 case DOT_PROD_EXPR:
2761 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2762
2763 default:
2764 return 0;
2765 }
2766
2767 case tcc_vl_exp:
2768 switch (TREE_CODE (arg0))
2769 {
2770 case CALL_EXPR:
2771 /* If the CALL_EXPRs call different functions, then they
2772 clearly cannot be equal. */
2773 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2774 flags))
2775 return 0;
2776
2777 {
2778 unsigned int cef = call_expr_flags (arg0);
2779 if (flags & OEP_PURE_SAME)
2780 cef &= ECF_CONST | ECF_PURE;
2781 else
2782 cef &= ECF_CONST;
2783 if (!cef)
2784 return 0;
2785 }
2786
2787 /* Now see if all the arguments are the same. */
2788 {
2789 const_call_expr_arg_iterator iter0, iter1;
2790 const_tree a0, a1;
2791 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2792 a1 = first_const_call_expr_arg (arg1, &iter1);
2793 a0 && a1;
2794 a0 = next_const_call_expr_arg (&iter0),
2795 a1 = next_const_call_expr_arg (&iter1))
2796 if (! operand_equal_p (a0, a1, flags))
2797 return 0;
2798
2799 /* If we get here and both argument lists are exhausted
2800 then the CALL_EXPRs are equal. */
2801 return ! (a0 || a1);
2802 }
2803 default:
2804 return 0;
2805 }
2806
2807 case tcc_declaration:
2808 /* Consider __builtin_sqrt equal to sqrt. */
2809 return (TREE_CODE (arg0) == FUNCTION_DECL
2810 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2811 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2812 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2813
2814 default:
2815 return 0;
2816 }
2817
2818 #undef OP_SAME
2819 #undef OP_SAME_WITH_NULL
2820 }
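
/* Illustrative sketch (not part of this file): the OEP_ONLY_CONST
   comment above distinguishes "indistinguishable" from C's ==.  A
   hypothetical standalone program showing both IEEE cases it cites:  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  /* (1) -0.0 and 0.0 compare equal yet are distinguishable.  */
  assert (-0.0 == 0.0);
  assert (signbit (-0.0) != signbit (0.0));

  /* (2) identical NaNs never compare equal.  */
  double n = NAN;
  assert (n != n);
  return 0;
}
#endif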
2821 \f
2822 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2823 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2824
2825 When in doubt, return 0. */
2826
2827 static int
2828 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2829 {
2830 int unsignedp1, unsignedpo;
2831 tree primarg0, primarg1, primother;
2832 unsigned int correct_width;
2833
2834 if (operand_equal_p (arg0, arg1, 0))
2835 return 1;
2836
2837 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2838 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2839 return 0;
2840
2841 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2842 and see if the inner values are the same. This removes any
2843 signedness comparison, which doesn't matter here. */
2844 primarg0 = arg0, primarg1 = arg1;
2845 STRIP_NOPS (primarg0);
2846 STRIP_NOPS (primarg1);
2847 if (operand_equal_p (primarg0, primarg1, 0))
2848 return 1;
2849
2850 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2851 actual comparison operand, ARG0.
2852
2853 First throw away any conversions to wider types
2854 already present in the operands. */
2855
2856 primarg1 = get_narrower (arg1, &unsignedp1);
2857 primother = get_narrower (other, &unsignedpo);
2858
2859 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2860 if (unsignedp1 == unsignedpo
2861 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2862 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2863 {
2864 tree type = TREE_TYPE (arg0);
2865
2866 /* Make sure shorter operand is extended the right way
2867 to match the longer operand. */
2868 primarg1 = fold_convert (signed_or_unsigned_type_for
2869 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2870
2871 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2872 return 1;
2873 }
2874
2875 return 0;
2876 }
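
/* Illustrative sketch (not part of this file): shorten_compare turns a
   widened comparison such as (int) c == 65 back into a comparison in
   the narrower type when the constant is representable there.  A
   hypothetical standalone check that such a rewrite is
   value-preserving:  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int i = 0; i < 256; i++)
    {
      unsigned char c = (unsigned char) i;
      /* The promoted and the narrowed comparisons agree for every
	 value of C because 65 fits in unsigned char.  */
      assert (((int) c == 65) == (c == (unsigned char) 65));
    }
  return 0;
}
#endif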
2877 \f
2878 /* See if ARG is an expression that is either a comparison or is performing
2879 arithmetic on comparisons. The comparisons must only be comparing
2880 two different values, which will be stored in *CVAL1 and *CVAL2; if
2881 they are nonzero it means that some operands have already been found.
2882 No variables may be used anywhere else in the expression except in the
2883 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2884 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2885
2886 If this is true, return 1. Otherwise, return zero. */
2887
2888 static int
2889 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2890 {
2891 enum tree_code code = TREE_CODE (arg);
2892 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2893
2894 /* We can handle some of the tcc_expression cases here. */
2895 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2896 tclass = tcc_unary;
2897 else if (tclass == tcc_expression
2898 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2899 || code == COMPOUND_EXPR))
2900 tclass = tcc_binary;
2901
2902 else if (tclass == tcc_expression && code == SAVE_EXPR
2903 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2904 {
2905 /* If we've already found a CVAL1 or CVAL2, this expression is
2906 too complex to handle. */
2907 if (*cval1 || *cval2)
2908 return 0;
2909
2910 tclass = tcc_unary;
2911 *save_p = 1;
2912 }
2913
2914 switch (tclass)
2915 {
2916 case tcc_unary:
2917 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2918
2919 case tcc_binary:
2920 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2921 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2922 cval1, cval2, save_p));
2923
2924 case tcc_constant:
2925 return 1;
2926
2927 case tcc_expression:
2928 if (code == COND_EXPR)
2929 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2930 cval1, cval2, save_p)
2931 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2932 cval1, cval2, save_p)
2933 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2934 cval1, cval2, save_p));
2935 return 0;
2936
2937 case tcc_comparison:
2938 /* First see if we can handle the first operand, then the second. For
2939 the second operand, we know *CVAL1 can't be zero. It must be that
2940 one side of the comparison is each of the values; test for the
2941 case where this isn't true by failing if the two operands
2942 are the same. */
2943
2944 if (operand_equal_p (TREE_OPERAND (arg, 0),
2945 TREE_OPERAND (arg, 1), 0))
2946 return 0;
2947
2948 if (*cval1 == 0)
2949 *cval1 = TREE_OPERAND (arg, 0);
2950 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2951 ;
2952 else if (*cval2 == 0)
2953 *cval2 = TREE_OPERAND (arg, 0);
2954 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2955 ;
2956 else
2957 return 0;
2958
2959 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2960 ;
2961 else if (*cval2 == 0)
2962 *cval2 = TREE_OPERAND (arg, 1);
2963 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2964 ;
2965 else
2966 return 0;
2967
2968 return 1;
2969
2970 default:
2971 return 0;
2972 }
2973 }
2974 \f
2975 /* ARG is a tree that is known to contain just arithmetic operations and
2976 comparisons. Evaluate the operations in the tree substituting NEW0 for
2977 any occurrence of OLD0 as an operand of a comparison and likewise for
2978 NEW1 and OLD1. */
2979
2980 static tree
2981 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2982 tree old1, tree new1)
2983 {
2984 tree type = TREE_TYPE (arg);
2985 enum tree_code code = TREE_CODE (arg);
2986 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2987
2988 /* We can handle some of the tcc_expression cases here. */
2989 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2990 tclass = tcc_unary;
2991 else if (tclass == tcc_expression
2992 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2993 tclass = tcc_binary;
2994
2995 switch (tclass)
2996 {
2997 case tcc_unary:
2998 return fold_build1_loc (loc, code, type,
2999 eval_subst (loc, TREE_OPERAND (arg, 0),
3000 old0, new0, old1, new1));
3001
3002 case tcc_binary:
3003 return fold_build2_loc (loc, code, type,
3004 eval_subst (loc, TREE_OPERAND (arg, 0),
3005 old0, new0, old1, new1),
3006 eval_subst (loc, TREE_OPERAND (arg, 1),
3007 old0, new0, old1, new1));
3008
3009 case tcc_expression:
3010 switch (code)
3011 {
3012 case SAVE_EXPR:
3013 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3014 old1, new1);
3015
3016 case COMPOUND_EXPR:
3017 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3018 old1, new1);
3019
3020 case COND_EXPR:
3021 return fold_build3_loc (loc, code, type,
3022 eval_subst (loc, TREE_OPERAND (arg, 0),
3023 old0, new0, old1, new1),
3024 eval_subst (loc, TREE_OPERAND (arg, 1),
3025 old0, new0, old1, new1),
3026 eval_subst (loc, TREE_OPERAND (arg, 2),
3027 old0, new0, old1, new1));
3028 default:
3029 break;
3030 }
3031 /* Fall through - ??? */
3032
3033 case tcc_comparison:
3034 {
3035 tree arg0 = TREE_OPERAND (arg, 0);
3036 tree arg1 = TREE_OPERAND (arg, 1);
3037
3038 /* We need to check both for exact equality and tree equality. The
3039 former will be true if the operand has a side-effect. In that
3040 case, we know the operand occurred exactly once. */
3041
3042 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3043 arg0 = new0;
3044 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3045 arg0 = new1;
3046
3047 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3048 arg1 = new0;
3049 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3050 arg1 = new1;
3051
3052 return fold_build2_loc (loc, code, type, arg0, arg1);
3053 }
3054
3055 default:
3056 return arg;
3057 }
3058 }
3059 \f
3060 /* Return a tree for the case when the result of an expression is RESULT
3061 converted to TYPE and OMITTED was previously an operand of the expression
3062 but is now not needed (e.g., we folded OMITTED * 0).
3063
3064 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3065 the conversion of RESULT to TYPE. */
3066
3067 tree
3068 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3069 {
3070 tree t = fold_convert_loc (loc, type, result);
3071
3072 /* If the resulting operand is an empty statement, just return the omitted
3073 statement cast to void. */
3074 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3075 return build1_loc (loc, NOP_EXPR, void_type_node,
3076 fold_ignored_result (omitted));
3077
3078 if (TREE_SIDE_EFFECTS (omitted))
3079 return build2_loc (loc, COMPOUND_EXPR, type,
3080 fold_ignored_result (omitted), t);
3081
3082 return non_lvalue_loc (loc, t);
3083 }
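
/* Illustrative sketch (not part of this file): why OMITTED must still
   be evaluated when it has side effects.  Folding f () * 0 to plain 0
   would drop the call; the COMPOUND_EXPR built above behaves like C's
   comma operator.  A hypothetical standalone demonstration:  */
#if 0
#include <assert.h>

static int calls;

static int
f (void)
{
  return ++calls;
}

int
main (void)
{
  /* Evaluate f () for its side effect, then yield the folded 0.  */
  int r = (f (), 0);
  assert (r == 0 && calls == 1);
  return 0;
}
#endif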
3084
3085 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3086
3087 static tree
3088 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3089 tree omitted)
3090 {
3091 tree t = fold_convert_loc (loc, type, result);
3092
3093 /* If the resulting operand is an empty statement, just return the omitted
3094 statement cast to void. */
3095 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3096 return build1_loc (loc, NOP_EXPR, void_type_node,
3097 fold_ignored_result (omitted));
3098
3099 if (TREE_SIDE_EFFECTS (omitted))
3100 return build2_loc (loc, COMPOUND_EXPR, type,
3101 fold_ignored_result (omitted), t);
3102
3103 return pedantic_non_lvalue_loc (loc, t);
3104 }
3105
3106 /* Return a tree for the case when the result of an expression is RESULT
3107 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3108 of the expression but are now not needed.
3109
3110 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3111 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3112 evaluated before OMITTED2. Otherwise, if neither has side effects,
3113 just do the conversion of RESULT to TYPE. */
3114
3115 tree
3116 omit_two_operands_loc (location_t loc, tree type, tree result,
3117 tree omitted1, tree omitted2)
3118 {
3119 tree t = fold_convert_loc (loc, type, result);
3120
3121 if (TREE_SIDE_EFFECTS (omitted2))
3122 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3123 if (TREE_SIDE_EFFECTS (omitted1))
3124 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3125
3126 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3127 }
3128
3129 \f
3130 /* Return a simplified tree node for the truth-negation of ARG. This
3131 never alters ARG itself. We assume that ARG is an operation that
3132 returns a truth value (0 or 1).
3133
3134 FIXME: one would think we would fold the result, but it causes
3135 problems with the dominator optimizer. */
3136
3137 static tree
3138 fold_truth_not_expr (location_t loc, tree arg)
3139 {
3140 tree type = TREE_TYPE (arg);
3141 enum tree_code code = TREE_CODE (arg);
3142 location_t loc1, loc2;
3143
3144 /* If this is a comparison, we can simply invert it, except for
3145 floating-point non-equality comparisons, in which case we just
3146 enclose a TRUTH_NOT_EXPR around what we have. */
3147
3148 if (TREE_CODE_CLASS (code) == tcc_comparison)
3149 {
3150 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3151 if (FLOAT_TYPE_P (op_type)
3152 && flag_trapping_math
3153 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3154 && code != NE_EXPR && code != EQ_EXPR)
3155 return NULL_TREE;
3156
3157 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3158 if (code == ERROR_MARK)
3159 return NULL_TREE;
3160
3161 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3162 TREE_OPERAND (arg, 1));
3163 }
3164
3165 switch (code)
3166 {
3167 case INTEGER_CST:
3168 return constant_boolean_node (integer_zerop (arg), type);
3169
3170 case TRUTH_AND_EXPR:
3171 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3172 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3173 return build2_loc (loc, TRUTH_OR_EXPR, type,
3174 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3175 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3176
3177 case TRUTH_OR_EXPR:
3178 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3179 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3180 return build2_loc (loc, TRUTH_AND_EXPR, type,
3181 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3182 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3183
3184 case TRUTH_XOR_EXPR:
3185 /* Here we can invert either operand. We invert the first operand
3186 unless the second operand is a TRUTH_NOT_EXPR in which case our
3187 result is the XOR of the first operand with the inside of the
3188 negation of the second operand. */
3189
3190 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3191 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3192 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3193 else
3194 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3195 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3196 TREE_OPERAND (arg, 1));
3197
3198 case TRUTH_ANDIF_EXPR:
3199 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3200 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3201 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3202 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3203 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3204
3205 case TRUTH_ORIF_EXPR:
3206 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3207 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3208 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3209 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3210 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3211
3212 case TRUTH_NOT_EXPR:
3213 return TREE_OPERAND (arg, 0);
3214
3215 case COND_EXPR:
3216 {
3217 tree arg1 = TREE_OPERAND (arg, 1);
3218 tree arg2 = TREE_OPERAND (arg, 2);
3219
3220 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3221 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3222
3223 /* A COND_EXPR may have a throw as one operand, which
3224 then has void type. Just leave void operands
3225 as they are. */
3226 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3227 VOID_TYPE_P (TREE_TYPE (arg1))
3228 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3229 VOID_TYPE_P (TREE_TYPE (arg2))
3230 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3231 }
3232
3233 case COMPOUND_EXPR:
3234 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3235 return build2_loc (loc, COMPOUND_EXPR, type,
3236 TREE_OPERAND (arg, 0),
3237 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3238
3239 case NON_LVALUE_EXPR:
3240 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3241 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3242
3243 CASE_CONVERT:
3244 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3245 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3246
3247 /* ... fall through ... */
3248
3249 case FLOAT_EXPR:
3250 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3251 return build1_loc (loc, TREE_CODE (arg), type,
3252 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3253
3254 case BIT_AND_EXPR:
3255 if (!integer_onep (TREE_OPERAND (arg, 1)))
3256 return NULL_TREE;
3257 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3258
3259 case SAVE_EXPR:
3260 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3261
3262 case CLEANUP_POINT_EXPR:
3263 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3264 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3265 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3266
3267 default:
3268 return NULL_TREE;
3269 }
3270 }
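
/* Illustrative sketch (not part of this file): the TRUTH_AND_EXPR and
   TRUTH_OR_EXPR cases above are De Morgan's laws.  A hypothetical
   standalone check over all truth-value combinations:  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
	assert ((!(a && b)) == (!a || !b));
	assert ((!(a || b)) == (!a && !b));
      }
  return 0;
}
#endif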
3271
3272 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3273 assume that ARG is an operation that returns a truth value (0 or 1
3274 for scalars, 0 or -1 for vectors). Return the folded expression if
3275 folding is successful. Otherwise, return NULL_TREE. */
3276
3277 static tree
3278 fold_invert_truthvalue (location_t loc, tree arg)
3279 {
3280 tree type = TREE_TYPE (arg);
3281 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3282 ? BIT_NOT_EXPR
3283 : TRUTH_NOT_EXPR,
3284 type, arg);
3285 }
3286
3287 /* Return a simplified tree node for the truth-negation of ARG. This
3288 never alters ARG itself. We assume that ARG is an operation that
3289 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3290
3291 tree
3292 invert_truthvalue_loc (location_t loc, tree arg)
3293 {
3294 if (TREE_CODE (arg) == ERROR_MARK)
3295 return arg;
3296
3297 tree type = TREE_TYPE (arg);
3298 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3299 ? BIT_NOT_EXPR
3300 : TRUTH_NOT_EXPR,
3301 type, arg);
3302 }
3303
3304 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3305 operands are another bit-wise operation with a common input. If so,
3306 distribute the bit operations to save an operation and possibly two if
3307 constants are involved. For example, convert
3308 (A | B) & (A | C) into A | (B & C)
3309 Further simplification will occur if B and C are constants.
3310
3311 If this optimization cannot be done, 0 will be returned. */
3312
3313 static tree
3314 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3315 tree arg0, tree arg1)
3316 {
3317 tree common;
3318 tree left, right;
3319
3320 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3321 || TREE_CODE (arg0) == code
3322 || (TREE_CODE (arg0) != BIT_AND_EXPR
3323 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3324 return 0;
3325
3326 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3327 {
3328 common = TREE_OPERAND (arg0, 0);
3329 left = TREE_OPERAND (arg0, 1);
3330 right = TREE_OPERAND (arg1, 1);
3331 }
3332 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3333 {
3334 common = TREE_OPERAND (arg0, 0);
3335 left = TREE_OPERAND (arg0, 1);
3336 right = TREE_OPERAND (arg1, 0);
3337 }
3338 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3339 {
3340 common = TREE_OPERAND (arg0, 1);
3341 left = TREE_OPERAND (arg0, 0);
3342 right = TREE_OPERAND (arg1, 1);
3343 }
3344 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3345 {
3346 common = TREE_OPERAND (arg0, 1);
3347 left = TREE_OPERAND (arg0, 0);
3348 right = TREE_OPERAND (arg1, 0);
3349 }
3350 else
3351 return 0;
3352
3353 common = fold_convert_loc (loc, type, common);
3354 left = fold_convert_loc (loc, type, left);
3355 right = fold_convert_loc (loc, type, right);
3356 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3357 fold_build2_loc (loc, code, type, left, right));
3358 }
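
/* Illustrative sketch (not part of this file): exhaustively verifying
   the distribution performed above, (A | B) & (A | C) == A | (B & C),
   and its AND/OR dual, over small operands:  */
#if 0
#include <assert.h>

int
main (void)
{
  for (unsigned a = 0; a < 16; a++)
    for (unsigned b = 0; b < 16; b++)
      for (unsigned c = 0; c < 16; c++)
	{
	  assert (((a | b) & (a | c)) == (a | (b & c)));
	  assert (((a & b) | (a & c)) == (a & (b | c)));
	}
  return 0;
}
#endif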
3359
3360 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3361 with code CODE. This optimization is unsafe. */
3362 static tree
3363 distribute_real_division (location_t loc, enum tree_code code, tree type,
3364 tree arg0, tree arg1)
3365 {
3366 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3367 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3368
3369 /* (A / C) +- (B / C) -> (A +- B) / C. */
3370 if (mul0 == mul1
3371 && operand_equal_p (TREE_OPERAND (arg0, 1),
3372 TREE_OPERAND (arg1, 1), 0))
3373 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3374 fold_build2_loc (loc, code, type,
3375 TREE_OPERAND (arg0, 0),
3376 TREE_OPERAND (arg1, 0)),
3377 TREE_OPERAND (arg0, 1));
3378
3379 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3380 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3381 TREE_OPERAND (arg1, 0), 0)
3382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3383 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3384 {
3385 REAL_VALUE_TYPE r0, r1;
3386 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3387 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3388 if (!mul0)
3389 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3390 if (!mul1)
3391 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3392 real_arithmetic (&r0, code, &r0, &r1);
3393 return fold_build2_loc (loc, MULT_EXPR, type,
3394 TREE_OPERAND (arg0, 0),
3395 build_real (type, r0));
3396 }
3397
3398 return NULL_TREE;
3399 }
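
/* Illustrative sketch (not part of this file): why the transformation
   above is flagged unsafe.  (A / C) + (B / C) rounds each quotient
   separately, while (A + B) / C rounds once, so the results can differ.
   A hypothetical standalone demonstration (assumes IEEE binary64
   arithmetic):  */
#if 0
#include <assert.h>

int
main (void)
{
  double a = 1.0, b = 2.0, c = 10.0;
  double separate = a / c + b / c;   /* 0.1 + 0.2 = 0.30000000000000004  */
  double combined = (a + b) / c;     /* 3.0 / 10.0 = 0.29999999999999999  */
  assert (separate != combined);
  return 0;
}
#endif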
3400 \f
3401 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3402 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3403
3404 static tree
3405 make_bit_field_ref (location_t loc, tree inner, tree type,
3406 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3407 {
3408 tree result, bftype;
3409
3410 if (bitpos == 0)
3411 {
3412 tree size = TYPE_SIZE (TREE_TYPE (inner));
3413 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3414 || POINTER_TYPE_P (TREE_TYPE (inner)))
3415 && tree_fits_shwi_p (size)
3416 && tree_to_shwi (size) == bitsize)
3417 return fold_convert_loc (loc, type, inner);
3418 }
3419
3420 bftype = type;
3421 if (TYPE_PRECISION (bftype) != bitsize
3422 || TYPE_UNSIGNED (bftype) == !unsignedp)
3423 bftype = build_nonstandard_integer_type (bitsize, 0);
3424
3425 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3426 size_int (bitsize), bitsize_int (bitpos));
3427
3428 if (bftype != type)
3429 result = fold_convert_loc (loc, type, result);
3430
3431 return result;
3432 }
3433
3434 /* Optimize a bit-field compare.
3435
3436 There are two cases: First is a compare against a constant and the
3437 second is a comparison of two items where the fields are at the same
3438 bit position relative to the start of a chunk (byte, halfword, word)
3439 large enough to contain it. In these cases we can avoid the shift
3440 implicit in bitfield extractions.
3441
3442 For constants, we emit a compare of the shifted constant with the
3443 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3444 compared. For two fields at the same position, we do the ANDs with the
3445 similar mask and compare the result of the ANDs.
3446
3447 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3448 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3449 are the left and right operands of the comparison, respectively.
3450
3451 If the optimization described above can be done, we return the resulting
3452 tree. Otherwise we return zero. */
3453
3454 static tree
3455 optimize_bit_field_compare (location_t loc, enum tree_code code,
3456 tree compare_type, tree lhs, tree rhs)
3457 {
3458 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3459 tree type = TREE_TYPE (lhs);
3460 tree unsigned_type;
3461 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3462 machine_mode lmode, rmode, nmode;
3463 int lunsignedp, runsignedp;
3464 int lvolatilep = 0, rvolatilep = 0;
3465 tree linner, rinner = NULL_TREE;
3466 tree mask;
3467 tree offset;
3468
3469 /* Get all the information about the extractions being done. If the bit size
3470 is the same as the size of the underlying object, we aren't doing an
3471 extraction at all and so can do nothing. We also don't want to
3472 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3473 then will no longer be able to replace it. */
3474 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3475 &lunsignedp, &lvolatilep, false);
3476 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3477 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3478 return 0;
3479
3480 if (!const_p)
3481 {
3482 /* If this is not a constant, we can only do something if bit positions,
3483 sizes, and signedness are the same. */
3484 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3485 &runsignedp, &rvolatilep, false);
3486
3487 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3488 || lunsignedp != runsignedp || offset != 0
3489 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3490 return 0;
3491 }
3492
3493 /* See if we can find a mode to refer to this field. We should be able to,
3494 but fail if we can't. */
3495 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3496 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3497 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3498 TYPE_ALIGN (TREE_TYPE (rinner))),
3499 word_mode, false);
3500 if (nmode == VOIDmode)
3501 return 0;
3502
3503 /* Set signed and unsigned types of the precision of this mode for the
3504 shifts below. */
3505 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3506
3507 /* Compute the bit position and size for the new reference and our offset
3508 within it. If the new reference is the same size as the original, we
3509 won't optimize anything, so return zero. */
3510 nbitsize = GET_MODE_BITSIZE (nmode);
3511 nbitpos = lbitpos & ~ (nbitsize - 1);
3512 lbitpos -= nbitpos;
3513 if (nbitsize == lbitsize)
3514 return 0;
3515
3516 if (BYTES_BIG_ENDIAN)
3517 lbitpos = nbitsize - lbitsize - lbitpos;
3518
3519 /* Make the mask to be used against the extracted field. */
3520 mask = build_int_cst_type (unsigned_type, -1);
3521 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3522 mask = const_binop (RSHIFT_EXPR, mask,
3523 size_int (nbitsize - lbitsize - lbitpos));
3524
3525 if (! const_p)
3526 /* If not comparing with constant, just rework the comparison
3527 and return. */
3528 return fold_build2_loc (loc, code, compare_type,
3529 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3530 make_bit_field_ref (loc, linner,
3531 unsigned_type,
3532 nbitsize, nbitpos,
3533 1),
3534 mask),
3535 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3536 make_bit_field_ref (loc, rinner,
3537 unsigned_type,
3538 nbitsize, nbitpos,
3539 1),
3540 mask));
3541
3542 /* Otherwise, we are handling the constant case. See if the constant is too
3543 big for the field. Warn and return a tree for 0 (false) if so. We do
3544 this not only for its own sake, but to avoid having to test for this
3545 error case below. If we didn't, we might generate wrong code.
3546
3547 For unsigned fields, the constant shifted right by the field length should
3548 be all zero. For signed fields, the high-order bits should agree with
3549 the sign bit. */
3550
3551 if (lunsignedp)
3552 {
3553 if (wi::lrshift (rhs, lbitsize) != 0)
3554 {
3555 warning (0, "comparison is always %d due to width of bit-field",
3556 code == NE_EXPR);
3557 return constant_boolean_node (code == NE_EXPR, compare_type);
3558 }
3559 }
3560 else
3561 {
3562 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3563 if (tem != 0 && tem != -1)
3564 {
3565 warning (0, "comparison is always %d due to width of bit-field",
3566 code == NE_EXPR);
3567 return constant_boolean_node (code == NE_EXPR, compare_type);
3568 }
3569 }
3570
3571 /* Single-bit compares should always be against zero. */
3572 if (lbitsize == 1 && ! integer_zerop (rhs))
3573 {
3574 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3575 rhs = build_int_cst (type, 0);
3576 }
3577
3578 /* Make a new bitfield reference, shift the constant over the
3579 appropriate number of bits and mask it with the computed mask
3580 (in case this was a signed field). If we changed it, make a new one. */
3581 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3582
3583 rhs = const_binop (BIT_AND_EXPR,
3584 const_binop (LSHIFT_EXPR,
3585 fold_convert_loc (loc, unsigned_type, rhs),
3586 size_int (lbitpos)),
3587 mask);
3588
3589 lhs = build2_loc (loc, code, compare_type,
3590 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3591 return lhs;
3592 }
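
/* Illustrative sketch (not part of this file): the two const_binop
   shifts above build a mask of LBITSIZE ones starting at LBITPOS within
   the NBITSIZE-bit unit.  A hypothetical standalone check for an 8-bit
   field at bit 8 of a 32-bit word:  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  unsigned nbitsize = 32, lbitsize = 8, lbitpos = 8;
  uint32_t mask = UINT32_MAX;
  mask <<= nbitsize - lbitsize;             /* 0xff000000 */
  mask >>= nbitsize - lbitsize - lbitpos;   /* 0x0000ff00 */
  assert (mask == UINT32_C (0x0000ff00));
  return 0;
}
#endif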
3593 \f
3594 /* Subroutine for fold_truth_andor_1: decode a field reference.
3595
3596 If EXP is a comparison reference, we return the innermost reference.
3597
3598 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3599 set to the starting bit number.
3600
3601 If the innermost field can be completely contained in a mode-sized
3602 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3603
3604 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3605 otherwise it is not changed.
3606
3607 *PUNSIGNEDP is set to the signedness of the field.
3608
3609 *PMASK is set to the mask used. This is either contained in a
3610 BIT_AND_EXPR or derived from the width of the field.
3611
3612 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3613
3614 Return 0 if this is not a component reference or is one that we can't
3615 do anything with. */
3616
3617 static tree
3618 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3619 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3620 int *punsignedp, int *pvolatilep,
3621 tree *pmask, tree *pand_mask)
3622 {
3623 tree outer_type = 0;
3624 tree and_mask = 0;
3625 tree mask, inner, offset;
3626 tree unsigned_type;
3627 unsigned int precision;
3628
3629 /* All the optimizations using this function assume integer fields.
3630 There are problems with FP fields since the type_for_size call
3631 below can fail for, e.g., XFmode. */
3632 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3633 return 0;
3634
3635 /* We are interested in the bare arrangement of bits, so strip everything
3636 that doesn't affect the machine mode. However, record the type of the
3637 outermost expression if it may matter below. */
3638 if (CONVERT_EXPR_P (exp)
3639 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3640 outer_type = TREE_TYPE (exp);
3641 STRIP_NOPS (exp);
3642
3643 if (TREE_CODE (exp) == BIT_AND_EXPR)
3644 {
3645 and_mask = TREE_OPERAND (exp, 1);
3646 exp = TREE_OPERAND (exp, 0);
3647 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3648 if (TREE_CODE (and_mask) != INTEGER_CST)
3649 return 0;
3650 }
3651
3652 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3653 punsignedp, pvolatilep, false);
3654 if ((inner == exp && and_mask == 0)
3655 || *pbitsize < 0 || offset != 0
3656 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3657 return 0;
3658
3659 /* If the number of bits in the reference is the same as the bitsize of
3660 the outer type, then the outer type gives the signedness. Otherwise
3661 (in case of a small bitfield) the signedness is unchanged. */
3662 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3663 *punsignedp = TYPE_UNSIGNED (outer_type);
3664
3665 /* Compute the mask to access the bitfield. */
3666 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3667 precision = TYPE_PRECISION (unsigned_type);
3668
3669 mask = build_int_cst_type (unsigned_type, -1);
3670
3671 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3672 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3673
3674 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3675 if (and_mask != 0)
3676 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3677 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3678
3679 *pmask = mask;
3680 *pand_mask = and_mask;
3681 return inner;
3682 }
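
/* Editor's sketch (hypothetical helper, not part of fold-const.c): the mask
   built above keeps exactly the low *PBITSIZE bits -- all ones shifted left
   and then logically right by PRECISION - *PBITSIZE.  For a 32-bit unsigned
   type:  */
static unsigned dfr_low_mask (unsigned bitsize)  /* 1 <= bitsize <= 32 */
{
  unsigned mask = ~0u;      /* build_int_cst_type (unsigned_type, -1) */
  mask <<= 32 - bitsize;    /* LSHIFT_EXPR by precision - *pbitsize */
  mask >>= 32 - bitsize;    /* RSHIFT_EXPR; logical, as the type is unsigned */
  return mask;              /* bitsize == 32 ? ~0u : (1u << bitsize) - 1 */
}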
3683
3684 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3685 bit positions and the type of MASK is signed. */
3686
3687 static int
3688 all_ones_mask_p (const_tree mask, unsigned int size)
3689 {
3690 tree type = TREE_TYPE (mask);
3691 unsigned int precision = TYPE_PRECISION (type);
3692
3693 /* If this function returns true when the type of the mask is
3694 UNSIGNED, then there will be errors. In particular see
3695 gcc.c-torture/execute/990326-1.c. There does not appear to be
3696 any documentation paper trail as to why this is so. But the pre
3697 wide-int worked with that restriction and it has been preserved
3698 here. */
3699 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3700 return false;
3701
3702 return wi::mask (size, false, precision) == mask;
3703 }
3704
3705 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3706 represents the sign bit of EXP's type. If EXP represents a sign
3707 or zero extension, also test VAL against the unextended type.
3708 The return value is the (sub)expression whose sign bit is VAL,
3709 or NULL_TREE otherwise. */
3710
3711 static tree
3712 sign_bit_p (tree exp, const_tree val)
3713 {
3714 int width;
3715 tree t;
3716
3717 /* Tree EXP must have an integral type. */
3718 t = TREE_TYPE (exp);
3719 if (! INTEGRAL_TYPE_P (t))
3720 return NULL_TREE;
3721
3722 /* Tree VAL must be an integer constant. */
3723 if (TREE_CODE (val) != INTEGER_CST
3724 || TREE_OVERFLOW (val))
3725 return NULL_TREE;
3726
3727 width = TYPE_PRECISION (t);
3728 if (wi::only_sign_bit_p (val, width))
3729 return exp;
3730
3731 /* Handle extension from a narrower type. */
3732 if (TREE_CODE (exp) == NOP_EXPR
3733 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3734 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3735
3736 return NULL_TREE;
3737 }
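
/* Editor's sketch (names hypothetical): the test wi::only_sign_bit_p
   performs above is, conceptually, "VAL is exactly the sign bit of a
   WIDTH-bit type":  */
static int sbp_only_sign_bit (unsigned long long val, int width)
{
  return val == (1ull << (width - 1));
}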
3738
3739 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3740 to be evaluated unconditionally. */
3741
3742 static int
3743 simple_operand_p (const_tree exp)
3744 {
3745 /* Strip any conversions that don't change the machine mode. */
3746 STRIP_NOPS (exp);
3747
3748 return (CONSTANT_CLASS_P (exp)
3749 || TREE_CODE (exp) == SSA_NAME
3750 || (DECL_P (exp)
3751 && ! TREE_ADDRESSABLE (exp)
3752 && ! TREE_THIS_VOLATILE (exp)
3753 && ! DECL_NONLOCAL (exp)
3754 /* Don't regard global variables as simple. They may be
3755 allocated in ways unknown to the compiler (shared memory,
3756 #pragma weak, etc). */
3757 && ! TREE_PUBLIC (exp)
3758 && ! DECL_EXTERNAL (exp)
3759 /* Weakrefs are not safe to be read, since they can be NULL.
3760 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3761 have DECL_WEAK flag set. */
3762 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3763 /* Loading a static variable is unduly expensive, but global
3764 registers aren't expensive. */
3765 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3766 }
3767
3768 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3769 to be evaluated unconditionally.
3770 In addition to simple_operand_p, we assume that comparisons, conversions,
3771 and logic-not operations are simple, if their operands are simple, too. */
3772
3773 static bool
3774 simple_operand_p_2 (tree exp)
3775 {
3776 enum tree_code code;
3777
3778 if (TREE_SIDE_EFFECTS (exp)
3779 || tree_could_trap_p (exp))
3780 return false;
3781
3782 while (CONVERT_EXPR_P (exp))
3783 exp = TREE_OPERAND (exp, 0);
3784
3785 code = TREE_CODE (exp);
3786
3787 if (TREE_CODE_CLASS (code) == tcc_comparison)
3788 return (simple_operand_p (TREE_OPERAND (exp, 0))
3789 && simple_operand_p (TREE_OPERAND (exp, 1)));
3790
3791 if (code == TRUTH_NOT_EXPR)
3792 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3793
3794 return simple_operand_p (exp);
3795 }
3796
3797 \f
3798 /* The following functions are subroutines to fold_range_test and allow it to
3799 try to change a logical combination of comparisons into a range test.
3800
3801 For example, both
3802 X == 2 || X == 3 || X == 4 || X == 5
3803 and
3804 X >= 2 && X <= 5
3805 are converted to
3806 (unsigned) (X - 2) <= 3
3807
3808 We describe each set of comparisons as being either inside or outside
3809 a range, using a variable named like IN_P, and then describe the
3810 range with a lower and upper bound. If one of the bounds is omitted,
3811 it represents either the highest or lowest value of the type.
3812
3813 In the comments below, we represent a range by two numbers in brackets
3814 preceded by a "+" to designate being inside that range, or a "-" to
3815 designate being outside that range, so the condition can be inverted by
3816 flipping the prefix. An omitted bound is represented by a "-". For
3817 example, "- [-, 10]" means being outside the range starting at the lowest
3818 possible value and ending at 10, in other words, being greater than 10.
3819 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3820 always false.
3821
3822 We set up things so that the missing bounds are handled in a consistent
3823 manner so neither a missing bound nor "true" and "false" need to be
3824 handled using a special case. */
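
/* Editor's sketch: the example from the comment above, written out.  Both
   functions test membership in {2, 3, 4, 5}; the folded form is the single
   unsigned range check the routines below construct (in_p = 1, low = 2,
   high = 5).  */
static int rt_orig (int x)   { return x == 2 || x == 3 || x == 4 || x == 5; }
static int rt_folded (int x) { return (unsigned) x - 2u <= 3u; }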
3825
3826 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3827 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3828 and UPPER1_P are nonzero if the respective argument is an upper bound
3829 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3830 must be specified for a comparison. ARG1 will be converted to ARG0's
3831 type if both are specified. */
3832
3833 static tree
3834 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3835 tree arg1, int upper1_p)
3836 {
3837 tree tem;
3838 int result;
3839 int sgn0, sgn1;
3840
3841 /* If neither arg represents infinity, do the normal operation.
3842 Else, if not a comparison, return infinity. Else handle the special
3843 comparison rules. Note that most of the cases below won't occur, but
3844 are handled for consistency. */
3845
3846 if (arg0 != 0 && arg1 != 0)
3847 {
3848 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3849 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3850 STRIP_NOPS (tem);
3851 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3852 }
3853
3854 if (TREE_CODE_CLASS (code) != tcc_comparison)
3855 return 0;
3856
3857 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3858 for neither. In real maths, we cannot assume open-ended ranges are
3859 the same. But this is computer arithmetic, where numbers are finite.
3860 We can therefore stand in for any missing bound with a value Z that is
3861 greater than any representable number, which permits us to treat
3862 unbounded ranges as equal. */
3863 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3864 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3865 switch (code)
3866 {
3867 case EQ_EXPR:
3868 result = sgn0 == sgn1;
3869 break;
3870 case NE_EXPR:
3871 result = sgn0 != sgn1;
3872 break;
3873 case LT_EXPR:
3874 result = sgn0 < sgn1;
3875 break;
3876 case LE_EXPR:
3877 result = sgn0 <= sgn1;
3878 break;
3879 case GT_EXPR:
3880 result = sgn0 > sgn1;
3881 break;
3882 case GE_EXPR:
3883 result = sgn0 >= sgn1;
3884 break;
3885 default:
3886 gcc_unreachable ();
3887 }
3888
3889 return constant_boolean_node (result, type);
3890 }
3891 \f
3892 /* Helper routine for make_range. Perform one step for it, return
3893 new expression if the loop should continue or NULL_TREE if it should
3894 stop. */
3895
3896 tree
3897 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3898 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3899 bool *strict_overflow_p)
3900 {
3901 tree arg0_type = TREE_TYPE (arg0);
3902 tree n_low, n_high, low = *p_low, high = *p_high;
3903 int in_p = *p_in_p, n_in_p;
3904
3905 switch (code)
3906 {
3907 case TRUTH_NOT_EXPR:
3908 /* We can only do something if the range is testing for zero. */
3909 if (low == NULL_TREE || high == NULL_TREE
3910 || ! integer_zerop (low) || ! integer_zerop (high))
3911 return NULL_TREE;
3912 *p_in_p = ! in_p;
3913 return arg0;
3914
3915 case EQ_EXPR: case NE_EXPR:
3916 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3917 /* We can only do something if the range is testing for zero
3918 and if the second operand is an integer constant. Note that
3919 saying something is "in" the range we make is done by
3920 complementing IN_P, since IN_P is set for the initial case of
3921 being not equal to zero; "out" means leaving it alone. */
3922 if (low == NULL_TREE || high == NULL_TREE
3923 || ! integer_zerop (low) || ! integer_zerop (high)
3924 || TREE_CODE (arg1) != INTEGER_CST)
3925 return NULL_TREE;
3926
3927 switch (code)
3928 {
3929 case NE_EXPR: /* - [c, c] */
3930 low = high = arg1;
3931 break;
3932 case EQ_EXPR: /* + [c, c] */
3933 in_p = ! in_p, low = high = arg1;
3934 break;
3935 case GT_EXPR: /* - [-, c] */
3936 low = 0, high = arg1;
3937 break;
3938 case GE_EXPR: /* + [c, -] */
3939 in_p = ! in_p, low = arg1, high = 0;
3940 break;
3941 case LT_EXPR: /* - [c, -] */
3942 low = arg1, high = 0;
3943 break;
3944 case LE_EXPR: /* + [-, c] */
3945 in_p = ! in_p, low = 0, high = arg1;
3946 break;
3947 default:
3948 gcc_unreachable ();
3949 }
3950
3951 /* If this is an unsigned comparison, we also know that EXP is
3952 greater than or equal to zero. We base the range tests we make
3953 on that fact, so we record it here so we can parse existing
3954 range tests. We test arg0_type since often the return type
3955 of, e.g. EQ_EXPR, is boolean. */
3956 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3957 {
3958 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3959 in_p, low, high, 1,
3960 build_int_cst (arg0_type, 0),
3961 NULL_TREE))
3962 return NULL_TREE;
3963
3964 in_p = n_in_p, low = n_low, high = n_high;
3965
3966 /* If the high bound is missing, but we have a nonzero low
3967 bound, reverse the range so it goes from zero to the low bound
3968 minus 1. */
3969 if (high == 0 && low && ! integer_zerop (low))
3970 {
3971 in_p = ! in_p;
3972 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3973 build_int_cst (TREE_TYPE (low), 1), 0);
3974 low = build_int_cst (arg0_type, 0);
3975 }
3976 }
3977
3978 *p_low = low;
3979 *p_high = high;
3980 *p_in_p = in_p;
3981 return arg0;
3982
3983 case NEGATE_EXPR:
3984 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3985 low and high are non-NULL, then normalize will DTRT. */
3986 if (!TYPE_UNSIGNED (arg0_type)
3987 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3988 {
3989 if (low == NULL_TREE)
3990 low = TYPE_MIN_VALUE (arg0_type);
3991 if (high == NULL_TREE)
3992 high = TYPE_MAX_VALUE (arg0_type);
3993 }
3994
3995 /* (-x) IN [a,b] -> x in [-b, -a] */
3996 n_low = range_binop (MINUS_EXPR, exp_type,
3997 build_int_cst (exp_type, 0),
3998 0, high, 1);
3999 n_high = range_binop (MINUS_EXPR, exp_type,
4000 build_int_cst (exp_type, 0),
4001 0, low, 0);
4002 if (n_high != 0 && TREE_OVERFLOW (n_high))
4003 return NULL_TREE;
4004 goto normalize;
4005
4006 case BIT_NOT_EXPR:
4007 /* ~ X -> -X - 1 */
4008 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4009 build_int_cst (exp_type, 1));
4010
4011 case PLUS_EXPR:
4012 case MINUS_EXPR:
4013 if (TREE_CODE (arg1) != INTEGER_CST)
4014 return NULL_TREE;
4015
4016 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4017 move a constant to the other side. */
4018 if (!TYPE_UNSIGNED (arg0_type)
4019 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4020 return NULL_TREE;
4021
4022 /* If EXP is signed, any overflow in the computation is undefined,
4023 so we don't worry about it so long as our computations on
4024 the bounds don't overflow. For unsigned, overflow is defined
4025 and this is exactly the right thing. */
4026 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4027 arg0_type, low, 0, arg1, 0);
4028 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4029 arg0_type, high, 1, arg1, 0);
4030 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4031 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4032 return NULL_TREE;
4033
4034 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4035 *strict_overflow_p = true;
4036
4037 normalize:
4038 /* Check for an unsigned range which has wrapped around the maximum
4039 value thus making n_high < n_low, and normalize it. */
4040 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4041 {
4042 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4043 build_int_cst (TREE_TYPE (n_high), 1), 0);
4044 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4045 build_int_cst (TREE_TYPE (n_low), 1), 0);
4046
4047 /* If the range is of the form +/- [ x+1, x ], we won't
4048 be able to normalize it. But then, it represents the
4049 whole range or the empty set, so make it
4050 +/- [ -, - ]. */
4051 if (tree_int_cst_equal (n_low, low)
4052 && tree_int_cst_equal (n_high, high))
4053 low = high = 0;
4054 else
4055 in_p = ! in_p;
4056 }
4057 else
4058 low = n_low, high = n_high;
4059
4060 *p_low = low;
4061 *p_high = high;
4062 *p_in_p = in_p;
4063 return arg0;
4064
4065 CASE_CONVERT:
4066 case NON_LVALUE_EXPR:
4067 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4068 return NULL_TREE;
4069
4070 if (! INTEGRAL_TYPE_P (arg0_type)
4071 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4072 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4073 return NULL_TREE;
4074
4075 n_low = low, n_high = high;
4076
4077 if (n_low != 0)
4078 n_low = fold_convert_loc (loc, arg0_type, n_low);
4079
4080 if (n_high != 0)
4081 n_high = fold_convert_loc (loc, arg0_type, n_high);
4082
4083 /* If we're converting arg0 from an unsigned type to exp's
4084 signed type, we will be doing the comparison as unsigned.
4085 The tests above have already verified that LOW and HIGH
4086 are both positive.
4087
4088 So we have to ensure that we will handle large unsigned
4089 values the same way that the current signed bounds treat
4090 negative values. */
4091
4092 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4093 {
4094 tree high_positive;
4095 tree equiv_type;
4096 /* For fixed-point modes, we need to pass the saturating flag
4097 as the 2nd parameter. */
4098 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4099 equiv_type
4100 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4101 TYPE_SATURATING (arg0_type));
4102 else
4103 equiv_type
4104 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4105
4106 /* A range without an upper bound is, naturally, unbounded.
4107 Since convert would have cropped a very large value, use
4108 the max value for the destination type. */
4109 high_positive
4110 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4111 : TYPE_MAX_VALUE (arg0_type);
4112
4113 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4114 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4115 fold_convert_loc (loc, arg0_type,
4116 high_positive),
4117 build_int_cst (arg0_type, 1));
4118
4119 /* If the low bound is specified, "and" the range with the
4120 range for which the original unsigned value will be
4121 positive. */
4122 if (low != 0)
4123 {
4124 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4125 1, fold_convert_loc (loc, arg0_type,
4126 integer_zero_node),
4127 high_positive))
4128 return NULL_TREE;
4129
4130 in_p = (n_in_p == in_p);
4131 }
4132 else
4133 {
4134 /* Otherwise, "or" the range with the range of the input
4135 that will be interpreted as negative. */
4136 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4137 1, fold_convert_loc (loc, arg0_type,
4138 integer_zero_node),
4139 high_positive))
4140 return NULL_TREE;
4141
4142 in_p = (in_p != n_in_p);
4143 }
4144 }
4145
4146 *p_low = n_low;
4147 *p_high = n_high;
4148 *p_in_p = in_p;
4149 return arg0;
4150
4151 default:
4152 return NULL_TREE;
4153 }
4154 }
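
/* Editor's sketch: the PLUS_EXPR case above in action.  For unsigned x,
   "x + 10 <= 20" starts as + [0, 20] on x + 10; moving the constant to the
   bounds gives + [-10, 10], which wraps (n_high < n_low) and normalizes to
   - [11, UINT_MAX - 10] (UINT_MAX from <limits.h>):  */
static int mrs_orig (unsigned x)    { return x + 10u <= 20u; }
static int mrs_stepped (unsigned x) { return ! (x >= 11u && x <= UINT_MAX - 10u); }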
4155
4156 /* Given EXP, a logical expression, set the range it is testing into
4157 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4158 actually being tested. *PLOW and *PHIGH will be made of the same
4159 type as the returned expression. If EXP is not a comparison, we
4160 will most likely not be returning a useful value and range. Set
4161 *STRICT_OVERFLOW_P to true if the return value is only valid
4162 because signed overflow is undefined; otherwise, do not change
4163 *STRICT_OVERFLOW_P. */
4164
4165 tree
4166 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4167 bool *strict_overflow_p)
4168 {
4169 enum tree_code code;
4170 tree arg0, arg1 = NULL_TREE;
4171 tree exp_type, nexp;
4172 int in_p;
4173 tree low, high;
4174 location_t loc = EXPR_LOCATION (exp);
4175
4176 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4177 and see if we can refine the range. Some of the cases below may not
4178 happen, but it doesn't seem worth worrying about this. We "continue"
4179 the outer loop when we've changed something; otherwise we "break"
4180 the switch, which will "break" the while. */
4181
4182 in_p = 0;
4183 low = high = build_int_cst (TREE_TYPE (exp), 0);
4184
4185 while (1)
4186 {
4187 code = TREE_CODE (exp);
4188 exp_type = TREE_TYPE (exp);
4189 arg0 = NULL_TREE;
4190
4191 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4192 {
4193 if (TREE_OPERAND_LENGTH (exp) > 0)
4194 arg0 = TREE_OPERAND (exp, 0);
4195 if (TREE_CODE_CLASS (code) == tcc_binary
4196 || TREE_CODE_CLASS (code) == tcc_comparison
4197 || (TREE_CODE_CLASS (code) == tcc_expression
4198 && TREE_OPERAND_LENGTH (exp) > 1))
4199 arg1 = TREE_OPERAND (exp, 1);
4200 }
4201 if (arg0 == NULL_TREE)
4202 break;
4203
4204 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4205 &high, &in_p, strict_overflow_p);
4206 if (nexp == NULL_TREE)
4207 break;
4208 exp = nexp;
4209 }
4210
4211 /* If EXP is a constant, we can evaluate whether this is true or false. */
4212 if (TREE_CODE (exp) == INTEGER_CST)
4213 {
4214 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4215 exp, 0, low, 0))
4216 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4217 exp, 1, high, 1)));
4218 low = high = 0;
4219 exp = 0;
4220 }
4221
4222 *pin_p = in_p, *plow = low, *phigh = high;
4223 return exp;
4224 }
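
/* Editor's sketch: what the loop above computes for a compound test.  For
   "(unsigned) x - 2 <= 3", make_range_step first peels the LE_EXPR (giving
   + [0, 3]), then the MINUS_EXPR (giving + [2, 5]), then the conversion,
   returning x with in_p = 1, low = 2, high = 5 -- i.e.:  */
static int mr_demo (unsigned x) { return x >= 2u && x <= 5u; }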
4225 \f
4226 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4227 type, TYPE, return an expression to test if EXP is in (or out of, depending
4228 on IN_P) the range. Return 0 if the test couldn't be created. */
4229
4230 tree
4231 build_range_check (location_t loc, tree type, tree exp, int in_p,
4232 tree low, tree high)
4233 {
4234 tree etype = TREE_TYPE (exp), value;
4235
4236 #ifdef HAVE_canonicalize_funcptr_for_compare
4237 /* Disable this optimization for function pointer expressions
4238 on targets that require function pointer canonicalization. */
4239 if (HAVE_canonicalize_funcptr_for_compare
4240 && TREE_CODE (etype) == POINTER_TYPE
4241 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4242 return NULL_TREE;
4243 #endif
4244
4245 if (! in_p)
4246 {
4247 value = build_range_check (loc, type, exp, 1, low, high);
4248 if (value != 0)
4249 return invert_truthvalue_loc (loc, value);
4250
4251 return 0;
4252 }
4253
4254 if (low == 0 && high == 0)
4255 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4256
4257 if (low == 0)
4258 return fold_build2_loc (loc, LE_EXPR, type, exp,
4259 fold_convert_loc (loc, etype, high));
4260
4261 if (high == 0)
4262 return fold_build2_loc (loc, GE_EXPR, type, exp,
4263 fold_convert_loc (loc, etype, low));
4264
4265 if (operand_equal_p (low, high, 0))
4266 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4267 fold_convert_loc (loc, etype, low));
4268
4269 if (integer_zerop (low))
4270 {
4271 if (! TYPE_UNSIGNED (etype))
4272 {
4273 etype = unsigned_type_for (etype);
4274 high = fold_convert_loc (loc, etype, high);
4275 exp = fold_convert_loc (loc, etype, exp);
4276 }
4277 return build_range_check (loc, type, exp, 1, 0, high);
4278 }
4279
4280 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4281 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4282 {
4283 int prec = TYPE_PRECISION (etype);
4284
4285 if (wi::mask (prec - 1, false, prec) == high)
4286 {
4287 if (TYPE_UNSIGNED (etype))
4288 {
4289 tree signed_etype = signed_type_for (etype);
4290 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4291 etype
4292 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4293 else
4294 etype = signed_etype;
4295 exp = fold_convert_loc (loc, etype, exp);
4296 }
4297 return fold_build2_loc (loc, GT_EXPR, type, exp,
4298 build_int_cst (etype, 0));
4299 }
4300 }
4301
4302 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4303 This requires wrap-around arithmetic for the type of the expression.
4304 First make sure that arithmetic in this type is valid, then make sure
4305 that it wraps around. */
4306 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4307 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4308 TYPE_UNSIGNED (etype));
4309
4310 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4311 {
4312 tree utype, minv, maxv;
4313
4314 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4315 for the type in question, as we rely on this here. */
4316 utype = unsigned_type_for (etype);
4317 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4318 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4319 build_int_cst (TREE_TYPE (maxv), 1), 1);
4320 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4321
4322 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4323 minv, 1, maxv, 1)))
4324 etype = utype;
4325 else
4326 return 0;
4327 }
4328
4329 high = fold_convert_loc (loc, etype, high);
4330 low = fold_convert_loc (loc, etype, low);
4331 exp = fold_convert_loc (loc, etype, exp);
4332
4333 value = const_binop (MINUS_EXPR, high, low);
4334
4335
4336 if (POINTER_TYPE_P (etype))
4337 {
4338 if (value != 0 && !TREE_OVERFLOW (value))
4339 {
4340 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4341 return build_range_check (loc, type,
4342 fold_build_pointer_plus_loc (loc, exp, low),
4343 1, build_int_cst (etype, 0), value);
4344 }
4345 return 0;
4346 }
4347
4348 if (value != 0 && !TREE_OVERFLOW (value))
4349 return build_range_check (loc, type,
4350 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4351 1, build_int_cst (etype, 0), value);
4352
4353 return 0;
4354 }
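
/* Editor's sketch: the subtraction trick at the heart of build_range_check.
   With wrap-around (unsigned) arithmetic, one subtraction and one compare
   replace the two compares; LO <= HI is assumed:  */
static int brc_orig (unsigned c, unsigned lo, unsigned hi)
{ return c >= lo && c <= hi; }
static int brc_folded (unsigned c, unsigned lo, unsigned hi)
{ return c - lo <= hi - lo; }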
4355 \f
4356 /* Return the predecessor of VAL in its type, handling the infinite case. */
4357
4358 static tree
4359 range_predecessor (tree val)
4360 {
4361 tree type = TREE_TYPE (val);
4362
4363 if (INTEGRAL_TYPE_P (type)
4364 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4365 return 0;
4366 else
4367 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4368 build_int_cst (TREE_TYPE (val), 1), 0);
4369 }
4370
4371 /* Return the successor of VAL in its type, handling the infinite case. */
4372
4373 static tree
4374 range_successor (tree val)
4375 {
4376 tree type = TREE_TYPE (val);
4377
4378 if (INTEGRAL_TYPE_P (type)
4379 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4380 return 0;
4381 else
4382 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4383 build_int_cst (TREE_TYPE (val), 1), 0);
4384 }
4385
4386 /* Given two ranges, see if we can merge them into one. Return 1 if we
4387 can, 0 if we can't. Set the output range into the specified parameters. */
4388
4389 bool
4390 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4391 tree high0, int in1_p, tree low1, tree high1)
4392 {
4393 int no_overlap;
4394 int subset;
4395 int temp;
4396 tree tem;
4397 int in_p;
4398 tree low, high;
4399 int lowequal = ((low0 == 0 && low1 == 0)
4400 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4401 low0, 0, low1, 0)));
4402 int highequal = ((high0 == 0 && high1 == 0)
4403 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4404 high0, 1, high1, 1)));
4405
4406 /* Make range 0 be the range that starts first, or ends last if they
4407 start at the same value. Swap them if it isn't. */
4408 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4409 low0, 0, low1, 0))
4410 || (lowequal
4411 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4412 high1, 1, high0, 1))))
4413 {
4414 temp = in0_p, in0_p = in1_p, in1_p = temp;
4415 tem = low0, low0 = low1, low1 = tem;
4416 tem = high0, high0 = high1, high1 = tem;
4417 }
4418
4419 /* Now flag two cases, whether the ranges are disjoint or whether the
4420 second range is totally subsumed in the first. Note that the tests
4421 below are simplified by the ones above. */
4422 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4423 high0, 1, low1, 0));
4424 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4425 high1, 1, high0, 1));
4426
4427 /* We now have four cases, depending on whether we are including or
4428 excluding the two ranges. */
4429 if (in0_p && in1_p)
4430 {
4431 /* If they don't overlap, the result is false. If the second range
4432 is a subset it is the result. Otherwise, the range is from the start
4433 of the second to the end of the first. */
4434 if (no_overlap)
4435 in_p = 0, low = high = 0;
4436 else if (subset)
4437 in_p = 1, low = low1, high = high1;
4438 else
4439 in_p = 1, low = low1, high = high0;
4440 }
4441
4442 else if (in0_p && ! in1_p)
4443 {
4444 /* If they don't overlap, the result is the first range. If they are
4445 equal, the result is false. If the second range is a subset of the
4446 first, and the ranges begin at the same place, we go from just after
4447 the end of the second range to the end of the first. If the second
4448 range is not a subset of the first, or if it is a subset and both
4449 ranges end at the same place, the range starts at the start of the
4450 first range and ends just before the second range.
4451 Otherwise, we can't describe this as a single range. */
4452 if (no_overlap)
4453 in_p = 1, low = low0, high = high0;
4454 else if (lowequal && highequal)
4455 in_p = 0, low = high = 0;
4456 else if (subset && lowequal)
4457 {
4458 low = range_successor (high1);
4459 high = high0;
4460 in_p = 1;
4461 if (low == 0)
4462 {
4463 /* We are in the weird situation where high0 > high1 but
4464 high1 has no successor. Punt. */
4465 return 0;
4466 }
4467 }
4468 else if (! subset || highequal)
4469 {
4470 low = low0;
4471 high = range_predecessor (low1);
4472 in_p = 1;
4473 if (high == 0)
4474 {
4475 /* low0 < low1 but low1 has no predecessor. Punt. */
4476 return 0;
4477 }
4478 }
4479 else
4480 return 0;
4481 }
4482
4483 else if (! in0_p && in1_p)
4484 {
4485 /* If they don't overlap, the result is the second range. If the second
4486 is a subset of the first, the result is false. Otherwise,
4487 the range starts just after the first range and ends at the
4488 end of the second. */
4489 if (no_overlap)
4490 in_p = 1, low = low1, high = high1;
4491 else if (subset || highequal)
4492 in_p = 0, low = high = 0;
4493 else
4494 {
4495 low = range_successor (high0);
4496 high = high1;
4497 in_p = 1;
4498 if (low == 0)
4499 {
4500 /* high1 > high0 but high0 has no successor. Punt. */
4501 return 0;
4502 }
4503 }
4504 }
4505
4506 else
4507 {
4508 /* The case where we are excluding both ranges. Here the complex case
4509 is if they don't overlap. In that case, the only time we have a
4510 range is if they are adjacent. If the second is a subset of the
4511 first, the result is the first. Otherwise, the range to exclude
4512 starts at the beginning of the first range and ends at the end of the
4513 second. */
4514 if (no_overlap)
4515 {
4516 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4517 range_successor (high0),
4518 1, low1, 0)))
4519 in_p = 0, low = low0, high = high1;
4520 else
4521 {
4522 /* Canonicalize - [min, x] into - [-, x]. */
4523 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4524 switch (TREE_CODE (TREE_TYPE (low0)))
4525 {
4526 case ENUMERAL_TYPE:
4527 if (TYPE_PRECISION (TREE_TYPE (low0))
4528 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4529 break;
4530 /* FALLTHROUGH */
4531 case INTEGER_TYPE:
4532 if (tree_int_cst_equal (low0,
4533 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4534 low0 = 0;
4535 break;
4536 case POINTER_TYPE:
4537 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4538 && integer_zerop (low0))
4539 low0 = 0;
4540 break;
4541 default:
4542 break;
4543 }
4544
4545 /* Canonicalize - [x, max] into - [x, -]. */
4546 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4547 switch (TREE_CODE (TREE_TYPE (high1)))
4548 {
4549 case ENUMERAL_TYPE:
4550 if (TYPE_PRECISION (TREE_TYPE (high1))
4551 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4552 break;
4553 /* FALLTHROUGH */
4554 case INTEGER_TYPE:
4555 if (tree_int_cst_equal (high1,
4556 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4557 high1 = 0;
4558 break;
4559 case POINTER_TYPE:
4560 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4561 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4562 high1, 1,
4563 build_int_cst (TREE_TYPE (high1), 1),
4564 1)))
4565 high1 = 0;
4566 break;
4567 default:
4568 break;
4569 }
4570
4571 /* The ranges might also be adjacent between the maximum and
4572 minimum values of the given type. For
4573 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4574 return + [x + 1, y - 1]. */
4575 if (low0 == 0 && high1 == 0)
4576 {
4577 low = range_successor (high0);
4578 high = range_predecessor (low1);
4579 if (low == 0 || high == 0)
4580 return 0;
4581
4582 in_p = 1;
4583 }
4584 else
4585 return 0;
4586 }
4587 }
4588 else if (subset)
4589 in_p = 0, low = low0, high = high0;
4590 else
4591 in_p = 0, low = low0, high = high1;
4592 }
4593
4594 *pin_p = in_p, *plow = low, *phigh = high;
4595 return 1;
4596 }
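
/* Editor's sketch: one concrete merge.  "And"-ing + [2, 5] with + [4, 9]
   (both in_p) hits the overlapping, non-subset case above and yields
   + [low1, high0] = + [4, 5], so
   (x >= 2 && x <= 5) && (x >= 4 && x <= 9) can become:  */
static int mrg_demo (unsigned x) { return x - 4u <= 1u; }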
4597 \f
4598
4599 /* Subroutine of fold, looking inside expressions of the form
4600 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4601 of the COND_EXPR. This function is being used also to optimize
4602 A op B ? C : A, by reversing the comparison first.
4603
4604 Return a folded expression whose code is not a COND_EXPR
4605 anymore, or NULL_TREE if no folding opportunity is found. */
4606
4607 static tree
4608 fold_cond_expr_with_comparison (location_t loc, tree type,
4609 tree arg0, tree arg1, tree arg2)
4610 {
4611 enum tree_code comp_code = TREE_CODE (arg0);
4612 tree arg00 = TREE_OPERAND (arg0, 0);
4613 tree arg01 = TREE_OPERAND (arg0, 1);
4614 tree arg1_type = TREE_TYPE (arg1);
4615 tree tem;
4616
4617 STRIP_NOPS (arg1);
4618 STRIP_NOPS (arg2);
4619
4620 /* If we have A op 0 ? A : -A, consider applying the following
4621 transformations:
4622
4623 A == 0? A : -A same as -A
4624 A != 0? A : -A same as A
4625 A >= 0? A : -A same as abs (A)
4626 A > 0? A : -A same as abs (A)
4627 A <= 0? A : -A same as -abs (A)
4628 A < 0? A : -A same as -abs (A)
4629
4630 None of these transformations work for modes with signed
4631 zeros. If A is +/-0, the first two transformations will
4632 change the sign of the result (from +0 to -0, or vice
4633 versa). The last four will fix the sign of the result,
4634 even though the original expressions could be positive or
4635 negative, depending on the sign of A.
4636
4637 Note that all these transformations are correct if A is
4638 NaN, since the two alternatives (A and -A) are also NaNs. */
4639 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4640 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4641 ? real_zerop (arg01)
4642 : integer_zerop (arg01))
4643 && ((TREE_CODE (arg2) == NEGATE_EXPR
4644 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4645 /* In the case that A is of the form X-Y, '-A' (arg2) may
4646 have already been folded to Y-X, check for that. */
4647 || (TREE_CODE (arg1) == MINUS_EXPR
4648 && TREE_CODE (arg2) == MINUS_EXPR
4649 && operand_equal_p (TREE_OPERAND (arg1, 0),
4650 TREE_OPERAND (arg2, 1), 0)
4651 && operand_equal_p (TREE_OPERAND (arg1, 1),
4652 TREE_OPERAND (arg2, 0), 0))))
4653 switch (comp_code)
4654 {
4655 case EQ_EXPR:
4656 case UNEQ_EXPR:
4657 tem = fold_convert_loc (loc, arg1_type, arg1);
4658 return pedantic_non_lvalue_loc (loc,
4659 fold_convert_loc (loc, type,
4660 negate_expr (tem)));
4661 case NE_EXPR:
4662 case LTGT_EXPR:
4663 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4664 case UNGE_EXPR:
4665 case UNGT_EXPR:
4666 if (flag_trapping_math)
4667 break;
4668 /* Fall through. */
4669 case GE_EXPR:
4670 case GT_EXPR:
4671 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4672 arg1 = fold_convert_loc (loc, signed_type_for
4673 (TREE_TYPE (arg1)), arg1);
4674 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4675 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4676 case UNLE_EXPR:
4677 case UNLT_EXPR:
4678 if (flag_trapping_math)
4679 break;
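	/* Fall through. */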
4680 case LE_EXPR:
4681 case LT_EXPR:
4682 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4683 arg1 = fold_convert_loc (loc, signed_type_for
4684 (TREE_TYPE (arg1)), arg1);
4685 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4686 return negate_expr (fold_convert_loc (loc, type, tem));
4687 default:
4688 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4689 break;
4690 }
4691
4692 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4693 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4694 both transformations are correct when A is NaN: A != 0
4695 is then true, and A == 0 is false. */
4696
4697 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4698 && integer_zerop (arg01) && integer_zerop (arg2))
4699 {
4700 if (comp_code == NE_EXPR)
4701 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4702 else if (comp_code == EQ_EXPR)
4703 return build_zero_cst (type);
4704 }
4705
4706 /* Try some transformations of A op B ? A : B.
4707
4708 A == B? A : B same as B
4709 A != B? A : B same as A
4710 A >= B? A : B same as max (A, B)
4711 A > B? A : B same as max (B, A)
4712 A <= B? A : B same as min (A, B)
4713 A < B? A : B same as min (B, A)
4714
4715 As above, these transformations don't work in the presence
4716 of signed zeros. For example, if A and B are zeros of
4717 opposite sign, the first two transformations will change
4718 the sign of the result. In the last four, the original
4719 expressions give different results for (A=+0, B=-0) and
4720 (A=-0, B=+0), but the transformed expressions do not.
4721
4722 The first two transformations are correct if either A or B
4723 is a NaN. In the first transformation, the condition will
4724 be false, and B will indeed be chosen. In the case of the
4725 second transformation, the condition A != B will be true,
4726 and A will be chosen.
4727
4728 The conversions to max() and min() are not correct if B is
4729 a number and A is not. The conditions in the original
4730 expressions will be false, so all four give B. The min()
4731 and max() versions would give a NaN instead. */
4732 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4733 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4734 /* Avoid these transformations if the COND_EXPR may be used
4735 as an lvalue in the C++ front-end. PR c++/19199. */
4736 && (in_gimple_form
4737 || VECTOR_TYPE_P (type)
4738 || (strcmp (lang_hooks.name, "GNU C++") != 0
4739 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4740 || ! maybe_lvalue_p (arg1)
4741 || ! maybe_lvalue_p (arg2)))
4742 {
4743 tree comp_op0 = arg00;
4744 tree comp_op1 = arg01;
4745 tree comp_type = TREE_TYPE (comp_op0);
4746
4747 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4748 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4749 {
4750 comp_type = type;
4751 comp_op0 = arg1;
4752 comp_op1 = arg2;
4753 }
4754
4755 switch (comp_code)
4756 {
4757 case EQ_EXPR:
4758 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4759 case NE_EXPR:
4760 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4761 case LE_EXPR:
4762 case LT_EXPR:
4763 case UNLE_EXPR:
4764 case UNLT_EXPR:
4765 /* In C++ a ?: expression can be an lvalue, so put the
4766 operand which will be used if they are equal first
4767 so that we can convert this back to the
4768 corresponding COND_EXPR. */
4769 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4770 {
4771 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4772 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4773 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4774 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4775 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4776 comp_op1, comp_op0);
4777 return pedantic_non_lvalue_loc (loc,
4778 fold_convert_loc (loc, type, tem));
4779 }
4780 break;
4781 case GE_EXPR:
4782 case GT_EXPR:
4783 case UNGE_EXPR:
4784 case UNGT_EXPR:
4785 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4786 {
4787 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4788 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4789 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4790 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4791 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4792 comp_op1, comp_op0);
4793 return pedantic_non_lvalue_loc (loc,
4794 fold_convert_loc (loc, type, tem));
4795 }
4796 break;
4797 case UNEQ_EXPR:
4798 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4799 return pedantic_non_lvalue_loc (loc,
4800 fold_convert_loc (loc, type, arg2));
4801 break;
4802 case LTGT_EXPR:
4803 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4804 return pedantic_non_lvalue_loc (loc,
4805 fold_convert_loc (loc, type, arg1));
4806 break;
4807 default:
4808 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4809 break;
4810 }
4811 }
4812
4813 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4814 we might still be able to simplify this. For example,
4815 if C1 is one less or one more than C2, this might have started
4816 out as a MIN or MAX and been transformed by this function.
4817 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4818
4819 if (INTEGRAL_TYPE_P (type)
4820 && TREE_CODE (arg01) == INTEGER_CST
4821 && TREE_CODE (arg2) == INTEGER_CST)
4822 switch (comp_code)
4823 {
4824 case EQ_EXPR:
4825 if (TREE_CODE (arg1) == INTEGER_CST)
4826 break;
4827 /* We can replace A with C1 in this case. */
4828 arg1 = fold_convert_loc (loc, type, arg01);
4829 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4830
4831 case LT_EXPR:
4832 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4833 MIN_EXPR, to preserve the signedness of the comparison. */
4834 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4835 OEP_ONLY_CONST)
4836 && operand_equal_p (arg01,
4837 const_binop (PLUS_EXPR, arg2,
4838 build_int_cst (type, 1)),
4839 OEP_ONLY_CONST))
4840 {
4841 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4842 fold_convert_loc (loc, TREE_TYPE (arg00),
4843 arg2));
4844 return pedantic_non_lvalue_loc (loc,
4845 fold_convert_loc (loc, type, tem));
4846 }
4847 break;
4848
4849 case LE_EXPR:
4850 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4851 as above. */
4852 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4853 OEP_ONLY_CONST)
4854 && operand_equal_p (arg01,
4855 const_binop (MINUS_EXPR, arg2,
4856 build_int_cst (type, 1)),
4857 OEP_ONLY_CONST))
4858 {
4859 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4860 fold_convert_loc (loc, TREE_TYPE (arg00),
4861 arg2));
4862 return pedantic_non_lvalue_loc (loc,
4863 fold_convert_loc (loc, type, tem));
4864 }
4865 break;
4866
4867 case GT_EXPR:
4868 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4869 MAX_EXPR, to preserve the signedness of the comparison. */
4870 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4871 OEP_ONLY_CONST)
4872 && operand_equal_p (arg01,
4873 const_binop (MINUS_EXPR, arg2,
4874 build_int_cst (type, 1)),
4875 OEP_ONLY_CONST))
4876 {
4877 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4878 fold_convert_loc (loc, TREE_TYPE (arg00),
4879 arg2));
4880 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4881 }
4882 break;
4883
4884 case GE_EXPR:
4885 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4886 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4887 OEP_ONLY_CONST)
4888 && operand_equal_p (arg01,
4889 const_binop (PLUS_EXPR, arg2,
4890 build_int_cst (type, 1)),
4891 OEP_ONLY_CONST))
4892 {
4893 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4894 fold_convert_loc (loc, TREE_TYPE (arg00),
4895 arg2));
4896 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4897 }
4898 break;
4899 case NE_EXPR:
4900 break;
4901 default:
4902 gcc_unreachable ();
4903 }
4904
4905 return NULL_TREE;
4906 }
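
/* Editor's sketch: two rows of the "A op 0 ? A : -A" table above, written
   out for integers, where there are no signed zeros to honor:  */
static int fce_orig (int a)   { return a > 0 ? a : -a; }    /* A > 0 ? A : -A */
static int fce_folded (int a) { return __builtin_abs (a); } /* ABS_EXPR */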
4907
4908
4909 \f
4910 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4911 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4912 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4913 false) >= 2)
4914 #endif
4915
4916 /* EXP is some logical combination of boolean tests. See if we can
4917 merge it into some range test. Return the new tree if so. */
4918
4919 static tree
4920 fold_range_test (location_t loc, enum tree_code code, tree type,
4921 tree op0, tree op1)
4922 {
4923 int or_op = (code == TRUTH_ORIF_EXPR
4924 || code == TRUTH_OR_EXPR);
4925 int in0_p, in1_p, in_p;
4926 tree low0, low1, low, high0, high1, high;
4927 bool strict_overflow_p = false;
4928 tree tem, lhs, rhs;
4929 const char * const warnmsg = G_("assuming signed overflow does not occur "
4930 "when simplifying range test");
4931
4932 if (!INTEGRAL_TYPE_P (type))
4933 return 0;
4934
4935 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4936 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4937
4938 /* If this is an OR operation, invert both sides; we will invert
4939 again at the end. */
4940 if (or_op)
4941 in0_p = ! in0_p, in1_p = ! in1_p;
4942
4943 /* If both expressions are the same, if we can merge the ranges, and we
4944 can build the range test, return it or it inverted. If one of the
4945 ranges is always true or always false, consider it to be the same
4946 expression as the other. */
4947 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4948 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4949 in1_p, low1, high1)
4950 && 0 != (tem = (build_range_check (loc, type,
4951 lhs != 0 ? lhs
4952 : rhs != 0 ? rhs : integer_zero_node,
4953 in_p, low, high))))
4954 {
4955 if (strict_overflow_p)
4956 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4957 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4958 }
4959
4960 /* On machines where branches are expensive, if this is a
4961 short-circuited branch and the underlying object on both sides
4962 is the same, make a non-short-circuit operation. */
4963 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4964 && lhs != 0 && rhs != 0
4965 && (code == TRUTH_ANDIF_EXPR
4966 || code == TRUTH_ORIF_EXPR)
4967 && operand_equal_p (lhs, rhs, 0))
4968 {
4969 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4970 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4971 which cases we can't do this. */
4972 if (simple_operand_p (lhs))
4973 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4974 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4975 type, op0, op1);
4976
4977 else if (!lang_hooks.decls.global_bindings_p ()
4978 && !CONTAINS_PLACEHOLDER_P (lhs))
4979 {
4980 tree common = save_expr (lhs);
4981
4982 if (0 != (lhs = build_range_check (loc, type, common,
4983 or_op ? ! in0_p : in0_p,
4984 low0, high0))
4985 && (0 != (rhs = build_range_check (loc, type, common,
4986 or_op ? ! in1_p : in1_p,
4987 low1, high1))))
4988 {
4989 if (strict_overflow_p)
4990 fold_overflow_warning (warnmsg,
4991 WARN_STRICT_OVERFLOW_COMPARISON);
4992 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4993 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4994 type, lhs, rhs);
4995 }
4996 }
4997 }
4998
4999 return 0;
5000 }
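
/* Editor's sketch: the classic target of fold_range_test.  The two
   comparisons merge into one range (+ ['0', '9']) and build_range_check
   emits the unsigned form:  */
static int frt_orig (int ch)   { return ch >= '0' && ch <= '9'; }
static int frt_folded (int ch) { return (unsigned) (ch - '0') <= 9u; }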
5001 \f
5002 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5003 bit value. Arrange things so the extra bits will be set to zero if and
5004 only if C is sign-extended to its full width. If MASK is nonzero,
5005 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5006
5007 static tree
5008 unextend (tree c, int p, int unsignedp, tree mask)
5009 {
5010 tree type = TREE_TYPE (c);
5011 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5012 tree temp;
5013
5014 if (p == modesize || unsignedp)
5015 return c;
5016
5017 /* We work by getting just the sign bit into the low-order bit, then
5018 into the high-order bit, then sign-extend. We then XOR that value
5019 with C. */
5020 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5021
5022 /* We must use a signed type in order to get an arithmetic right shift.
5023 However, we must also avoid introducing accidental overflows, so that
5024 a subsequent call to integer_zerop will work. Hence we must
5025 do the type conversion here. At this point, the constant is either
5026 zero or one, and the conversion to a signed type can never overflow.
5027 We could get an overflow if this conversion is done anywhere else. */
5028 if (TYPE_UNSIGNED (type))
5029 temp = fold_convert (signed_type_for (type), temp);
5030
5031 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5032 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5033 if (mask != 0)
5034 temp = const_binop (BIT_AND_EXPR, temp,
5035 fold_convert (TREE_TYPE (c), mask));
5036 /* If necessary, convert the type back to match the type of C. */
5037 if (TYPE_UNSIGNED (type))
5038 temp = fold_convert (type, temp);
5039
5040 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5041 }
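
/* Editor's sketch (32-bit int and arithmetic right shift assumed; the
   optional MASK step is omitted): unextend XORs away a correct sign
   extension, so the bits above position P - 1 of the result are zero iff C
   was sign-extended from its low P bits:  */
static int ux_demo (int c, int p)   /* 1 <= p <= 31 */
{
  int s = (c >> (p - 1)) & 1;                           /* field's sign bit */
  int smear = (int) ((unsigned) s << 31) >> (31 - p);   /* s in bits p..31 */
  return c ^ smear;
}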
5042 \f
5043 /* For an expression that has the form
5044 (A && B) || ~B
5045 or
5046 (A || B) && ~B,
5047 we can drop one of the inner expressions and simplify to
5048 A || ~B
5049 or
5050 A && ~B
5051 LOC is the location of the resulting expression. OP is the inner
5052 logical operation; the left-hand side in the examples above, while CMPOP
5053 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5054 removing a condition that guards another, as in
5055 (A != NULL && A->...) || A == NULL
5056 which we must not transform. If RHS_ONLY is true, only eliminate the
5057 right-most operand of the inner logical operation. */
5058
5059 static tree
5060 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5061 bool rhs_only)
5062 {
5063 tree type = TREE_TYPE (cmpop);
5064 enum tree_code code = TREE_CODE (cmpop);
5065 enum tree_code truthop_code = TREE_CODE (op);
5066 tree lhs = TREE_OPERAND (op, 0);
5067 tree rhs = TREE_OPERAND (op, 1);
5068 tree orig_lhs = lhs, orig_rhs = rhs;
5069 enum tree_code rhs_code = TREE_CODE (rhs);
5070 enum tree_code lhs_code = TREE_CODE (lhs);
5071 enum tree_code inv_code;
5072
5073 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5074 return NULL_TREE;
5075
5076 if (TREE_CODE_CLASS (code) != tcc_comparison)
5077 return NULL_TREE;
5078
5079 if (rhs_code == truthop_code)
5080 {
5081 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5082 if (newrhs != NULL_TREE)
5083 {
5084 rhs = newrhs;
5085 rhs_code = TREE_CODE (rhs);
5086 }
5087 }
5088 if (lhs_code == truthop_code && !rhs_only)
5089 {
5090 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5091 if (newlhs != NULL_TREE)
5092 {
5093 lhs = newlhs;
5094 lhs_code = TREE_CODE (lhs);
5095 }
5096 }
5097
5098 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5099 if (inv_code == rhs_code
5100 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5101 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5102 return lhs;
5103 if (!rhs_only && inv_code == lhs_code
5104 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5105 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5106 return rhs;
5107 if (rhs != orig_rhs || lhs != orig_lhs)
5108 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5109 lhs, rhs);
5110 return NULL_TREE;
5111 }
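
/* Editor's sketch: the "(A && B) || ~B" case from the comment above, at the
   source level; the inner guard is dropped because the opposite arm already
   covers it:  */
static int mto_orig (int a, int b)   { return (a > 0 && b != 0) || b == 0; }
static int mto_folded (int a, int b) { return a > 0 || b == 0; }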
5112
5113 /* Find ways of folding logical expressions of LHS and RHS:
5114 Try to merge two comparisons to the same innermost item.
5115 Look for range tests like "ch >= '0' && ch <= '9'".
5116 Look for combinations of simple terms on machines with expensive branches
5117 and evaluate the RHS unconditionally.
5118
5119 For example, if we have p->a == 2 && p->b == 4 and we can make an
5120 object large enough to span both A and B, we can do this with a comparison
5121 against the object ANDed with the a mask.
5122
5123 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5124 operations to do this with one comparison.
5125
5126 We check for both normal comparisons and the BIT_AND_EXPRs made by
5127 this function and the one above.
5128
5129 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5130 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5131
5132 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5133 two operands.
5134
5135 We return the simplified tree or 0 if no optimization is possible. */
5136
5137 static tree
5138 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5139 tree lhs, tree rhs)
5140 {
5141 /* If this is the "or" of two comparisons, we can do something if
5142 the comparisons are NE_EXPR. If this is the "and", we can do something
5143 if the comparisons are EQ_EXPR. I.e.,
5144 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5145
5146 WANTED_CODE is this operation code. For single bit fields, we can
5147 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5148 comparison for one-bit fields. */
5149
5150 enum tree_code wanted_code;
5151 enum tree_code lcode, rcode;
5152 tree ll_arg, lr_arg, rl_arg, rr_arg;
5153 tree ll_inner, lr_inner, rl_inner, rr_inner;
5154 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5155 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5156 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5157 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5158 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5159 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5160 machine_mode lnmode, rnmode;
5161 tree ll_mask, lr_mask, rl_mask, rr_mask;
5162 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5163 tree l_const, r_const;
5164 tree lntype, rntype, result;
5165 HOST_WIDE_INT first_bit, end_bit;
5166 int volatilep;
5167
5168 /* Start by getting the comparison codes. Fail if anything is volatile.
5169 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5170 it were surrounded with a NE_EXPR. */
5171
5172 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5173 return 0;
5174
5175 lcode = TREE_CODE (lhs);
5176 rcode = TREE_CODE (rhs);
5177
5178 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5179 {
5180 lhs = build2 (NE_EXPR, truth_type, lhs,
5181 build_int_cst (TREE_TYPE (lhs), 0));
5182 lcode = NE_EXPR;
5183 }
5184
5185 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5186 {
5187 rhs = build2 (NE_EXPR, truth_type, rhs,
5188 build_int_cst (TREE_TYPE (rhs), 0));
5189 rcode = NE_EXPR;
5190 }
5191
5192 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5193 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5194 return 0;
5195
5196 ll_arg = TREE_OPERAND (lhs, 0);
5197 lr_arg = TREE_OPERAND (lhs, 1);
5198 rl_arg = TREE_OPERAND (rhs, 0);
5199 rr_arg = TREE_OPERAND (rhs, 1);
5200
5201 /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
5202 if (simple_operand_p (ll_arg)
5203 && simple_operand_p (lr_arg))
5204 {
5205 if (operand_equal_p (ll_arg, rl_arg, 0)
5206 && operand_equal_p (lr_arg, rr_arg, 0))
5207 {
5208 result = combine_comparisons (loc, code, lcode, rcode,
5209 truth_type, ll_arg, lr_arg);
5210 if (result)
5211 return result;
5212 }
5213 else if (operand_equal_p (ll_arg, rr_arg, 0)
5214 && operand_equal_p (lr_arg, rl_arg, 0))
5215 {
5216 result = combine_comparisons (loc, code, lcode,
5217 swap_tree_comparison (rcode),
5218 truth_type, ll_arg, lr_arg);
5219 if (result)
5220 return result;
5221 }
5222 }
5223
5224 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5225 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5226
5227 /* If the RHS can be evaluated unconditionally and its operands are
5228 simple, it wins to evaluate the RHS unconditionally on machines
5229 with expensive branches. In this case, this isn't a comparison
5230 that can be merged. */
5231
5232 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5233 false) >= 2
5234 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5235 && simple_operand_p (rl_arg)
5236 && simple_operand_p (rr_arg))
5237 {
5238 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5239 if (code == TRUTH_OR_EXPR
5240 && lcode == NE_EXPR && integer_zerop (lr_arg)
5241 && rcode == NE_EXPR && integer_zerop (rr_arg)
5242 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5243 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5244 return build2_loc (loc, NE_EXPR, truth_type,
5245 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5246 ll_arg, rl_arg),
5247 build_int_cst (TREE_TYPE (ll_arg), 0));
5248
5249 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5250 if (code == TRUTH_AND_EXPR
5251 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5252 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5253 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5254 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5255 return build2_loc (loc, EQ_EXPR, truth_type,
5256 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5257 ll_arg, rl_arg),
5258 build_int_cst (TREE_TYPE (ll_arg), 0));
5259 }
5260
5261 /* See if the comparisons can be merged. Then get all the parameters for
5262 each side. */
5263
5264 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5265 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5266 return 0;
5267
5268 volatilep = 0;
5269 ll_inner = decode_field_reference (loc, ll_arg,
5270 &ll_bitsize, &ll_bitpos, &ll_mode,
5271 &ll_unsignedp, &volatilep, &ll_mask,
5272 &ll_and_mask);
5273 lr_inner = decode_field_reference (loc, lr_arg,
5274 &lr_bitsize, &lr_bitpos, &lr_mode,
5275 &lr_unsignedp, &volatilep, &lr_mask,
5276 &lr_and_mask);
5277 rl_inner = decode_field_reference (loc, rl_arg,
5278 &rl_bitsize, &rl_bitpos, &rl_mode,
5279 &rl_unsignedp, &volatilep, &rl_mask,
5280 &rl_and_mask);
5281 rr_inner = decode_field_reference (loc, rr_arg,
5282 &rr_bitsize, &rr_bitpos, &rr_mode,
5283 &rr_unsignedp, &volatilep, &rr_mask,
5284 &rr_and_mask);
5285
5286 /* The inner operation on the lhs of each comparison must be the
5287 same if we are to be able to do anything. Then see if we have
5288 constants. If not, the same must be true for the rhs's of both
5289 comparisons. */
5290 if (volatilep || ll_inner == 0 || rl_inner == 0
5291 || ! operand_equal_p (ll_inner, rl_inner, 0))
5292 return 0;
5293
5294 if (TREE_CODE (lr_arg) == INTEGER_CST
5295 && TREE_CODE (rr_arg) == INTEGER_CST)
5296 l_const = lr_arg, r_const = rr_arg;
5297 else if (lr_inner == 0 || rr_inner == 0
5298 || ! operand_equal_p (lr_inner, rr_inner, 0))
5299 return 0;
5300 else
5301 l_const = r_const = 0;
5302
5303 /* If either comparison code is not correct for our logical operation,
5304 fail. However, we can convert a one-bit comparison against zero into
5305 the opposite comparison against that bit being set in the field. */
5306
5307 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5308 if (lcode != wanted_code)
5309 {
5310 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5311 {
5312 /* Make the left operand unsigned, since we are only interested
5313 in the value of one bit. Otherwise we are doing the wrong
5314 thing below. */
5315 ll_unsignedp = 1;
5316 l_const = ll_mask;
5317 }
5318 else
5319 return 0;
5320 }
5321
5322 /* This is analogous to the code for l_const above. */
5323 if (rcode != wanted_code)
5324 {
5325 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5326 {
5327 rl_unsignedp = 1;
5328 r_const = rl_mask;
5329 }
5330 else
5331 return 0;
5332 }
5333
5334 /* See if we can find a mode that contains both fields being compared on
5335 the left. If we can't, fail. Otherwise, update all constants and masks
5336 to be relative to a field of that size. */
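   /* An illustrative (hypothetical) example of the merge done below:
      given

	struct s { unsigned a : 4; unsigned b : 4; } x;

      the test `x.a == 3 && x.b == 5' compares two fields that fit in
      one QImode byte, so both comparisons can be done with a single
      byte load tested against a single merged mask and constant (the
      exact bit positions depend on the target's endianness and
      bit-field layout).  */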
5337 first_bit = MIN (ll_bitpos, rl_bitpos);
5338 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5339 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5340 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5341 volatilep);
5342 if (lnmode == VOIDmode)
5343 return 0;
5344
5345 lnbitsize = GET_MODE_BITSIZE (lnmode);
5346 lnbitpos = first_bit & ~ (lnbitsize - 1);
5347 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5348 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5349
5350 if (BYTES_BIG_ENDIAN)
5351 {
5352 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5353 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5354 }
5355
5356 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5357 size_int (xll_bitpos));
5358 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5359 size_int (xrl_bitpos));
5360
5361 if (l_const)
5362 {
5363 l_const = fold_convert_loc (loc, lntype, l_const);
5364 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5365 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5366 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5367 fold_build1_loc (loc, BIT_NOT_EXPR,
5368 lntype, ll_mask))))
5369 {
5370 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5371
5372 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5373 }
5374 }
5375 if (r_const)
5376 {
5377 r_const = fold_convert_loc (loc, lntype, r_const);
5378 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5379 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5380 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5381 fold_build1_loc (loc, BIT_NOT_EXPR,
5382 lntype, rl_mask))))
5383 {
5384 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5385
5386 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5387 }
5388 }
5389
5390 /* If the right sides are not constant, do the same for them. Also,
5391 disallow this optimization if a size or signedness mismatch occurs
5392 between the left and right sides. */
5393 if (l_const == 0)
5394 {
5395 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5396 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5397 /* Make sure the two fields on the right
5398 correspond to the left without being swapped. */
5399 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5400 return 0;
5401
5402 first_bit = MIN (lr_bitpos, rr_bitpos);
5403 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5404 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5405 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5406 volatilep);
5407 if (rnmode == VOIDmode)
5408 return 0;
5409
5410 rnbitsize = GET_MODE_BITSIZE (rnmode);
5411 rnbitpos = first_bit & ~ (rnbitsize - 1);
5412 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5413 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5414
5415 if (BYTES_BIG_ENDIAN)
5416 {
5417 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5418 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5419 }
5420
5421 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5422 rntype, lr_mask),
5423 size_int (xlr_bitpos));
5424 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5425 rntype, rr_mask),
5426 size_int (xrr_bitpos));
5427
5428 /* Make a mask that corresponds to both fields being compared.
5429 Do this for both items being compared. If the operands are the
5430 same size and the bits being compared are in the same position
5431 then we can do this by masking both and comparing the masked
5432 results. */
5433 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5434 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5435 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5436 {
5437 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5438 ll_unsignedp || rl_unsignedp);
5439 if (! all_ones_mask_p (ll_mask, lnbitsize))
5440 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5441
5442 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5443 lr_unsignedp || rr_unsignedp);
5444 if (! all_ones_mask_p (lr_mask, rnbitsize))
5445 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5446
5447 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5448 }
5449
5450 /* There is still another way we can do something: If both pairs of
5451 fields being compared are adjacent, we may be able to make a wider
5452 field containing them both.
5453
5454 Note that we still must mask the lhs/rhs expressions. Furthermore,
5455 the mask must be shifted to account for the shift done by
5456 make_bit_field_ref. */
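      /* For example (hypothetical layout): if the lhs compares bits
	 [0,4) and [4,8) of one object while the rhs compares bits
	 [0,4) and [4,8) of another, each pair of adjacent 4-bit
	 references can be replaced by one 8-bit reference, masked and
	 shifted as described above.  */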
5457 if ((ll_bitsize + ll_bitpos == rl_bitpos
5458 && lr_bitsize + lr_bitpos == rr_bitpos)
5459 || (ll_bitpos == rl_bitpos + rl_bitsize
5460 && lr_bitpos == rr_bitpos + rr_bitsize))
5461 {
5462 tree type;
5463
5464 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5465 ll_bitsize + rl_bitsize,
5466 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5467 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5468 lr_bitsize + rr_bitsize,
5469 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5470
5471 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5472 size_int (MIN (xll_bitpos, xrl_bitpos)));
5473 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5474 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5475
5476 /* Convert to the smaller type before masking out unwanted bits. */
5477 type = lntype;
5478 if (lntype != rntype)
5479 {
5480 if (lnbitsize > rnbitsize)
5481 {
5482 lhs = fold_convert_loc (loc, rntype, lhs);
5483 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5484 type = rntype;
5485 }
5486 else if (lnbitsize < rnbitsize)
5487 {
5488 rhs = fold_convert_loc (loc, lntype, rhs);
5489 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5490 type = lntype;
5491 }
5492 }
5493
5494 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5495 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5496
5497 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5498 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5499
5500 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5501 }
5502
5503 return 0;
5504 }
5505
5506 /* Handle the case of comparisons with constants. If there is something in
5507 common between the masks, those bits of the constants must be the same.
5508 If not, the condition is always false. Test for this to avoid generating
5509 incorrect code below. */
5510 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5511 if (! integer_zerop (result)
5512 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5513 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5514 {
5515 if (wanted_code == NE_EXPR)
5516 {
5517 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5518 return constant_boolean_node (true, truth_type);
5519 }
5520 else
5521 {
5522 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5523 return constant_boolean_node (false, truth_type);
5524 }
5525 }
5526
5527 /* Construct the expression we will return. First get the component
5528 reference we will make. Unless the mask is all ones the width of
5529 that field, perform the mask operation. Then compare with the
5530 merged constant. */
5531 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5532 ll_unsignedp || rl_unsignedp);
5533
5534 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5535 if (! all_ones_mask_p (ll_mask, lnbitsize))
5536 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5537
5538 return build2_loc (loc, wanted_code, truth_type, result,
5539 const_binop (BIT_IOR_EXPR, l_const, r_const));
5540 }
5541 \f
5542 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5543 constant. */
5544
5545 static tree
5546 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5547 tree op0, tree op1)
5548 {
5549 tree arg0 = op0;
5550 enum tree_code op_code;
5551 tree comp_const;
5552 tree minmax_const;
5553 int consts_equal, consts_lt;
5554 tree inner;
5555
5556 STRIP_SIGN_NOPS (arg0);
5557
5558 op_code = TREE_CODE (arg0);
5559 minmax_const = TREE_OPERAND (arg0, 1);
5560 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5561 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5562 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5563 inner = TREE_OPERAND (arg0, 0);
5564
5565 /* If something does not permit us to optimize, return NULL_TREE. */
5566 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5567 || TREE_CODE (comp_const) != INTEGER_CST
5568 || TREE_OVERFLOW (comp_const)
5569 || TREE_CODE (minmax_const) != INTEGER_CST
5570 || TREE_OVERFLOW (minmax_const))
5571 return NULL_TREE;
5572
5573 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5574 and GT_EXPR, doing the rest with recursive calls using logical
5575 simplifications. */
5576 switch (code)
5577 {
5578 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5579 {
5580 tree tem
5581 = optimize_minmax_comparison (loc,
5582 invert_tree_comparison (code, false),
5583 type, op0, op1);
5584 if (tem)
5585 return invert_truthvalue_loc (loc, tem);
5586 return NULL_TREE;
5587 }
5588
5589 case GE_EXPR:
5590 return
5591 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5592 optimize_minmax_comparison
5593 (loc, EQ_EXPR, type, arg0, comp_const),
5594 optimize_minmax_comparison
5595 (loc, GT_EXPR, type, arg0, comp_const));
5596
5597 case EQ_EXPR:
5598 if (op_code == MAX_EXPR && consts_equal)
5599 /* MAX (X, 0) == 0 -> X <= 0 */
5600 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5601
5602 else if (op_code == MAX_EXPR && consts_lt)
5603 /* MAX (X, 0) == 5 -> X == 5 */
5604 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5605
5606 else if (op_code == MAX_EXPR)
5607 /* MAX (X, 0) == -1 -> false */
5608 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5609
5610 else if (consts_equal)
5611 /* MIN (X, 0) == 0 -> X >= 0 */
5612 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5613
5614 else if (consts_lt)
5615 /* MIN (X, 0) == 5 -> false */
5616 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5617
5618 else
5619 /* MIN (X, 0) == -1 -> X == -1 */
5620 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5621
5622 case GT_EXPR:
5623 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5624 /* MAX (X, 0) > 0 -> X > 0
5625 MAX (X, 0) > 5 -> X > 5 */
5626 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5627
5628 else if (op_code == MAX_EXPR)
5629 /* MAX (X, 0) > -1 -> true */
5630 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5631
5632 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5633 /* MIN (X, 0) > 0 -> false
5634 MIN (X, 0) > 5 -> false */
5635 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5636
5637 else
5638 /* MIN (X, 0) > -1 -> X > -1 */
5639 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5640
5641 default:
5642 return NULL_TREE;
5643 }
5644 }
5645 \f
5646 /* T is an integer expression that is being multiplied, divided, or taken a
5647 modulus (CODE says which and what kind of divide or modulus) by a
5648 constant C. See if we can eliminate that operation by folding it with
5649 other operations already in T. WIDE_TYPE, if non-null, is a type that
5650 should be used for the computation if wider than our type.
5651
5652 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5653 (X * 2) + (Y * 4). We must, however, be assured that either the original
5654 expression would not overflow or that overflow is undefined for the type
5655 in the language in question.
5656
5657 If we return a non-null expression, it is an equivalent form of the
5658 original computation, but need not be in the original type.
5659
5660 We set *STRICT_OVERFLOW_P to true if the return value depends on
5661 signed overflow being undefined. Otherwise we do not change
5662 *STRICT_OVERFLOW_P. */
5663
5664 static tree
5665 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5666 bool *strict_overflow_p)
5667 {
5668 /* To avoid exponential search depth, refuse to allow recursion past
5669 three levels. Beyond that (1) it's highly unlikely that we'll find
5670 something interesting and (2) we've probably processed it before
5671 when we built the inner expression. */
5672
5673 static int depth;
5674 tree ret;
5675
5676 if (depth > 3)
5677 return NULL;
5678
5679 depth++;
5680 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5681 depth--;
5682
5683 return ret;
5684 }
5685
5686 static tree
5687 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5688 bool *strict_overflow_p)
5689 {
5690 tree type = TREE_TYPE (t);
5691 enum tree_code tcode = TREE_CODE (t);
5692 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5693 > GET_MODE_SIZE (TYPE_MODE (type)))
5694 ? wide_type : type);
5695 tree t1, t2;
5696 int same_p = tcode == code;
5697 tree op0 = NULL_TREE, op1 = NULL_TREE;
5698 bool sub_strict_overflow_p;
5699
5700 /* Don't deal with constants of zero here; they confuse the code below. */
5701 if (integer_zerop (c))
5702 return NULL_TREE;
5703
5704 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5705 op0 = TREE_OPERAND (t, 0);
5706
5707 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5708 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5709
5710 /* Note that we need not handle conditional operations here since fold
5711 already handles those cases. So just do arithmetic here. */
5712 switch (tcode)
5713 {
5714 case INTEGER_CST:
5715 /* For a constant, we can always simplify if we are a multiply
5716 or (for divide and modulus) if it is a multiple of our constant. */
5717 if (code == MULT_EXPR
5718 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5719 return const_binop (code, fold_convert (ctype, t),
5720 fold_convert (ctype, c));
5721 break;
5722
5723 CASE_CONVERT: case NON_LVALUE_EXPR:
5724 /* If op0 is an expression ... */
5725 if ((COMPARISON_CLASS_P (op0)
5726 || UNARY_CLASS_P (op0)
5727 || BINARY_CLASS_P (op0)
5728 || VL_EXP_CLASS_P (op0)
5729 || EXPRESSION_CLASS_P (op0))
5730 /* ... and has wrapping overflow, and its type is smaller
5731 than ctype, then we cannot pass through as widening. */
5732 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5733 && (TYPE_PRECISION (ctype)
5734 > TYPE_PRECISION (TREE_TYPE (op0))))
5735 /* ... or this is a truncation (t is narrower than op0),
5736 then we cannot pass through this narrowing. */
5737 || (TYPE_PRECISION (type)
5738 < TYPE_PRECISION (TREE_TYPE (op0)))
5739 /* ... or signedness changes for division or modulus,
5740 then we cannot pass through this conversion. */
5741 || (code != MULT_EXPR
5742 && (TYPE_UNSIGNED (ctype)
5743 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5744 /* ... or has undefined overflow while the converted-to
5745 type has not, we cannot do the operation in the inner type
5746 as that would introduce undefined overflow. */
5747 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5748 && !TYPE_OVERFLOW_UNDEFINED (type))))
5749 break;
5750
5751 /* Pass the constant down and see if we can make a simplification. If
5752 we can, replace this expression with the inner simplification for
5753 possible later conversion to our or some other type. */
5754 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5755 && TREE_CODE (t2) == INTEGER_CST
5756 && !TREE_OVERFLOW (t2)
5757 && (0 != (t1 = extract_muldiv (op0, t2, code,
5758 code == MULT_EXPR
5759 ? ctype : NULL_TREE,
5760 strict_overflow_p))))
5761 return t1;
5762 break;
5763
5764 case ABS_EXPR:
5765 /* If widening the type changes it from signed to unsigned, then we
5766 must avoid building ABS_EXPR itself as unsigned. */
5767 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5768 {
5769 tree cstype = (*signed_type_for) (ctype);
5770 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5771 != 0)
5772 {
5773 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5774 return fold_convert (ctype, t1);
5775 }
5776 break;
5777 }
5778 /* If the constant is negative, we cannot simplify this. */
5779 if (tree_int_cst_sgn (c) == -1)
5780 break;
5781 /* FALLTHROUGH */
5782 case NEGATE_EXPR:
5783 /* For division and modulus, type can't be unsigned, as e.g.
5784 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5785 For signed types, even with wrapping overflow, this is fine. */
5786 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5787 break;
5788 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5789 != 0)
5790 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5791 break;
5792
5793 case MIN_EXPR: case MAX_EXPR:
5794 /* If widening the type changes the signedness, then we can't perform
5795 this optimization as that changes the result. */
5796 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5797 break;
5798
5799 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5800 sub_strict_overflow_p = false;
5801 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5802 &sub_strict_overflow_p)) != 0
5803 && (t2 = extract_muldiv (op1, c, code, wide_type,
5804 &sub_strict_overflow_p)) != 0)
5805 {
5806 if (tree_int_cst_sgn (c) < 0)
5807 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5808 if (sub_strict_overflow_p)
5809 *strict_overflow_p = true;
5810 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5811 fold_convert (ctype, t2));
5812 }
5813 break;
5814
5815 case LSHIFT_EXPR: case RSHIFT_EXPR:
5816 /* If the second operand is constant, this is a multiplication
5817 or floor division by a power of two, so we can treat it that
5818 way unless the multiplier or divisor overflows. Signed
5819 left-shift overflow is implementation-defined rather than
5820 undefined in C90, so do not convert signed left shift into
5821 multiplication. */
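      /* For example, an unsigned `X << 2' is rewritten below as
	 `X * 4', and `X >> 2' as `X / 4' (floor division); the
	 rewritten tree is then fed back through extract_muldiv so the
	 multiply/divide cases can try to combine it with C.  */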
5822 if (TREE_CODE (op1) == INTEGER_CST
5823 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5824 /* const_binop may not detect overflow correctly,
5825 so check for it explicitly here. */
5826 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5827 && 0 != (t1 = fold_convert (ctype,
5828 const_binop (LSHIFT_EXPR,
5829 size_one_node,
5830 op1)))
5831 && !TREE_OVERFLOW (t1))
5832 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5833 ? MULT_EXPR : FLOOR_DIV_EXPR,
5834 ctype,
5835 fold_convert (ctype, op0),
5836 t1),
5837 c, code, wide_type, strict_overflow_p);
5838 break;
5839
5840 case PLUS_EXPR: case MINUS_EXPR:
5841 /* See if we can eliminate the operation on both sides. If we can, we
5842 can return a new PLUS or MINUS. If we can't, the only remaining
5843 cases where we can do anything are if the second operand is a
5844 constant. */
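      /* For example, with signed X, `(X * 4 + 8) / 4' folds to
	 `X + 2' here: both addends are multiples of 4, so the
	 division can be eliminated on each side.  */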
5845 sub_strict_overflow_p = false;
5846 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5847 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5848 if (t1 != 0 && t2 != 0
5849 && (code == MULT_EXPR
5850 /* If not multiplication, we can only do this if both operands
5851 are divisible by c. */
5852 || (multiple_of_p (ctype, op0, c)
5853 && multiple_of_p (ctype, op1, c))))
5854 {
5855 if (sub_strict_overflow_p)
5856 *strict_overflow_p = true;
5857 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5858 fold_convert (ctype, t2));
5859 }
5860
5861 /* If this was a subtraction, negate OP1 and set it to be an addition.
5862 This simplifies the logic below. */
5863 if (tcode == MINUS_EXPR)
5864 {
5865 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5866 /* If OP1 was not easily negatable, the constant may be OP0. */
5867 if (TREE_CODE (op0) == INTEGER_CST)
5868 {
5869 tree tem = op0;
5870 op0 = op1;
5871 op1 = tem;
5872 tem = t1;
5873 t1 = t2;
5874 t2 = tem;
5875 }
5876 }
5877
5878 if (TREE_CODE (op1) != INTEGER_CST)
5879 break;
5880
5881 /* If either OP1 or C are negative, this optimization is not safe for
5882 some of the division and remainder types while for others we need
5883 to change the code. */
5884 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5885 {
5886 if (code == CEIL_DIV_EXPR)
5887 code = FLOOR_DIV_EXPR;
5888 else if (code == FLOOR_DIV_EXPR)
5889 code = CEIL_DIV_EXPR;
5890 else if (code != MULT_EXPR
5891 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5892 break;
5893 }
5894
5895 /* If it's a multiply or a division/modulus operation of a multiple
5896 of our constant, do the operation and verify it doesn't overflow. */
5897 if (code == MULT_EXPR
5898 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5899 {
5900 op1 = const_binop (code, fold_convert (ctype, op1),
5901 fold_convert (ctype, c));
5902 /* We allow the constant to overflow with wrapping semantics. */
5903 if (op1 == 0
5904 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5905 break;
5906 }
5907 else
5908 break;
5909
5910 /* If we have an unsigned type, we cannot widen the operation since it
5911 will change the result if the original computation overflowed. */
5912 if (TYPE_UNSIGNED (ctype) && ctype != type)
5913 break;
5914
5915 /* If we were able to eliminate our operation from the first side,
5916 apply our operation to the second side and reform the PLUS. */
5917 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5918 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5919
5920 /* The last case is if we are a multiply. In that case, we can
5921 apply the distributive law to commute the multiply and addition
5922 if the multiplication of the constants doesn't overflow
5923 and overflow is defined. With undefined overflow
5924 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5925 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5926 return fold_build2 (tcode, ctype,
5927 fold_build2 (code, ctype,
5928 fold_convert (ctype, op0),
5929 fold_convert (ctype, c)),
5930 op1);
5931
5932 break;
5933
5934 case MULT_EXPR:
5935 /* We have a special case here if we are doing something like
5936 (C * 8) % 4 since we know that's zero. */
5937 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5938 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5939 /* If the multiplication can overflow we cannot optimize this. */
5940 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5941 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5942 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5943 {
5944 *strict_overflow_p = true;
5945 return omit_one_operand (type, integer_zero_node, op0);
5946 }
5947
5948 /* ... fall through ... */
5949
5950 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5951 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5952 /* If we can extract our operation from the LHS, do so and return a
5953 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5954 do something only if the second operand is a constant. */
5955 if (same_p
5956 && (t1 = extract_muldiv (op0, c, code, wide_type,
5957 strict_overflow_p)) != 0)
5958 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5959 fold_convert (ctype, op1));
5960 else if (tcode == MULT_EXPR && code == MULT_EXPR
5961 && (t1 = extract_muldiv (op1, c, code, wide_type,
5962 strict_overflow_p)) != 0)
5963 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5964 fold_convert (ctype, t1));
5965 else if (TREE_CODE (op1) != INTEGER_CST)
5966 return 0;
5967
5968 /* If these are the same operation types, we can associate them
5969 assuming no overflow. */
5970 if (tcode == code)
5971 {
5972 bool overflow_p = false;
5973 bool overflow_mul_p;
5974 signop sign = TYPE_SIGN (ctype);
5975 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5976 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5977 if (overflow_mul_p
5978 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5979 overflow_p = true;
5980 if (!overflow_p)
5981 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5982 wide_int_to_tree (ctype, mul));
5983 }
5984
5985 /* If these operations "cancel" each other, we have the main
5986 optimizations of this pass, which occur when either constant is a
5987 multiple of the other, in which case we replace this with either an
5988 operation of CODE or TCODE.
5989
5990 If we have an unsigned type, we cannot do this since it will change
5991 the result if the original computation overflowed. */
5992 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5993 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5994 || (tcode == MULT_EXPR
5995 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5996 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5997 && code != MULT_EXPR)))
5998 {
5999 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6000 {
6001 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6002 *strict_overflow_p = true;
6003 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6004 fold_convert (ctype,
6005 const_binop (TRUNC_DIV_EXPR,
6006 op1, c)));
6007 }
6008 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6009 {
6010 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6011 *strict_overflow_p = true;
6012 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6013 fold_convert (ctype,
6014 const_binop (TRUNC_DIV_EXPR,
6015 c, op1)));
6016 }
6017 }
6018 break;
6019
6020 default:
6021 break;
6022 }
6023
6024 return 0;
6025 }
6026 \f
6027 /* Return a node which has the indicated constant VALUE (either 0 or
6028 1 for scalars or {-1,-1,...} or {0,0,...} for vectors),
6029 and is of the indicated TYPE. */
6030
6031 tree
6032 constant_boolean_node (bool value, tree type)
6033 {
6034 if (type == integer_type_node)
6035 return value ? integer_one_node : integer_zero_node;
6036 else if (type == boolean_type_node)
6037 return value ? boolean_true_node : boolean_false_node;
6038 else if (TREE_CODE (type) == VECTOR_TYPE)
6039 return build_vector_from_val (type,
6040 build_int_cst (TREE_TYPE (type),
6041 value ? -1 : 0));
6042 else
6043 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6044 }
6045
6046
6047 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6048 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6049 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6050 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6051 COND is the first argument to CODE; otherwise (as in the example
6052 given here), it is the second argument. TYPE is the type of the
6053 original expression. Return NULL_TREE if no simplification is
6054 possible. */
6055
6056 static tree
6057 fold_binary_op_with_conditional_arg (location_t loc,
6058 enum tree_code code,
6059 tree type, tree op0, tree op1,
6060 tree cond, tree arg, int cond_first_p)
6061 {
6062 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6063 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6064 tree test, true_value, false_value;
6065 tree lhs = NULL_TREE;
6066 tree rhs = NULL_TREE;
6067 enum tree_code cond_code = COND_EXPR;
6068
6069 if (TREE_CODE (cond) == COND_EXPR
6070 || TREE_CODE (cond) == VEC_COND_EXPR)
6071 {
6072 test = TREE_OPERAND (cond, 0);
6073 true_value = TREE_OPERAND (cond, 1);
6074 false_value = TREE_OPERAND (cond, 2);
6075 /* If this operand throws an exception (and hence has void type),
6076 then it does not make sense to try to perform a logical or
6077 arithmetic operation involving it. */
6078 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6079 lhs = true_value;
6080 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6081 rhs = false_value;
6082 }
6083 else
6084 {
6085 tree testtype = TREE_TYPE (cond);
6086 test = cond;
6087 true_value = constant_boolean_node (true, testtype);
6088 false_value = constant_boolean_node (false, testtype);
6089 }
6090
6091 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6092 cond_code = VEC_COND_EXPR;
6093
6094 /* This transformation is only worthwhile if we don't have to wrap ARG
6095 in a SAVE_EXPR and the operation can be simplified without recursing
6096 on at least one of the branches once it's pushed inside the COND_EXPR. */
6097 if (!TREE_CONSTANT (arg)
6098 && (TREE_SIDE_EFFECTS (arg)
6099 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6100 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6101 return NULL_TREE;
6102
6103 arg = fold_convert_loc (loc, arg_type, arg);
6104 if (lhs == 0)
6105 {
6106 true_value = fold_convert_loc (loc, cond_type, true_value);
6107 if (cond_first_p)
6108 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6109 else
6110 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6111 }
6112 if (rhs == 0)
6113 {
6114 false_value = fold_convert_loc (loc, cond_type, false_value);
6115 if (cond_first_p)
6116 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6117 else
6118 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6119 }
6120
6121 /* Check that we have simplified at least one of the branches. */
6122 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6123 return NULL_TREE;
6124
6125 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6126 }
6127
6128 \f
6129 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6130
6131 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6132 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6133 ADDEND is the same as X.
6134
6135 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6136 and finite. The problematic cases are when X is zero, and its mode
6137 has signed zeros. In the case of rounding towards -infinity,
6138 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6139 modes, X + 0 is not the same as X because -0 + 0 is 0. */
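/* For example (IEEE round-to-nearest): if X is -0.0, then X + 0.0
   yields +0.0, which differs from X in the sign of zero, while
   X - 0.0 yields -0.0 == X; this is why only the NEGATE case below
   can return true when signed zeros are honored.  */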
6140
6141 bool
6142 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6143 {
6144 if (!real_zerop (addend))
6145 return false;
6146
6147 /* Don't allow the fold with -fsignaling-nans. */
6148 if (HONOR_SNANS (TYPE_MODE (type)))
6149 return false;
6150
6151 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6152 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6153 return true;
6154
6155 /* In a vector or complex, we would need to check the sign of all zeros. */
6156 if (TREE_CODE (addend) != REAL_CST)
6157 return false;
6158
6159 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6160 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6161 negate = !negate;
6162
6163 /* The mode has signed zeros, and we have to honor their sign.
6164 In this situation, there is only one case we can return true for.
6165 X - 0 is the same as X unless rounding towards -infinity is
6166 supported. */
6167 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6168 }
6169
6170 /* Subroutine of fold() that checks comparisons of built-in math
6171 functions against real constants.
6172
6173 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6174 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6175 is the type of the result and ARG0 and ARG1 are the operands of the
6176 comparison. ARG1 must be a TREE_REAL_CST.
6177
6178 The function returns the constant folded tree if a simplification
6179 can be made, and NULL_TREE otherwise. */
6180
6181 static tree
6182 fold_mathfn_compare (location_t loc,
6183 enum built_in_function fcode, enum tree_code code,
6184 tree type, tree arg0, tree arg1)
6185 {
6186 REAL_VALUE_TYPE c;
6187
6188 if (BUILTIN_SQRT_P (fcode))
6189 {
6190 tree arg = CALL_EXPR_ARG (arg0, 0);
6191 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6192
6193 c = TREE_REAL_CST (arg1);
6194 if (REAL_VALUE_NEGATIVE (c))
6195 {
6196 /* sqrt(x) < y is always false, if y is negative. */
6197 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6198 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6199
6200 /* sqrt(x) > y is always true, if y is negative and we
6201 don't care about NaNs, i.e. negative values of x. */
6202 if (code == NE_EXPR || !HONOR_NANS (mode))
6203 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6204
6205 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6206 return fold_build2_loc (loc, GE_EXPR, type, arg,
6207 build_real (TREE_TYPE (arg), dconst0));
6208 }
6209 else if (code == GT_EXPR || code == GE_EXPR)
6210 {
6211 REAL_VALUE_TYPE c2;
6212
6213 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6214 real_convert (&c2, mode, &c2);
6215
6216 if (REAL_VALUE_ISINF (c2))
6217 {
6218 /* sqrt(x) > y is x == +Inf, when y is very large. */
6219 if (HONOR_INFINITIES (mode))
6220 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6221 build_real (TREE_TYPE (arg), c2));
6222
6223 /* sqrt(x) > y is always false, when y is very large
6224 and we don't care about infinities. */
6225 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6226 }
6227
6228 /* sqrt(x) > c is the same as x > c*c. */
6229 return fold_build2_loc (loc, code, type, arg,
6230 build_real (TREE_TYPE (arg), c2));
6231 }
6232 else if (code == LT_EXPR || code == LE_EXPR)
6233 {
6234 REAL_VALUE_TYPE c2;
6235
6236 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6237 real_convert (&c2, mode, &c2);
6238
6239 if (REAL_VALUE_ISINF (c2))
6240 {
6241 /* sqrt(x) < y is always true, when y is a very large
6242 value and we don't care about NaNs or Infinities. */
6243 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6244 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6245
6246 /* sqrt(x) < y is x != +Inf when y is very large and we
6247 don't care about NaNs. */
6248 if (! HONOR_NANS (mode))
6249 return fold_build2_loc (loc, NE_EXPR, type, arg,
6250 build_real (TREE_TYPE (arg), c2));
6251
6252 /* sqrt(x) < y is x >= 0 when y is very large and we
6253 don't care about Infinities. */
6254 if (! HONOR_INFINITIES (mode))
6255 return fold_build2_loc (loc, GE_EXPR, type, arg,
6256 build_real (TREE_TYPE (arg), dconst0));
6257
6258 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6259 arg = save_expr (arg);
6260 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6261 fold_build2_loc (loc, GE_EXPR, type, arg,
6262 build_real (TREE_TYPE (arg),
6263 dconst0)),
6264 fold_build2_loc (loc, NE_EXPR, type, arg,
6265 build_real (TREE_TYPE (arg),
6266 c2)));
6267 }
6268
6269 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6270 if (! HONOR_NANS (mode))
6271 return fold_build2_loc (loc, code, type, arg,
6272 build_real (TREE_TYPE (arg), c2));
6273
6274 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6275 arg = save_expr (arg);
6276 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6277 fold_build2_loc (loc, GE_EXPR, type, arg,
6278 build_real (TREE_TYPE (arg),
6279 dconst0)),
6280 fold_build2_loc (loc, code, type, arg,
6281 build_real (TREE_TYPE (arg),
6282 c2)));
6283 }
6284 }
6285
6286 return NULL_TREE;
6287 }
6288
6289 /* Subroutine of fold() that optimizes comparisons against Infinities,
6290 either +Inf or -Inf.
6291
6292 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6293 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6294 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6295
6296 The function returns the constant folded tree if a simplification
6297 can be made, and NULL_TREE otherwise. */
6298
6299 static tree
6300 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6301 tree arg0, tree arg1)
6302 {
6303 machine_mode mode;
6304 REAL_VALUE_TYPE max;
6305 tree temp;
6306 bool neg;
6307
6308 mode = TYPE_MODE (TREE_TYPE (arg0));
6309
6310 /* For negative infinity swap the sense of the comparison. */
6311 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6312 if (neg)
6313 code = swap_tree_comparison (code);
6314
6315 switch (code)
6316 {
6317 case GT_EXPR:
6318 /* x > +Inf is always false, if we ignore signaling NaNs. */
6319 if (HONOR_SNANS (mode))
6320 return NULL_TREE;
6321 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6322
6323 case LE_EXPR:
6324 /* x <= +Inf is always true, if we don't care about NaNs. */
6325 if (! HONOR_NANS (mode))
6326 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6327
6328 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6329 arg0 = save_expr (arg0);
6330 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6331
6332 case EQ_EXPR:
6333 case GE_EXPR:
6334 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6335 real_maxval (&max, neg, mode);
6336 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6337 arg0, build_real (TREE_TYPE (arg0), max));
6338
6339 case LT_EXPR:
6340 /* x < +Inf is always equal to x <= DBL_MAX. */
6341 real_maxval (&max, neg, mode);
6342 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6343 arg0, build_real (TREE_TYPE (arg0), max));
6344
6345 case NE_EXPR:
6346 /* x != +Inf is always equal to !(x > DBL_MAX). */
6347 real_maxval (&max, neg, mode);
6348 if (! HONOR_NANS (mode))
6349 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6350 arg0, build_real (TREE_TYPE (arg0), max));
6351
6352 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6353 arg0, build_real (TREE_TYPE (arg0), max));
6354 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6355
6356 default:
6357 break;
6358 }
6359
6360 return NULL_TREE;
6361 }
6362
6363 /* Subroutine of fold() that optimizes comparisons of a division by
6364 a nonzero integer constant against an integer constant, i.e.
6365 X/C1 op C2.
6366
6367 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6368 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6369 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6370
6371 The function returns the constant folded tree if a simplification
6372 can be made, and NULL_TREE otherwise. */
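/* For example, with unsigned X, `X / 4 == 3' computes lo = 12 and
   hi = 15 below and becomes a range check equivalent to
   `12 <= X && X <= 15'.  */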
6373
6374 static tree
6375 fold_div_compare (location_t loc,
6376 enum tree_code code, tree type, tree arg0, tree arg1)
6377 {
6378 tree prod, tmp, hi, lo;
6379 tree arg00 = TREE_OPERAND (arg0, 0);
6380 tree arg01 = TREE_OPERAND (arg0, 1);
6381 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6382 bool neg_overflow = false;
6383 bool overflow;
6384
6385 /* We have to do this the hard way to detect unsigned overflow.
6386 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6387 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6388 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6389 neg_overflow = false;
6390
6391 if (sign == UNSIGNED)
6392 {
6393 tmp = int_const_binop (MINUS_EXPR, arg01,
6394 build_int_cst (TREE_TYPE (arg01), 1));
6395 lo = prod;
6396
6397 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6398 val = wi::add (prod, tmp, sign, &overflow);
6399 hi = force_fit_type (TREE_TYPE (arg00), val,
6400 -1, overflow | TREE_OVERFLOW (prod));
6401 }
6402 else if (tree_int_cst_sgn (arg01) >= 0)
6403 {
6404 tmp = int_const_binop (MINUS_EXPR, arg01,
6405 build_int_cst (TREE_TYPE (arg01), 1));
6406 switch (tree_int_cst_sgn (arg1))
6407 {
6408 case -1:
6409 neg_overflow = true;
6410 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6411 hi = prod;
6412 break;
6413
6414 case 0:
6415 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6416 hi = tmp;
6417 break;
6418
6419 case 1:
6420 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6421 lo = prod;
6422 break;
6423
6424 default:
6425 gcc_unreachable ();
6426 }
6427 }
6428 else
6429 {
6430 /* A negative divisor reverses the relational operators. */
6431 code = swap_tree_comparison (code);
6432
6433 tmp = int_const_binop (PLUS_EXPR, arg01,
6434 build_int_cst (TREE_TYPE (arg01), 1));
6435 switch (tree_int_cst_sgn (arg1))
6436 {
6437 case -1:
6438 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6439 lo = prod;
6440 break;
6441
6442 case 0:
6443 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6444 lo = tmp;
6445 break;
6446
6447 case 1:
6448 neg_overflow = true;
6449 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6450 hi = prod;
6451 break;
6452
6453 default:
6454 gcc_unreachable ();
6455 }
6456 }
6457
6458 switch (code)
6459 {
6460 case EQ_EXPR:
6461 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6462 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6463 if (TREE_OVERFLOW (hi))
6464 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6465 if (TREE_OVERFLOW (lo))
6466 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6467 return build_range_check (loc, type, arg00, 1, lo, hi);
6468
6469 case NE_EXPR:
6470 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6471 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6472 if (TREE_OVERFLOW (hi))
6473 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6474 if (TREE_OVERFLOW (lo))
6475 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6476 return build_range_check (loc, type, arg00, 0, lo, hi);
6477
6478 case LT_EXPR:
6479 if (TREE_OVERFLOW (lo))
6480 {
6481 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6482 return omit_one_operand_loc (loc, type, tmp, arg00);
6483 }
6484 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6485
6486 case LE_EXPR:
6487 if (TREE_OVERFLOW (hi))
6488 {
6489 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6490 return omit_one_operand_loc (loc, type, tmp, arg00);
6491 }
6492 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6493
6494 case GT_EXPR:
6495 if (TREE_OVERFLOW (hi))
6496 {
6497 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6498 return omit_one_operand_loc (loc, type, tmp, arg00);
6499 }
6500 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6501
6502 case GE_EXPR:
6503 if (TREE_OVERFLOW (lo))
6504 {
6505 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6506 return omit_one_operand_loc (loc, type, tmp, arg00);
6507 }
6508 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6509
6510 default:
6511 break;
6512 }
6513
6514 return NULL_TREE;
6515 }
6516
6517
6518 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6519 equality/inequality test, then return a simplified form of the test
6520 using a sign testing. Otherwise return NULL. TYPE is the desired
6521 result type. */
6522
6523 static tree
6524 fold_single_bit_test_into_sign_test (location_t loc,
6525 enum tree_code code, tree arg0, tree arg1,
6526 tree result_type)
6527 {
6528 /* If this is testing a single bit, we can optimize the test. */
6529 if ((code == NE_EXPR || code == EQ_EXPR)
6530 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6531 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6532 {
6533 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6534 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6535 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6536
6537 if (arg00 != NULL_TREE
6538 /* This is only a win if casting to a signed type is cheap,
6539 i.e. when arg00's type is not a partial mode. */
6540 && TYPE_PRECISION (TREE_TYPE (arg00))
6541 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6542 {
6543 tree stype = signed_type_for (TREE_TYPE (arg00));
6544 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6545 result_type,
6546 fold_convert_loc (loc, stype, arg00),
6547 build_int_cst (stype, 0));
6548 }
6549 }
6550
6551 return NULL_TREE;
6552 }
6553
6554 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6555 equality/inequality test, then return a simplified form of
6556 the test using shifts and logical operations. Otherwise return
6557 NULL. TYPE is the desired result type. */
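/* For example, `(A & 8) != 0' becomes `(A >> 3) & 1', and for
   `(A & 8) == 0' the intermediate result is additionally XORed
   with 1.  */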
6558
6559 tree
6560 fold_single_bit_test (location_t loc, enum tree_code code,
6561 tree arg0, tree arg1, tree result_type)
6562 {
6563 /* If this is testing a single bit, we can optimize the test. */
6564 if ((code == NE_EXPR || code == EQ_EXPR)
6565 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6566 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6567 {
6568 tree inner = TREE_OPERAND (arg0, 0);
6569 tree type = TREE_TYPE (arg0);
6570 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6571 machine_mode operand_mode = TYPE_MODE (type);
6572 int ops_unsigned;
6573 tree signed_type, unsigned_type, intermediate_type;
6574 tree tem, one;
6575
6576 /* First, see if we can fold the single bit test into a sign-bit
6577 test. */
6578 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6579 result_type);
6580 if (tem)
6581 return tem;
6582
6583 /* Otherwise we have (A & C) != 0 where C is a single bit,
6584 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6585 Similarly for (A & C) == 0. */
6586
6587 /* If INNER is a right shift by a constant and that count plus BITNUM
6588 does not exceed the type's precision, adjust BITNUM and INNER. */
6589 if (TREE_CODE (inner) == RSHIFT_EXPR
6590 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6591 && bitnum < TYPE_PRECISION (type)
6592 && wi::ltu_p (TREE_OPERAND (inner, 1),
6593 TYPE_PRECISION (type) - bitnum))
6594 {
6595 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6596 inner = TREE_OPERAND (inner, 0);
6597 }
6598
6599 /* If we are going to be able to omit the AND below, we must do our
6600 operations as unsigned. If we must use the AND, we have a choice.
6601 Normally unsigned is faster, but for some machines signed is. */
6602 #ifdef LOAD_EXTEND_OP
6603 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6604 && !flag_syntax_only) ? 0 : 1;
6605 #else
6606 ops_unsigned = 1;
6607 #endif
6608
6609 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6610 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6611 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6612 inner = fold_convert_loc (loc, intermediate_type, inner);
6613
6614 if (bitnum != 0)
6615 inner = build2 (RSHIFT_EXPR, intermediate_type,
6616 inner, size_int (bitnum));
6617
6618 one = build_int_cst (intermediate_type, 1);
6619
6620 if (code == EQ_EXPR)
6621 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6622
6623 /* Put the AND last so it can combine with more things. */
6624 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6625
6626 /* Make sure to return the proper type. */
6627 inner = fold_convert_loc (loc, result_type, inner);
6628
6629 return inner;
6630 }
6631 return NULL_TREE;
6632 }
6633
6634 /* Check whether we are allowed to reorder operands arg0 and arg1,
6635 such that the evaluation of arg1 occurs before arg0. */
6636
6637 static bool
6638 reorder_operands_p (const_tree arg0, const_tree arg1)
6639 {
6640 if (! flag_evaluation_order)
6641 return true;
6642 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6643 return true;
6644 return ! TREE_SIDE_EFFECTS (arg0)
6645 && ! TREE_SIDE_EFFECTS (arg1);
6646 }
6647
6648 /* Test whether it is preferable to swap two operands, ARG0 and
6649 ARG1, for example because ARG0 is an integer constant and ARG1
6650 isn't. If REORDER is true, only recommend swapping if we can
6651 evaluate the operands in reverse order. */
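/* For example, the rules below prefer `x + 1' over `1 + x' (constants
   last) and order two SSA names by version number, so later folders
   need only check one operand order.  */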
6652
6653 bool
6654 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6655 {
6656 if (CONSTANT_CLASS_P (arg1))
6657 return 0;
6658 if (CONSTANT_CLASS_P (arg0))
6659 return 1;
6660
6661 STRIP_SIGN_NOPS (arg0);
6662 STRIP_SIGN_NOPS (arg1);
6663
6664 if (TREE_CONSTANT (arg1))
6665 return 0;
6666 if (TREE_CONSTANT (arg0))
6667 return 1;
6668
6669 if (reorder && flag_evaluation_order
6670 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6671 return 0;
6672
6673 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6674 for commutative and comparison operators. Ensuring a canonical
6675 form allows the optimizers to find additional redundancies without
6676 having to explicitly check for both orderings. */
6677 if (TREE_CODE (arg0) == SSA_NAME
6678 && TREE_CODE (arg1) == SSA_NAME
6679 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6680 return 1;
6681
6682 /* Put SSA_NAMEs last. */
6683 if (TREE_CODE (arg1) == SSA_NAME)
6684 return 0;
6685 if (TREE_CODE (arg0) == SSA_NAME)
6686 return 1;
6687
6688 /* Put variables last. */
6689 if (DECL_P (arg1))
6690 return 0;
6691 if (DECL_P (arg0))
6692 return 1;
6693
6694 return 0;
6695 }
6696
6697 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6698 ARG0 is extended to a wider type. */
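/* For example, if X has type short (assuming 16-bit short),
   `(int) X == 70000' can fold to constant false, since 70000 does not
   fit in short, while `(int) X == 1234' can be narrowed to
   `X == (short) 1234'.  */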
6699
6700 static tree
6701 fold_widened_comparison (location_t loc, enum tree_code code,
6702 tree type, tree arg0, tree arg1)
6703 {
6704 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6705 tree arg1_unw;
6706 tree shorter_type, outer_type;
6707 tree min, max;
6708 bool above, below;
6709
6710 if (arg0_unw == arg0)
6711 return NULL_TREE;
6712 shorter_type = TREE_TYPE (arg0_unw);
6713
6714 #ifdef HAVE_canonicalize_funcptr_for_compare
6715 /* Disable this optimization if we're casting a function pointer
6716 type on targets that require function pointer canonicalization. */
6717 if (HAVE_canonicalize_funcptr_for_compare
6718 && TREE_CODE (shorter_type) == POINTER_TYPE
6719 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6720 return NULL_TREE;
6721 #endif
6722
6723 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6724 return NULL_TREE;
6725
6726 arg1_unw = get_unwidened (arg1, NULL_TREE);
6727
6728 /* If possible, express the comparison in the shorter mode. */
6729 if ((code == EQ_EXPR || code == NE_EXPR
6730 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6731 && (TREE_TYPE (arg1_unw) == shorter_type
6732 || ((TYPE_PRECISION (shorter_type)
6733 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6734 && (TYPE_UNSIGNED (shorter_type)
6735 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6736 || (TREE_CODE (arg1_unw) == INTEGER_CST
6737 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6738 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6739 && int_fits_type_p (arg1_unw, shorter_type))))
6740 return fold_build2_loc (loc, code, type, arg0_unw,
6741 fold_convert_loc (loc, shorter_type, arg1_unw));
6742
6743 if (TREE_CODE (arg1_unw) != INTEGER_CST
6744 || TREE_CODE (shorter_type) != INTEGER_TYPE
6745 || int_fits_type_p (arg1_unw, shorter_type))
6746 return NULL_TREE;
6747
6748 /* If we are comparing with an integer that does not fit into the range
6749 of the shorter type, the result is known. */
6750 outer_type = TREE_TYPE (arg1_unw);
6751 min = lower_bound_in_type (outer_type, shorter_type);
6752 max = upper_bound_in_type (outer_type, shorter_type);
6753
6754 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6755 max, arg1_unw));
6756 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6757 arg1_unw, min));
6758
6759 switch (code)
6760 {
6761 case EQ_EXPR:
6762 if (above || below)
6763 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6764 break;
6765
6766 case NE_EXPR:
6767 if (above || below)
6768 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6769 break;
6770
6771 case LT_EXPR:
6772 case LE_EXPR:
6773 if (above)
6774 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6775 else if (below)
6776 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;
6777
6778 case GT_EXPR:
6779 case GE_EXPR:
6780 if (above)
6781 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6782 else if (below)
6783 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;
6784
6785 default:
6786 break;
6787 }
6788
6789 return NULL_TREE;
6790 }
6791
6792 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6793 ARG0 just the signedness is changed. */
6794
6795 static tree
6796 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6797 tree arg0, tree arg1)
6798 {
6799 tree arg0_inner;
6800 tree inner_type, outer_type;
6801
6802 if (!CONVERT_EXPR_P (arg0))
6803 return NULL_TREE;
6804
6805 outer_type = TREE_TYPE (arg0);
6806 arg0_inner = TREE_OPERAND (arg0, 0);
6807 inner_type = TREE_TYPE (arg0_inner);
6808
6809 #ifdef HAVE_canonicalize_funcptr_for_compare
6810 /* Disable this optimization if we're casting a function pointer
6811 type on targets that require function pointer canonicalization. */
6812 if (HAVE_canonicalize_funcptr_for_compare
6813 && TREE_CODE (inner_type) == POINTER_TYPE
6814 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6815 return NULL_TREE;
6816 #endif
6817
6818 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6819 return NULL_TREE;
6820
6821 if (TREE_CODE (arg1) != INTEGER_CST
6822 && !(CONVERT_EXPR_P (arg1)
6823 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6824 return NULL_TREE;
6825
6826 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6827 && code != NE_EXPR
6828 && code != EQ_EXPR)
6829 return NULL_TREE;
6830
6831 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6832 return NULL_TREE;
6833
6834 if (TREE_CODE (arg1) == INTEGER_CST)
6835 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6836 TREE_OVERFLOW (arg1));
6837 else
6838 arg1 = fold_convert_loc (loc, inner_type, arg1);
6839
6840 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6841 }
6842
6843
6844 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6845 means A >= Y && A != MAX, but in this case we know that
6846 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6847
6848 static tree
6849 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6850 {
6851 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6852
6853 if (TREE_CODE (bound) == LT_EXPR)
6854 a = TREE_OPERAND (bound, 0);
6855 else if (TREE_CODE (bound) == GT_EXPR)
6856 a = TREE_OPERAND (bound, 1);
6857 else
6858 return NULL_TREE;
6859
6860 typea = TREE_TYPE (a);
6861 if (!INTEGRAL_TYPE_P (typea)
6862 && !POINTER_TYPE_P (typea))
6863 return NULL_TREE;
6864
6865 if (TREE_CODE (ineq) == LT_EXPR)
6866 {
6867 a1 = TREE_OPERAND (ineq, 1);
6868 y = TREE_OPERAND (ineq, 0);
6869 }
6870 else if (TREE_CODE (ineq) == GT_EXPR)
6871 {
6872 a1 = TREE_OPERAND (ineq, 0);
6873 y = TREE_OPERAND (ineq, 1);
6874 }
6875 else
6876 return NULL_TREE;
6877
6878 if (TREE_TYPE (a1) != typea)
6879 return NULL_TREE;
6880
6881 if (POINTER_TYPE_P (typea))
6882 {
6883 /* Convert the pointers to integers before taking the difference. */
6884 tree ta = fold_convert_loc (loc, ssizetype, a);
6885 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6886 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6887 }
6888 else
6889 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6890
6891 if (!diff || !integer_onep (diff))
6892 return NULL_TREE;
6893
6894 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6895 }
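
/* For example, with BOUND = a < n and INEQ = y < a + 1 (that is,
   a + 1 > y), DIFF folds to 1 and the result built is a >= y.  The
   bound a < n is what guarantees a + 1 cannot wrap past the maximum
   of the type, making it safe to drop the implicit A != MAX clause.  */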
6896
6897 /* Fold a sum or difference of at least one multiplication.
6898 Returns the folded tree or NULL if no simplification could be made. */
6899
6900 static tree
6901 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6902 tree arg0, tree arg1)
6903 {
6904 tree arg00, arg01, arg10, arg11;
6905 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6906
6907 /* (A * C) +- (B * C) -> (A+-B) * C.
6908 (A * C) +- A -> A * (C+-1).
6909 We are most concerned about the case where C is a constant,
6910 but other combinations show up during loop reduction. Since
6911 it is not difficult, try all four possibilities. */
6912
6913 if (TREE_CODE (arg0) == MULT_EXPR)
6914 {
6915 arg00 = TREE_OPERAND (arg0, 0);
6916 arg01 = TREE_OPERAND (arg0, 1);
6917 }
6918 else if (TREE_CODE (arg0) == INTEGER_CST)
6919 {
6920 arg00 = build_one_cst (type);
6921 arg01 = arg0;
6922 }
6923 else
6924 {
6925 /* We cannot generate constant 1 for fract. */
6926 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6927 return NULL_TREE;
6928 arg00 = arg0;
6929 arg01 = build_one_cst (type);
6930 }
6931 if (TREE_CODE (arg1) == MULT_EXPR)
6932 {
6933 arg10 = TREE_OPERAND (arg1, 0);
6934 arg11 = TREE_OPERAND (arg1, 1);
6935 }
6936 else if (TREE_CODE (arg1) == INTEGER_CST)
6937 {
6938 arg10 = build_one_cst (type);
6939       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6940 the purpose of this canonicalization. */
6941 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6942 && negate_expr_p (arg1)
6943 && code == PLUS_EXPR)
6944 {
6945 arg11 = negate_expr (arg1);
6946 code = MINUS_EXPR;
6947 }
6948 else
6949 arg11 = arg1;
6950 }
6951 else
6952 {
6953 /* We cannot generate constant 1 for fract. */
6954 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6955 return NULL_TREE;
6956 arg10 = arg1;
6957 arg11 = build_one_cst (type);
6958 }
6959 same = NULL_TREE;
6960
6961 if (operand_equal_p (arg01, arg11, 0))
6962 same = arg01, alt0 = arg00, alt1 = arg10;
6963 else if (operand_equal_p (arg00, arg10, 0))
6964 same = arg00, alt0 = arg01, alt1 = arg11;
6965 else if (operand_equal_p (arg00, arg11, 0))
6966 same = arg00, alt0 = arg01, alt1 = arg10;
6967 else if (operand_equal_p (arg01, arg10, 0))
6968 same = arg01, alt0 = arg00, alt1 = arg11;
6969
6970 /* No identical multiplicands; see if we can find a common
6971 power-of-two factor in non-power-of-two multiplies. This
6972 can help in multi-dimensional array access. */
6973 else if (tree_fits_shwi_p (arg01)
6974 && tree_fits_shwi_p (arg11))
6975 {
6976 HOST_WIDE_INT int01, int11, tmp;
6977 bool swap = false;
6978 tree maybe_same;
6979 int01 = tree_to_shwi (arg01);
6980 int11 = tree_to_shwi (arg11);
6981
6982 /* Move min of absolute values to int11. */
6983 if (absu_hwi (int01) < absu_hwi (int11))
6984 {
6985 tmp = int01, int01 = int11, int11 = tmp;
6986 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6987 maybe_same = arg01;
6988 swap = true;
6989 }
6990 else
6991 maybe_same = arg11;
6992
6993 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6994 /* The remainder should not be a constant, otherwise we
6995 		 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6996 		 increase the number of multiplications necessary.  */
6997 && TREE_CODE (arg10) != INTEGER_CST)
6998 {
6999 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7000 build_int_cst (TREE_TYPE (arg00),
7001 int01 / int11));
7002 alt1 = arg10;
7003 same = maybe_same;
7004 if (swap)
7005 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7006 }
7007 }
7008
7009 if (same)
7010 return fold_build2_loc (loc, MULT_EXPR, type,
7011 fold_build2_loc (loc, code, type,
7012 fold_convert_loc (loc, type, alt0),
7013 fold_convert_loc (loc, type, alt1)),
7014 fold_convert_loc (loc, type, same));
7015
7016 return NULL_TREE;
7017 }
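
/* Two sketches of what the above achieves:

     x*8 + y*8   becomes  (x + y) * 8   via the operand_equal_p matches,
     i*16 + j*4  becomes  (i*4 + j) * 4 via the common power-of-two factor,

   the latter being the multi-dimensional array access case the
   comment above alludes to.  */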
7018
7019 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7020 specified by EXPR into the buffer PTR of length LEN bytes.
7021 Return the number of bytes placed in the buffer, or zero
7022 upon failure. */
7023
7024 static int
7025 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7026 {
7027 tree type = TREE_TYPE (expr);
7028 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7029 int byte, offset, word, words;
7030 unsigned char value;
7031
7032 if ((off == -1 && total_bytes > len)
7033 || off >= total_bytes)
7034 return 0;
7035 if (off == -1)
7036 off = 0;
7037 words = total_bytes / UNITS_PER_WORD;
7038
7039 for (byte = 0; byte < total_bytes; byte++)
7040 {
7041 int bitpos = byte * BITS_PER_UNIT;
7042 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7043 number of bytes. */
7044 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7045
7046 if (total_bytes > UNITS_PER_WORD)
7047 {
7048 word = byte / UNITS_PER_WORD;
7049 if (WORDS_BIG_ENDIAN)
7050 word = (words - 1) - word;
7051 offset = word * UNITS_PER_WORD;
7052 if (BYTES_BIG_ENDIAN)
7053 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7054 else
7055 offset += byte % UNITS_PER_WORD;
7056 }
7057 else
7058 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7059 if (offset >= off
7060 && offset - off < len)
7061 ptr[offset - off] = value;
7062 }
7063 return MIN (len, total_bytes - off);
7064 }
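
/* As a sketch of the swizzling above: encoding the 32-bit constant
   0x01020304 with UNITS_PER_WORD >= 4 yields the bytes 04 03 02 01 on
   a little-endian target and 01 02 03 04 on a big-endian one; for
   values wider than a word the word order is additionally reversed
   when WORDS_BIG_ENDIAN.  */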
7065
7066
7067 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7068 specified by EXPR into the buffer PTR of length LEN bytes.
7069 Return the number of bytes placed in the buffer, or zero
7070 upon failure. */
7071
7072 static int
7073 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7074 {
7075 tree type = TREE_TYPE (expr);
7076 machine_mode mode = TYPE_MODE (type);
7077 int total_bytes = GET_MODE_SIZE (mode);
7078 FIXED_VALUE_TYPE value;
7079 tree i_value, i_type;
7080
7081 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7082 return 0;
7083
7084 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7085
7086 if (NULL_TREE == i_type
7087       || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7088 return 0;
7089
7090 value = TREE_FIXED_CST (expr);
7091 i_value = double_int_to_tree (i_type, value.data);
7092
7093 return native_encode_int (i_value, ptr, len, off);
7094 }
7095
7096
7097 /* Subroutine of native_encode_expr. Encode the REAL_CST
7098 specified by EXPR into the buffer PTR of length LEN bytes.
7099 Return the number of bytes placed in the buffer, or zero
7100 upon failure. */
7101
7102 static int
7103 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7104 {
7105 tree type = TREE_TYPE (expr);
7106 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7107 int byte, offset, word, words, bitpos;
7108 unsigned char value;
7109
7110 /* There are always 32 bits in each long, no matter the size of
7111      the host's long.  We handle floating point representations with
7112 up to 192 bits. */
7113 long tmp[6];
7114
7115 if ((off == -1 && total_bytes > len)
7116 || off >= total_bytes)
7117 return 0;
7118 if (off == -1)
7119 off = 0;
7120 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7121
7122 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7123
7124 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7125 bitpos += BITS_PER_UNIT)
7126 {
7127 byte = (bitpos / BITS_PER_UNIT) & 3;
7128 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7129
7130 if (UNITS_PER_WORD < 4)
7131 {
7132 word = byte / UNITS_PER_WORD;
7133 if (WORDS_BIG_ENDIAN)
7134 word = (words - 1) - word;
7135 offset = word * UNITS_PER_WORD;
7136 if (BYTES_BIG_ENDIAN)
7137 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7138 else
7139 offset += byte % UNITS_PER_WORD;
7140 }
7141 else
7142 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7143 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7144 if (offset >= off
7145 && offset - off < len)
7146 ptr[offset - off] = value;
7147 }
7148 return MIN (len, total_bytes - off);
7149 }
7150
7151 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7152 specified by EXPR into the buffer PTR of length LEN bytes.
7153 Return the number of bytes placed in the buffer, or zero
7154 upon failure. */
7155
7156 static int
7157 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7158 {
7159 int rsize, isize;
7160 tree part;
7161
7162 part = TREE_REALPART (expr);
7163 rsize = native_encode_expr (part, ptr, len, off);
7164 if (off == -1
7165 && rsize == 0)
7166 return 0;
7167 part = TREE_IMAGPART (expr);
7168 if (off != -1)
7169 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7170 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7171 if (off == -1
7172 && isize != rsize)
7173 return 0;
7174 return rsize + isize;
7175 }
7176
7177
7178 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7179 specified by EXPR into the buffer PTR of length LEN bytes.
7180 Return the number of bytes placed in the buffer, or zero
7181 upon failure. */
7182
7183 static int
7184 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7185 {
7186 unsigned i, count;
7187 int size, offset;
7188 tree itype, elem;
7189
7190 offset = 0;
7191 count = VECTOR_CST_NELTS (expr);
7192 itype = TREE_TYPE (TREE_TYPE (expr));
7193 size = GET_MODE_SIZE (TYPE_MODE (itype));
7194 for (i = 0; i < count; i++)
7195 {
7196 if (off >= size)
7197 {
7198 off -= size;
7199 continue;
7200 }
7201 elem = VECTOR_CST_ELT (expr, i);
7202 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7203 if ((off == -1 && res != size)
7204 || res == 0)
7205 return 0;
7206 offset += res;
7207 if (offset >= len)
7208 return offset;
7209 if (off != -1)
7210 off = 0;
7211 }
7212 return offset;
7213 }
7214
7215
7216 /* Subroutine of native_encode_expr. Encode the STRING_CST
7217 specified by EXPR into the buffer PTR of length LEN bytes.
7218 Return the number of bytes placed in the buffer, or zero
7219 upon failure. */
7220
7221 static int
7222 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7223 {
7224 tree type = TREE_TYPE (expr);
7225 HOST_WIDE_INT total_bytes;
7226
7227 if (TREE_CODE (type) != ARRAY_TYPE
7228 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7229 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7230 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7231 return 0;
7232 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7233 if ((off == -1 && total_bytes > len)
7234 || off >= total_bytes)
7235 return 0;
7236 if (off == -1)
7237 off = 0;
7238 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7239 {
7240 int written = 0;
7241 if (off < TREE_STRING_LENGTH (expr))
7242 {
7243 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7244 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7245 }
7246 memset (ptr + written, 0,
7247 MIN (total_bytes - written, len - written));
7248 }
7249 else
7250 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7251 return MIN (total_bytes - off, len);
7252 }
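
/* Note the zero padding above: a STRING_CST shorter than its array
   type, say "ab" stored in a char[4], is encoded as 'a', 'b', 0, 0, so
   the buffer always reflects the full object representation.  */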
7253
7254
7255 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7256    REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST
7257    specified by EXPR into the buffer PTR of length LEN bytes.  If OFF
7258    is not -1, start the encoding at byte offset OFF and encode at most LEN bytes.
7259 Return the number of bytes placed in the buffer, or zero upon failure. */
7260
7261 int
7262 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7263 {
7264 switch (TREE_CODE (expr))
7265 {
7266 case INTEGER_CST:
7267 return native_encode_int (expr, ptr, len, off);
7268
7269 case REAL_CST:
7270 return native_encode_real (expr, ptr, len, off);
7271
7272 case FIXED_CST:
7273 return native_encode_fixed (expr, ptr, len, off);
7274
7275 case COMPLEX_CST:
7276 return native_encode_complex (expr, ptr, len, off);
7277
7278 case VECTOR_CST:
7279 return native_encode_vector (expr, ptr, len, off);
7280
7281 case STRING_CST:
7282 return native_encode_string (expr, ptr, len, off);
7283
7284 default:
7285 return 0;
7286 }
7287 }
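
/* native_encode_expr and native_interpret_expr below are designed as
   byte-level inverses of each other; fold_view_convert_expr exploits
   this by encoding a constant under one type and re-interpreting the
   buffer under another.  */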
7288
7289
7290 /* Subroutine of native_interpret_expr. Interpret the contents of
7291 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7292 If the buffer cannot be interpreted, return NULL_TREE. */
7293
7294 static tree
7295 native_interpret_int (tree type, const unsigned char *ptr, int len)
7296 {
7297 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7298
7299 if (total_bytes > len
7300 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7301 return NULL_TREE;
7302
7303 wide_int result = wi::from_buffer (ptr, total_bytes);
7304
7305 return wide_int_to_tree (type, result);
7306 }
7307
7308
7309 /* Subroutine of native_interpret_expr. Interpret the contents of
7310 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7311 If the buffer cannot be interpreted, return NULL_TREE. */
7312
7313 static tree
7314 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7315 {
7316 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7317 double_int result;
7318 FIXED_VALUE_TYPE fixed_value;
7319
7320 if (total_bytes > len
7321 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7322 return NULL_TREE;
7323
7324 result = double_int::from_buffer (ptr, total_bytes);
7325 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7326
7327 return build_fixed (type, fixed_value);
7328 }
7329
7330
7331 /* Subroutine of native_interpret_expr. Interpret the contents of
7332 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7333 If the buffer cannot be interpreted, return NULL_TREE. */
7334
7335 static tree
7336 native_interpret_real (tree type, const unsigned char *ptr, int len)
7337 {
7338 machine_mode mode = TYPE_MODE (type);
7339 int total_bytes = GET_MODE_SIZE (mode);
7340 int byte, offset, word, words, bitpos;
7341 unsigned char value;
7342 /* There are always 32 bits in each long, no matter the size of
7343      the host's long.  We handle floating point representations with
7344 up to 192 bits. */
7345 REAL_VALUE_TYPE r;
7346 long tmp[6];
7347
7349 if (total_bytes > len || total_bytes > 24)
7350 return NULL_TREE;
7351 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7352
7353 memset (tmp, 0, sizeof (tmp));
7354 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7355 bitpos += BITS_PER_UNIT)
7356 {
7357 byte = (bitpos / BITS_PER_UNIT) & 3;
7358 if (UNITS_PER_WORD < 4)
7359 {
7360 word = byte / UNITS_PER_WORD;
7361 if (WORDS_BIG_ENDIAN)
7362 word = (words - 1) - word;
7363 offset = word * UNITS_PER_WORD;
7364 if (BYTES_BIG_ENDIAN)
7365 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7366 else
7367 offset += byte % UNITS_PER_WORD;
7368 }
7369 else
7370 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7371 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7372
7373 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7374 }
7375
7376 real_from_target (&r, tmp, mode);
7377 return build_real (type, r);
7378 }
7379
7380
7381 /* Subroutine of native_interpret_expr. Interpret the contents of
7382 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7383 If the buffer cannot be interpreted, return NULL_TREE. */
7384
7385 static tree
7386 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7387 {
7388 tree etype, rpart, ipart;
7389 int size;
7390
7391 etype = TREE_TYPE (type);
7392 size = GET_MODE_SIZE (TYPE_MODE (etype));
7393 if (size * 2 > len)
7394 return NULL_TREE;
7395 rpart = native_interpret_expr (etype, ptr, size);
7396 if (!rpart)
7397 return NULL_TREE;
7398 ipart = native_interpret_expr (etype, ptr+size, size);
7399 if (!ipart)
7400 return NULL_TREE;
7401 return build_complex (type, rpart, ipart);
7402 }
7403
7404
7405 /* Subroutine of native_interpret_expr. Interpret the contents of
7406 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7407 If the buffer cannot be interpreted, return NULL_TREE. */
7408
7409 static tree
7410 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7411 {
7412 tree etype, elem;
7413 int i, size, count;
7414 tree *elements;
7415
7416 etype = TREE_TYPE (type);
7417 size = GET_MODE_SIZE (TYPE_MODE (etype));
7418 count = TYPE_VECTOR_SUBPARTS (type);
7419 if (size * count > len)
7420 return NULL_TREE;
7421
7422 elements = XALLOCAVEC (tree, count);
7423 for (i = count - 1; i >= 0; i--)
7424 {
7425 elem = native_interpret_expr (etype, ptr+(i*size), size);
7426 if (!elem)
7427 return NULL_TREE;
7428 elements[i] = elem;
7429 }
7430 return build_vector (type, elements);
7431 }
7432
7433
7434 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7435 the buffer PTR of length LEN as a constant of type TYPE. For
7436 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7437 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7438 return NULL_TREE. */
7439
7440 tree
7441 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7442 {
7443 switch (TREE_CODE (type))
7444 {
7445 case INTEGER_TYPE:
7446 case ENUMERAL_TYPE:
7447 case BOOLEAN_TYPE:
7448 case POINTER_TYPE:
7449 case REFERENCE_TYPE:
7450 return native_interpret_int (type, ptr, len);
7451
7452 case REAL_TYPE:
7453 return native_interpret_real (type, ptr, len);
7454
7455 case FIXED_POINT_TYPE:
7456 return native_interpret_fixed (type, ptr, len);
7457
7458 case COMPLEX_TYPE:
7459 return native_interpret_complex (type, ptr, len);
7460
7461 case VECTOR_TYPE:
7462 return native_interpret_vector (type, ptr, len);
7463
7464 default:
7465 return NULL_TREE;
7466 }
7467 }
7468
7469 /* Returns true if we can interpret the contents of a native encoding
7470 as TYPE. */
7471
7472 static bool
7473 can_native_interpret_type_p (tree type)
7474 {
7475 switch (TREE_CODE (type))
7476 {
7477 case INTEGER_TYPE:
7478 case ENUMERAL_TYPE:
7479 case BOOLEAN_TYPE:
7480 case POINTER_TYPE:
7481 case REFERENCE_TYPE:
7482 case FIXED_POINT_TYPE:
7483 case REAL_TYPE:
7484 case COMPLEX_TYPE:
7485 case VECTOR_TYPE:
7486 return true;
7487 default:
7488 return false;
7489 }
7490 }
7491
7492 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7493 TYPE at compile-time. If we're unable to perform the conversion
7494 return NULL_TREE. */
7495
7496 static tree
7497 fold_view_convert_expr (tree type, tree expr)
7498 {
7499 /* We support up to 512-bit values (for V8DFmode). */
7500 unsigned char buffer[64];
7501 int len;
7502
7503 /* Check that the host and target are sane. */
7504 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7505 return NULL_TREE;
7506
7507 len = native_encode_expr (expr, buffer, sizeof (buffer));
7508 if (len == 0)
7509 return NULL_TREE;
7510
7511 return native_interpret_expr (type, buffer, len);
7512 }
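
/* For example, on a target whose single-precision float is IEEE
   binary32, VIEW_CONVERT_EXPR<int>(1.0f) folds to 1065353216
   (0x3f800000): the REAL_CST is encoded into the buffer and the bytes
   are interpreted back as an INTEGER_CST.  */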
7513
7514 /* Build an expression for the address of T. Folds away INDIRECT_REF
7515 to avoid confusing the gimplify process. */
7516
7517 tree
7518 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7519 {
7520 /* The size of the object is not relevant when talking about its address. */
7521 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7522 t = TREE_OPERAND (t, 0);
7523
7524 if (TREE_CODE (t) == INDIRECT_REF)
7525 {
7526 t = TREE_OPERAND (t, 0);
7527
7528 if (TREE_TYPE (t) != ptrtype)
7529 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7530 }
7531 else if (TREE_CODE (t) == MEM_REF
7532 && integer_zerop (TREE_OPERAND (t, 1)))
7533 return TREE_OPERAND (t, 0);
7534 else if (TREE_CODE (t) == MEM_REF
7535 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7536 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7537 TREE_OPERAND (t, 0),
7538 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7539 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7540 {
7541 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7542
7543 if (TREE_TYPE (t) != ptrtype)
7544 t = fold_convert_loc (loc, ptrtype, t);
7545 }
7546 else
7547 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7548
7549 return t;
7550 }
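
/* Examples of the foldings above: &*p and &MEM[p, 0] both collapse
   back to p, the address of a MEM_REF with a constant base becomes a
   POINTER_PLUS_EXPR, and only the remaining cases build an explicit
   ADDR_EXPR.  */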
7551
7552 /* Build an expression for the address of T. */
7553
7554 tree
7555 build_fold_addr_expr_loc (location_t loc, tree t)
7556 {
7557 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7558
7559 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7560 }
7561
7562 static bool vec_cst_ctor_to_array (tree, tree *);
7563
7564 /* Fold a unary expression of code CODE and type TYPE with operand
7565 OP0. Return the folded expression if folding is successful.
7566 Otherwise, return NULL_TREE. */
7567
7568 tree
7569 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7570 {
7571 tree tem;
7572 tree arg0;
7573 enum tree_code_class kind = TREE_CODE_CLASS (code);
7574
7575 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7576 && TREE_CODE_LENGTH (code) == 1);
7577
7578 tem = generic_simplify (loc, code, type, op0);
7579 if (tem)
7580 return tem;
7581
7582 arg0 = op0;
7583 if (arg0)
7584 {
7585 if (CONVERT_EXPR_CODE_P (code)
7586 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7587 {
7588 /* Don't use STRIP_NOPS, because signedness of argument type
7589 matters. */
7590 STRIP_SIGN_NOPS (arg0);
7591 }
7592 else
7593 {
7594 /* Strip any conversions that don't change the mode. This
7595 is safe for every expression, except for a comparison
7596 expression because its signedness is derived from its
7597 operands.
7598
7599 Note that this is done as an internal manipulation within
7600 the constant folder, in order to find the simplest
7601 representation of the arguments so that their form can be
7602 	     studied.  In any case, the appropriate type conversions
7603 should be put back in the tree that will get out of the
7604 constant folder. */
7605 STRIP_NOPS (arg0);
7606 }
7607 }
7608
7609 if (TREE_CODE_CLASS (code) == tcc_unary)
7610 {
7611 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7612 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7613 fold_build1_loc (loc, code, type,
7614 fold_convert_loc (loc, TREE_TYPE (op0),
7615 TREE_OPERAND (arg0, 1))));
7616 else if (TREE_CODE (arg0) == COND_EXPR)
7617 {
7618 tree arg01 = TREE_OPERAND (arg0, 1);
7619 tree arg02 = TREE_OPERAND (arg0, 2);
7620 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7621 arg01 = fold_build1_loc (loc, code, type,
7622 fold_convert_loc (loc,
7623 TREE_TYPE (op0), arg01));
7624 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7625 arg02 = fold_build1_loc (loc, code, type,
7626 fold_convert_loc (loc,
7627 TREE_TYPE (op0), arg02));
7628 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7629 arg01, arg02);
7630
7631 	     If this was a conversion, and all we did was to move it
7632 	     inside the COND_EXPR, bring it back out.  But leave it if
7633 it is a conversion from integer to integer and the
7634 result precision is no wider than a word since such a
7635 conversion is cheap and may be optimized away by combine,
7636 while it couldn't if it were outside the COND_EXPR. Then return
7637 so we don't get into an infinite recursion loop taking the
7638 conversion out and then back in. */
7639
7640 if ((CONVERT_EXPR_CODE_P (code)
7641 || code == NON_LVALUE_EXPR)
7642 && TREE_CODE (tem) == COND_EXPR
7643 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7644 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7645 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7646 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7647 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7648 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7649 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7650 && (INTEGRAL_TYPE_P
7651 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7652 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7653 || flag_syntax_only))
7654 tem = build1_loc (loc, code, type,
7655 build3 (COND_EXPR,
7656 TREE_TYPE (TREE_OPERAND
7657 (TREE_OPERAND (tem, 1), 0)),
7658 TREE_OPERAND (tem, 0),
7659 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7660 TREE_OPERAND (TREE_OPERAND (tem, 2),
7661 0)));
7662 return tem;
7663 }
7664 }
7665
7666 switch (code)
7667 {
7668 case NON_LVALUE_EXPR:
7669 if (!maybe_lvalue_p (op0))
7670 return fold_convert_loc (loc, type, op0);
7671 return NULL_TREE;
7672
7673 CASE_CONVERT:
7674 case FLOAT_EXPR:
7675 case FIX_TRUNC_EXPR:
7676 if (COMPARISON_CLASS_P (op0))
7677 {
7678 /* If we have (type) (a CMP b) and type is an integral type, return
7679 new expression involving the new type. Canonicalize
7680 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7681 non-integral type.
7682 	     Do not fold the result as that would not simplify further;
7683 	     also, folding it again results in infinite recursion.  */
7684 if (TREE_CODE (type) == BOOLEAN_TYPE)
7685 return build2_loc (loc, TREE_CODE (op0), type,
7686 TREE_OPERAND (op0, 0),
7687 TREE_OPERAND (op0, 1));
7688 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7689 && TREE_CODE (type) != VECTOR_TYPE)
7690 return build3_loc (loc, COND_EXPR, type, op0,
7691 constant_boolean_node (true, type),
7692 constant_boolean_node (false, type));
7693 }
7694
7695 /* Handle (T *)&A.B.C for A being of type T and B and C
7696 living at offset zero. This occurs frequently in
7697 C++ upcasting and then accessing the base. */
7698 if (TREE_CODE (op0) == ADDR_EXPR
7699 && POINTER_TYPE_P (type)
7700 && handled_component_p (TREE_OPERAND (op0, 0)))
7701 {
7702 HOST_WIDE_INT bitsize, bitpos;
7703 tree offset;
7704 machine_mode mode;
7705 int unsignedp, volatilep;
7706 tree base = TREE_OPERAND (op0, 0);
7707 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7708 &mode, &unsignedp, &volatilep, false);
7709 /* If the reference was to a (constant) zero offset, we can use
7710 the address of the base if it has the same base type
7711 as the result type and the pointer type is unqualified. */
7712 if (! offset && bitpos == 0
7713 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7714 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7715 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7716 return fold_convert_loc (loc, type,
7717 build_fold_addr_expr_loc (loc, base));
7718 }
7719
7720 if (TREE_CODE (op0) == MODIFY_EXPR
7721 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7722 /* Detect assigning a bitfield. */
7723 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7724 && DECL_BIT_FIELD
7725 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7726 {
7727 /* Don't leave an assignment inside a conversion
7728 unless assigning a bitfield. */
7729 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7730 /* First do the assignment, then return converted constant. */
7731 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7732 TREE_NO_WARNING (tem) = 1;
7733 TREE_USED (tem) = 1;
7734 return tem;
7735 }
7736
7737 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7738 constants (if x has signed type, the sign bit cannot be set
7739 in c). This folds extension into the BIT_AND_EXPR.
7740 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7741 very likely don't have maximal range for their precision and this
7742 transformation effectively doesn't preserve non-maximal ranges. */
7743 if (TREE_CODE (type) == INTEGER_TYPE
7744 && TREE_CODE (op0) == BIT_AND_EXPR
7745 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7746 {
7747 tree and_expr = op0;
7748 tree and0 = TREE_OPERAND (and_expr, 0);
7749 tree and1 = TREE_OPERAND (and_expr, 1);
7750 int change = 0;
7751
7752 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7753 || (TYPE_PRECISION (type)
7754 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7755 change = 1;
7756 else if (TYPE_PRECISION (TREE_TYPE (and1))
7757 <= HOST_BITS_PER_WIDE_INT
7758 && tree_fits_uhwi_p (and1))
7759 {
7760 unsigned HOST_WIDE_INT cst;
7761
7762 cst = tree_to_uhwi (and1);
7763 cst &= HOST_WIDE_INT_M1U
7764 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7765 change = (cst == 0);
7766 #ifdef LOAD_EXTEND_OP
7767 if (change
7768 && !flag_syntax_only
7769 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7770 == ZERO_EXTEND))
7771 {
7772 tree uns = unsigned_type_for (TREE_TYPE (and0));
7773 and0 = fold_convert_loc (loc, uns, and0);
7774 and1 = fold_convert_loc (loc, uns, and1);
7775 }
7776 #endif
7777 }
7778 if (change)
7779 {
7780 tem = force_fit_type (type, wi::to_widest (and1), 0,
7781 TREE_OVERFLOW (and1));
7782 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7783 fold_convert_loc (loc, type, and0), tem);
7784 }
7785 }
7786
7787 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7788 when one of the new casts will fold away. Conservatively we assume
7789 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7790 if (POINTER_TYPE_P (type)
7791 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7792 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7793 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7794 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7795 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7796 {
7797 tree arg00 = TREE_OPERAND (arg0, 0);
7798 tree arg01 = TREE_OPERAND (arg0, 1);
7799
7800 return fold_build_pointer_plus_loc
7801 (loc, fold_convert_loc (loc, type, arg00), arg01);
7802 }
7803
7804 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7805    of the same precision, and X has an integer type not narrower than
7806    T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7807 if (INTEGRAL_TYPE_P (type)
7808 && TREE_CODE (op0) == BIT_NOT_EXPR
7809 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7810 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7811 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7812 {
7813 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7814 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7815 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7816 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7817 fold_convert_loc (loc, type, tem));
7818 }
7819
7820 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7821 type of X and Y (integer types only). */
7822 if (INTEGRAL_TYPE_P (type)
7823 && TREE_CODE (op0) == MULT_EXPR
7824 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7825 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7826 {
7827 /* Be careful not to introduce new overflows. */
7828 tree mult_type;
7829 if (TYPE_OVERFLOW_WRAPS (type))
7830 mult_type = type;
7831 else
7832 mult_type = unsigned_type_for (type);
7833
7834 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7835 {
7836 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7837 fold_convert_loc (loc, mult_type,
7838 TREE_OPERAND (op0, 0)),
7839 fold_convert_loc (loc, mult_type,
7840 TREE_OPERAND (op0, 1)));
7841 return fold_convert_loc (loc, type, tem);
7842 }
7843 }
7844
7845 tem = fold_convert_const (code, type, arg0);
7846 return tem ? tem : NULL_TREE;
7847
7848 case ADDR_SPACE_CONVERT_EXPR:
7849 if (integer_zerop (arg0))
7850 return fold_convert_const (code, type, arg0);
7851 return NULL_TREE;
7852
7853 case FIXED_CONVERT_EXPR:
7854 tem = fold_convert_const (code, type, arg0);
7855 return tem ? tem : NULL_TREE;
7856
7857 case VIEW_CONVERT_EXPR:
7858 if (TREE_CODE (op0) == MEM_REF)
7859 return fold_build2_loc (loc, MEM_REF, type,
7860 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7861
7862 return fold_view_convert_expr (type, op0);
7863
7864 case NEGATE_EXPR:
7865 tem = fold_negate_expr (loc, arg0);
7866 if (tem)
7867 return fold_convert_loc (loc, type, tem);
7868 return NULL_TREE;
7869
7870 case ABS_EXPR:
7871 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7872 return fold_abs_const (arg0, type);
7873 /* Convert fabs((double)float) into (double)fabsf(float). */
7874 else if (TREE_CODE (arg0) == NOP_EXPR
7875 && TREE_CODE (type) == REAL_TYPE)
7876 {
7877 tree targ0 = strip_float_extensions (arg0);
7878 if (targ0 != arg0)
7879 return fold_convert_loc (loc, type,
7880 fold_build1_loc (loc, ABS_EXPR,
7881 TREE_TYPE (targ0),
7882 targ0));
7883 }
7884 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7885 else if (TREE_CODE (arg0) == ABS_EXPR)
7886 return arg0;
7887
7888 /* Strip sign ops from argument. */
7889 if (TREE_CODE (type) == REAL_TYPE)
7890 {
7891 tem = fold_strip_sign_ops (arg0);
7892 if (tem)
7893 return fold_build1_loc (loc, ABS_EXPR, type,
7894 fold_convert_loc (loc, type, tem));
7895 }
7896 return NULL_TREE;
7897
7898 case CONJ_EXPR:
7899 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7900 return fold_convert_loc (loc, type, arg0);
7901 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7902 {
7903 tree itype = TREE_TYPE (type);
7904 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7905 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7906 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7907 negate_expr (ipart));
7908 }
7909 if (TREE_CODE (arg0) == COMPLEX_CST)
7910 {
7911 tree itype = TREE_TYPE (type);
7912 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
7913 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
7914 return build_complex (type, rpart, negate_expr (ipart));
7915 }
7916 if (TREE_CODE (arg0) == CONJ_EXPR)
7917 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
7918 return NULL_TREE;
7919
7920 case BIT_NOT_EXPR:
7921 if (TREE_CODE (arg0) == INTEGER_CST)
7922 return fold_not_const (arg0, type);
7923 /* Convert ~ (-A) to A - 1. */
7924 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7925 return fold_build2_loc (loc, MINUS_EXPR, type,
7926 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
7927 build_int_cst (type, 1));
7928 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7929 else if (INTEGRAL_TYPE_P (type)
7930 && ((TREE_CODE (arg0) == MINUS_EXPR
7931 && integer_onep (TREE_OPERAND (arg0, 1)))
7932 || (TREE_CODE (arg0) == PLUS_EXPR
7933 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7934 return fold_build1_loc (loc, NEGATE_EXPR, type,
7935 fold_convert_loc (loc, type,
7936 TREE_OPERAND (arg0, 0)));
7937 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7938 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7939 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7940 fold_convert_loc (loc, type,
7941 TREE_OPERAND (arg0, 0)))))
7942 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7943 fold_convert_loc (loc, type,
7944 TREE_OPERAND (arg0, 1)));
7945 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7946 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7947 fold_convert_loc (loc, type,
7948 TREE_OPERAND (arg0, 1)))))
7949 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7950 fold_convert_loc (loc, type,
7951 TREE_OPERAND (arg0, 0)), tem);
7952 /* Perform BIT_NOT_EXPR on each element individually. */
7953 else if (TREE_CODE (arg0) == VECTOR_CST)
7954 {
7955 tree *elements;
7956 tree elem;
7957 unsigned count = VECTOR_CST_NELTS (arg0), i;
7958
7959 elements = XALLOCAVEC (tree, count);
7960 for (i = 0; i < count; i++)
7961 {
7962 elem = VECTOR_CST_ELT (arg0, i);
7963 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
7964 if (elem == NULL_TREE)
7965 break;
7966 elements[i] = elem;
7967 }
7968 if (i == count)
7969 return build_vector (type, elements);
7970 }
7971 else if (COMPARISON_CLASS_P (arg0)
7972 && (VECTOR_TYPE_P (type)
7973 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
7974 {
7975 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
7976 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
7977 HONOR_NANS (TYPE_MODE (op_type)));
7978 if (subcode != ERROR_MARK)
7979 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
7980 TREE_OPERAND (arg0, 1));
7981 }
7982
7983
7984 return NULL_TREE;
7985
7986 case TRUTH_NOT_EXPR:
7987 /* Note that the operand of this must be an int
7988 and its values must be 0 or 1.
7989 ("true" is a fixed value perhaps depending on the language,
7990 but we don't handle values other than 1 correctly yet.) */
7991 tem = fold_truth_not_expr (loc, arg0);
7992 if (!tem)
7993 return NULL_TREE;
7994 return fold_convert_loc (loc, type, tem);
7995
7996 case REALPART_EXPR:
7997 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7998 return fold_convert_loc (loc, type, arg0);
7999 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8000 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8001 TREE_OPERAND (arg0, 1));
8002 if (TREE_CODE (arg0) == COMPLEX_CST)
8003 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8004 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8005 {
8006 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8007 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8008 fold_build1_loc (loc, REALPART_EXPR, itype,
8009 TREE_OPERAND (arg0, 0)),
8010 fold_build1_loc (loc, REALPART_EXPR, itype,
8011 TREE_OPERAND (arg0, 1)));
8012 return fold_convert_loc (loc, type, tem);
8013 }
8014 if (TREE_CODE (arg0) == CONJ_EXPR)
8015 {
8016 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8017 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8018 TREE_OPERAND (arg0, 0));
8019 return fold_convert_loc (loc, type, tem);
8020 }
8021 if (TREE_CODE (arg0) == CALL_EXPR)
8022 {
8023 tree fn = get_callee_fndecl (arg0);
8024 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8025 switch (DECL_FUNCTION_CODE (fn))
8026 {
8027 CASE_FLT_FN (BUILT_IN_CEXPI):
8028 fn = mathfn_built_in (type, BUILT_IN_COS);
8029 if (fn)
8030 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8031 break;
8032
8033 default:
8034 break;
8035 }
8036 }
8037 return NULL_TREE;
8038
8039 case IMAGPART_EXPR:
8040 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8041 return build_zero_cst (type);
8042 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8043 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8044 TREE_OPERAND (arg0, 0));
8045 if (TREE_CODE (arg0) == COMPLEX_CST)
8046 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8047 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8048 {
8049 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8050 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8051 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8052 TREE_OPERAND (arg0, 0)),
8053 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8054 TREE_OPERAND (arg0, 1)));
8055 return fold_convert_loc (loc, type, tem);
8056 }
8057 if (TREE_CODE (arg0) == CONJ_EXPR)
8058 {
8059 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8060 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8061 return fold_convert_loc (loc, type, negate_expr (tem));
8062 }
8063 if (TREE_CODE (arg0) == CALL_EXPR)
8064 {
8065 tree fn = get_callee_fndecl (arg0);
8066 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8067 switch (DECL_FUNCTION_CODE (fn))
8068 {
8069 CASE_FLT_FN (BUILT_IN_CEXPI):
8070 fn = mathfn_built_in (type, BUILT_IN_SIN);
8071 if (fn)
8072 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8073 break;
8074
8075 default:
8076 break;
8077 }
8078 }
8079 return NULL_TREE;
8080
8081 case INDIRECT_REF:
8082 /* Fold *&X to X if X is an lvalue. */
8083 if (TREE_CODE (op0) == ADDR_EXPR)
8084 {
8085 tree op00 = TREE_OPERAND (op0, 0);
8086 if ((TREE_CODE (op00) == VAR_DECL
8087 || TREE_CODE (op00) == PARM_DECL
8088 || TREE_CODE (op00) == RESULT_DECL)
8089 && !TREE_READONLY (op00))
8090 return op00;
8091 }
8092 return NULL_TREE;
8093
8094 case VEC_UNPACK_LO_EXPR:
8095 case VEC_UNPACK_HI_EXPR:
8096 case VEC_UNPACK_FLOAT_LO_EXPR:
8097 case VEC_UNPACK_FLOAT_HI_EXPR:
8098 {
8099 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8100 tree *elts;
8101 enum tree_code subcode;
8102
8103 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8104 if (TREE_CODE (arg0) != VECTOR_CST)
8105 return NULL_TREE;
8106
8107 elts = XALLOCAVEC (tree, nelts * 2);
8108 if (!vec_cst_ctor_to_array (arg0, elts))
8109 return NULL_TREE;
8110
8111 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8112 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8113 elts += nelts;
8114
8115 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8116 subcode = NOP_EXPR;
8117 else
8118 subcode = FLOAT_EXPR;
8119
8120 for (i = 0; i < nelts; i++)
8121 {
8122 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8123 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8124 return NULL_TREE;
8125 }
8126
8127 return build_vector (type, elts);
8128 }
8129
8130 case REDUC_MIN_EXPR:
8131 case REDUC_MAX_EXPR:
8132 case REDUC_PLUS_EXPR:
8133 {
8134 unsigned int nelts, i;
8135 tree *elts;
8136 enum tree_code subcode;
8137
8138 if (TREE_CODE (op0) != VECTOR_CST)
8139 return NULL_TREE;
8140 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8141
8142 elts = XALLOCAVEC (tree, nelts);
8143 if (!vec_cst_ctor_to_array (op0, elts))
8144 return NULL_TREE;
8145
8146 switch (code)
8147 {
8148 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8149 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8150 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8151 default: gcc_unreachable ();
8152 }
8153
8154 for (i = 1; i < nelts; i++)
8155 {
8156 elts[0] = const_binop (subcode, elts[0], elts[i]);
8157 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8158 return NULL_TREE;
8159 }
8160
8161 return elts[0];
8162 }
8163
8164 default:
8165 return NULL_TREE;
8166 } /* switch (code) */
8167 }
8168
8169
8170 /* If the operation was a conversion do _not_ mark a resulting constant
8171 with TREE_OVERFLOW if the original constant was not. These conversions
8172 have implementation defined behavior and retaining the TREE_OVERFLOW
8173 flag here would confuse later passes such as VRP. */
8174 tree
8175 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8176 tree type, tree op0)
8177 {
8178 tree res = fold_unary_loc (loc, code, type, op0);
8179 if (res
8180 && TREE_CODE (res) == INTEGER_CST
8181 && TREE_CODE (op0) == INTEGER_CST
8182 && CONVERT_EXPR_CODE_P (code))
8183 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8184
8185 return res;
8186 }
8187
8188 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8189 operands OP0 and OP1. LOC is the location of the resulting expression.
8190    ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
8191 Return the folded expression if folding is successful. Otherwise,
8192 return NULL_TREE. */
8193 static tree
8194 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8195 tree arg0, tree arg1, tree op0, tree op1)
8196 {
8197 tree tem;
8198
8199 /* We only do these simplifications if we are optimizing. */
8200 if (!optimize)
8201 return NULL_TREE;
8202
8203 /* Check for things like (A || B) && (A || C). We can convert this
8204 to A || (B && C). Note that either operator can be any of the four
8205 truth and/or operations and the transformation will still be
8206 valid. Also note that we only care about order for the
8207 ANDIF and ORIF operators. If B contains side effects, this
8208 might change the truth-value of A. */
8209 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8210 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8211 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8212 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8213 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8214 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8215 {
8216 tree a00 = TREE_OPERAND (arg0, 0);
8217 tree a01 = TREE_OPERAND (arg0, 1);
8218 tree a10 = TREE_OPERAND (arg1, 0);
8219 tree a11 = TREE_OPERAND (arg1, 1);
8220 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8221 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8222 && (code == TRUTH_AND_EXPR
8223 || code == TRUTH_OR_EXPR));
8224
8225 if (operand_equal_p (a00, a10, 0))
8226 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8227 fold_build2_loc (loc, code, type, a01, a11));
8228 else if (commutative && operand_equal_p (a00, a11, 0))
8229 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8230 fold_build2_loc (loc, code, type, a01, a10));
8231 else if (commutative && operand_equal_p (a01, a10, 0))
8232 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8233 fold_build2_loc (loc, code, type, a00, a11));
8234
8235       /* This case is tricky because we must either have commutative
8236 operators or else A10 must not have side-effects. */
8237
8238 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8239 && operand_equal_p (a01, a11, 0))
8240 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8241 fold_build2_loc (loc, code, type, a00, a10),
8242 a01);
8243 }
8244
8245 /* See if we can build a range comparison. */
8246 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8247 return tem;
8248
8249 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8250 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8251 {
8252 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8253 if (tem)
8254 return fold_build2_loc (loc, code, type, tem, arg1);
8255 }
8256
8257 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8258 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8259 {
8260 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8261 if (tem)
8262 return fold_build2_loc (loc, code, type, arg0, tem);
8263 }
8264
8265 /* Check for the possibility of merging component references. If our
8266 lhs is another similar operation, try to merge its rhs with our
8267 rhs. Then try to merge our lhs and rhs. */
8268 if (TREE_CODE (arg0) == code
8269 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8270 TREE_OPERAND (arg0, 1), arg1)))
8271 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8272
8273 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8274 return tem;
8275
8276 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8277 && (code == TRUTH_AND_EXPR
8278 || code == TRUTH_ANDIF_EXPR
8279 || code == TRUTH_OR_EXPR
8280 || code == TRUTH_ORIF_EXPR))
8281 {
8282 enum tree_code ncode, icode;
8283
8284 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8285 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8286 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8287
8288 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8289 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8290 	 We don't want to pack more than two leaves into a non-IF
8291 	 AND/OR expression.
8292 	 If the tree code of the left-hand operand isn't an AND/OR-IF
8293 	 code and isn't equal to IF-CODE, we don't want to add the
8294 	 right-hand operand.  If the inner right-hand side of the
8295 	 left-hand operand has side-effects, or isn't simple, then we
8296 	 can't add to it, as otherwise we might destroy the if-sequence.  */
8297 if (TREE_CODE (arg0) == icode
8298 && simple_operand_p_2 (arg1)
8299 	    /* Needed for sequence points to handle traps and
8300 side-effects. */
8301 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8302 {
8303 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8304 arg1);
8305 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8306 tem);
8307 }
8308       /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8309 	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8310 else if (TREE_CODE (arg1) == icode
8311 && simple_operand_p_2 (arg0)
8312 		   /* Needed for sequence points to handle traps and
8313 side-effects. */
8314 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8315 {
8316 tem = fold_build2_loc (loc, ncode, type,
8317 arg0, TREE_OPERAND (arg1, 0));
8318 return fold_build2_loc (loc, icode, type, tem,
8319 TREE_OPERAND (arg1, 1));
8320 }
8321 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8322 into (A OR B).
8323 	 For sequence point consistency, we need to check for trapping,
8324 and side-effects. */
8325 else if (code == icode && simple_operand_p_2 (arg0)
8326 && simple_operand_p_2 (arg1))
8327 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8328 }
8329
8330 return NULL_TREE;
8331 }
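
/* A concrete instance of the distribution performed above:

     (a || b) && (a || c)  becomes  a || (b && c)

   which holds for any mix of the four truth operations, provided the
   shared operand keeps its evaluation position and the discarded copy
   has no side effects.  */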
8332
8333 /* Fold a binary expression of code CODE and type TYPE with operands
8334 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8335 Return the folded expression if folding is successful. Otherwise,
8336 return NULL_TREE. */
8337
8338 static tree
8339 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8340 {
8341 enum tree_code compl_code;
8342
8343 if (code == MIN_EXPR)
8344 compl_code = MAX_EXPR;
8345 else if (code == MAX_EXPR)
8346 compl_code = MIN_EXPR;
8347 else
8348 gcc_unreachable ();
8349
8350 /* MIN (MAX (a, b), b) == b. */
8351 if (TREE_CODE (op0) == compl_code
8352 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8353 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8354
8355 /* MIN (MAX (b, a), b) == b. */
8356 if (TREE_CODE (op0) == compl_code
8357 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8358 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8359 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8360
8361 /* MIN (a, MAX (a, b)) == a. */
8362 if (TREE_CODE (op1) == compl_code
8363 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8364 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8365 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8366
8367 /* MIN (a, MAX (b, a)) == a. */
8368 if (TREE_CODE (op1) == compl_code
8369 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8370 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8371 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8372
8373 return NULL_TREE;
8374 }
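
/* For example, MIN (MAX (a, b), b) folds to b: MAX (a, b) is never
   smaller than b, so the outer MIN always selects b.  The
   reorder_operands_p checks above keep the fold from reordering
   operands whose evaluation cannot be swapped.  */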
8375
8376 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8377 by changing CODE to reduce the magnitude of constants involved in
8378 ARG0 of the comparison.
8379 Returns a canonicalized comparison tree if a simplification was
8380 possible, otherwise returns NULL_TREE.
8381 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8382 valid if signed overflow is undefined. */
8383
8384 static tree
8385 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8386 tree arg0, tree arg1,
8387 bool *strict_overflow_p)
8388 {
8389 enum tree_code code0 = TREE_CODE (arg0);
8390 tree t, cst0 = NULL_TREE;
8391 int sgn0;
8392 bool swap = false;
8393
8394 /* Match A +- CST code arg1 and CST code arg1. We can change the
8395 first form only if overflow is undefined. */
8396 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8397 /* In principle pointers also have undefined overflow behavior,
8398 but that causes problems elsewhere. */
8399 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8400 && (code0 == MINUS_EXPR
8401 || code0 == PLUS_EXPR)
8402 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8403 || code0 == INTEGER_CST))
8404 return NULL_TREE;
8405
8406 /* Identify the constant in arg0 and its sign. */
8407 if (code0 == INTEGER_CST)
8408 cst0 = arg0;
8409 else
8410 cst0 = TREE_OPERAND (arg0, 1);
8411 sgn0 = tree_int_cst_sgn (cst0);
8412
8413 /* Overflowed constants and zero will cause problems. */
8414 if (integer_zerop (cst0)
8415 || TREE_OVERFLOW (cst0))
8416 return NULL_TREE;
8417
8418 /* See if we can reduce the magnitude of the constant in
8419 arg0 by changing the comparison code. */
8420 if (code0 == INTEGER_CST)
8421 {
8422 /* CST <= arg1 -> CST-1 < arg1. */
8423 if (code == LE_EXPR && sgn0 == 1)
8424 code = LT_EXPR;
8425 /* -CST < arg1 -> -CST-1 <= arg1. */
8426 else if (code == LT_EXPR && sgn0 == -1)
8427 code = LE_EXPR;
8428 /* CST > arg1 -> CST-1 >= arg1. */
8429 else if (code == GT_EXPR && sgn0 == 1)
8430 code = GE_EXPR;
8431 /* -CST >= arg1 -> -CST-1 > arg1. */
8432 else if (code == GE_EXPR && sgn0 == -1)
8433 code = GT_EXPR;
8434 else
8435 return NULL_TREE;
8436 /* arg1 code' CST' might be more canonical. */
8437 swap = true;
8438 }
8439 else
8440 {
8441 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8442 if (code == LT_EXPR
8443 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8444 code = LE_EXPR;
8445 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8446 else if (code == GT_EXPR
8447 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8448 code = GE_EXPR;
8449 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8450 else if (code == LE_EXPR
8451 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8452 code = LT_EXPR;
8453 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8454 else if (code == GE_EXPR
8455 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8456 code = GT_EXPR;
8457 else
8458 return NULL_TREE;
8459 *strict_overflow_p = true;
8460 }
8461
8462 /* Now build the constant reduced in magnitude. But not if that
8463      would produce one outside of its type's range.  */
8464 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8465 && ((sgn0 == 1
8466 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8467 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8468 || (sgn0 == -1
8469 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8470 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8471 /* We cannot swap the comparison here as that would cause us to
8472 endlessly recurse. */
8473 return NULL_TREE;
8474
8475 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8476 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8477 if (code0 != INTEGER_CST)
8478 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8479 t = fold_convert (TREE_TYPE (arg1), t);
8480
8481   /* If swapping might yield a more canonical form, do so.  */
8482 if (swap)
8483 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8484 else
8485 return fold_build2_loc (loc, code, type, t, arg1);
8486 }
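
/* For example, with signed int x this canonicalizes

     x + 3 <= y  into  x + 2 < y   (setting *STRICT_OVERFLOW_P), and
     5 <= y      into  y > 4       (reduced and swapped),

   where the first form is only valid because signed overflow is
   assumed to be undefined.  */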
8487
8488 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8489 overflow further. Try to decrease the magnitude of constants involved
8490 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8491 and put sole constants at the second argument position.
8492 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8493
8494 static tree
8495 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8496 tree arg0, tree arg1)
8497 {
8498 tree t;
8499 bool strict_overflow_p;
8500 const char * const warnmsg = G_("assuming signed overflow does not occur "
8501 "when reducing constant in comparison");
8502
8503 /* Try canonicalization by simplifying arg0. */
8504 strict_overflow_p = false;
8505 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8506 &strict_overflow_p);
8507 if (t)
8508 {
8509 if (strict_overflow_p)
8510 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8511 return t;
8512 }
8513
8514 /* Try canonicalization by simplifying arg1 using the swapped
8515 comparison. */
8516 code = swap_tree_comparison (code);
8517 strict_overflow_p = false;
8518 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8519 &strict_overflow_p);
8520 if (t && strict_overflow_p)
8521 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8522 return t;
8523 }
8524
8525 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8526 space. This is used to avoid issuing overflow warnings for
8527    expressions like &p->x which cannot wrap.  */
8528
8529 static bool
8530 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8531 {
8532 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8533 return true;
8534
8535 if (bitpos < 0)
8536 return true;
8537
8538 wide_int wi_offset;
8539 int precision = TYPE_PRECISION (TREE_TYPE (base));
8540 if (offset == NULL_TREE)
8541 wi_offset = wi::zero (precision);
8542 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8543 return true;
8544 else
8545 wi_offset = offset;
8546
8547 bool overflow;
8548 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8549 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8550 if (overflow)
8551 return true;
8552
8553 if (!wi::fits_uhwi_p (total))
8554 return true;
8555
8556 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8557 if (size <= 0)
8558 return true;
8559
8560 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8561 array. */
8562 if (TREE_CODE (base) == ADDR_EXPR)
8563 {
8564 HOST_WIDE_INT base_size;
8565
8566 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8567 if (base_size > 0 && size < base_size)
8568 size = base_size;
8569 }
8570
8571 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8572 }
8573
8574 /* Return the HOST_BITS_PER_WIDE_INT least significant bits of T, an
8575    INTEGER_CST of sizetype kind.  This makes sure to properly
8576    sign-extend the constant.  */
8577
8578 static HOST_WIDE_INT
8579 size_low_cst (const_tree t)
8580 {
8581 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8582 int prec = TYPE_PRECISION (TREE_TYPE (t));
8583 if (prec < HOST_BITS_PER_WIDE_INT)
8584 return sext_hwi (w, prec);
8585 return w;
8586 }
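
/* E.g. with a 32-bit sizetype and a 64-bit HOST_WIDE_INT, the
   sizetype constant 0xffffffff comes back as -1 rather than
   4294967295, so host-side offset arithmetic sees the value the
   target would.  */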
8587
8588 /* Subroutine of fold_binary. This routine performs all of the
8589 transformations that are common to the equality/inequality
8590 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8591 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8592 fold_binary should call fold_binary rather than this function directly. Fold a comparison with
8593 tree code CODE and type TYPE with operands OP0 and OP1. Return
8594 the folded comparison or NULL_TREE. */
8595
8596 static tree
8597 fold_comparison (location_t loc, enum tree_code code, tree type,
8598 tree op0, tree op1)
8599 {
8600 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8601 tree arg0, arg1, tem;
8602
8603 arg0 = op0;
8604 arg1 = op1;
8605
8606 STRIP_SIGN_NOPS (arg0);
8607 STRIP_SIGN_NOPS (arg1);
8608
8609 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8610 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8611 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8612 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8613 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8614 && TREE_CODE (arg1) == INTEGER_CST
8615 && !TREE_OVERFLOW (arg1))
8616 {
8617 const enum tree_code
8618 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8619 tree const1 = TREE_OPERAND (arg0, 1);
8620 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8621 tree variable = TREE_OPERAND (arg0, 0);
8622 tree new_const = int_const_binop (reverse_op, const2, const1);
8623
8624 /* If the constant operation overflowed this can be
8625 simplified as a comparison against INT_MAX/INT_MIN. */
8626 if (TREE_OVERFLOW (new_const)
8627 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8628 {
8629 int const1_sgn = tree_int_cst_sgn (const1);
8630 enum tree_code code2 = code;
8631
8632 /* Get the sign of the constant on the lhs if the
8633 operation were VARIABLE + CONST1. */
8634 if (TREE_CODE (arg0) == MINUS_EXPR)
8635 const1_sgn = -const1_sgn;
8636
8637 /* The sign of the constant determines if we overflowed
8638 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8639 Canonicalize to the INT_MIN overflow by swapping the comparison
8640 if necessary. */
8641 if (const1_sgn == -1)
8642 code2 = swap_tree_comparison (code);
8643
8644 /* We now can look at the canonicalized case
8645 VARIABLE + 1 CODE2 INT_MIN
8646 and decide on the result. */
8647 switch (code2)
8648 {
8649 case EQ_EXPR:
8650 case LT_EXPR:
8651 case LE_EXPR:
8652 return
8653 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8654
8655 case NE_EXPR:
8656 case GE_EXPR:
8657 case GT_EXPR:
8658 return
8659 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8660
8661 default:
8662 gcc_unreachable ();
8663 }
8664 }
8665 else
8666 {
8667 if (!equality_code)
8668 fold_overflow_warning ("assuming signed overflow does not occur "
8669 "when changing X +- C1 cmp C2 to "
8670 "X cmp C2 -+ C1",
8671 WARN_STRICT_OVERFLOW_COMPARISON);
8672 return fold_build2_loc (loc, code, type, variable, new_const);
8673 }
8674 }
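
  /* Worked example (editorial): for 32-bit signed int x,
     x + 3 == 7 folds to x == 4 via new_const = 7 - 3, while
     x + 10 < INT_MIN + 5 computes new_const = INT_MIN - 5, which
     overflows; const1_sgn == 1 selects the INT_MIN case, code2 stays
     LT_EXPR, and the switch above yields boolean_false_node, matching
     the fact that no valid x satisfies x < INT_MIN - 5. */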
8675
8676 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8677 if (TREE_CODE (arg0) == MINUS_EXPR
8678 && equality_code
8679 && integer_zerop (arg1))
8680 {
8681 /* ??? The transformation is valid for the other operators if overflow
8682 is undefined for the type, but performing it here badly interacts
8683 with the transformation in fold_cond_expr_with_comparison which
8684 attempts to synthesize ABS_EXPR. */
8685 if (!equality_code)
8686 fold_overflow_warning ("assuming signed overflow does not occur "
8687 "when changing X - Y cmp 0 to X cmp Y",
8688 WARN_STRICT_OVERFLOW_COMPARISON);
8689 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8690 TREE_OPERAND (arg0, 1));
8691 }
8692
8693 /* For comparisons of pointers we can decompose it to a compile time
8694 comparison of the base objects and the offsets into the object.
8695 This requires at least one operand being an ADDR_EXPR or a
8696 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8697 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8698 && (TREE_CODE (arg0) == ADDR_EXPR
8699 || TREE_CODE (arg1) == ADDR_EXPR
8700 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8701 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8702 {
8703 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8704 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8705 machine_mode mode;
8706 int volatilep, unsignedp;
8707 bool indirect_base0 = false, indirect_base1 = false;
8708
8709 /* Get base and offset for the access. Strip ADDR_EXPR for
8710 get_inner_reference, but put it back by stripping INDIRECT_REF
8711 off the base object if possible. indirect_baseN will be true
8712 if baseN is not an address but refers to the object itself. */
8713 base0 = arg0;
8714 if (TREE_CODE (arg0) == ADDR_EXPR)
8715 {
8716 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8717 &bitsize, &bitpos0, &offset0, &mode,
8718 &unsignedp, &volatilep, false);
8719 if (TREE_CODE (base0) == INDIRECT_REF)
8720 base0 = TREE_OPERAND (base0, 0);
8721 else
8722 indirect_base0 = true;
8723 }
8724 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8725 {
8726 base0 = TREE_OPERAND (arg0, 0);
8727 STRIP_SIGN_NOPS (base0);
8728 if (TREE_CODE (base0) == ADDR_EXPR)
8729 {
8730 base0 = TREE_OPERAND (base0, 0);
8731 indirect_base0 = true;
8732 }
8733 offset0 = TREE_OPERAND (arg0, 1);
8734 if (tree_fits_shwi_p (offset0))
8735 {
8736 HOST_WIDE_INT off = size_low_cst (offset0);
8737 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8738 * BITS_PER_UNIT)
8739 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8740 {
8741 bitpos0 = off * BITS_PER_UNIT;
8742 offset0 = NULL_TREE;
8743 }
8744 }
8745 }
8746
8747 base1 = arg1;
8748 if (TREE_CODE (arg1) == ADDR_EXPR)
8749 {
8750 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8751 &bitsize, &bitpos1, &offset1, &mode,
8752 &unsignedp, &volatilep, false);
8753 if (TREE_CODE (base1) == INDIRECT_REF)
8754 base1 = TREE_OPERAND (base1, 0);
8755 else
8756 indirect_base1 = true;
8757 }
8758 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8759 {
8760 base1 = TREE_OPERAND (arg1, 0);
8761 STRIP_SIGN_NOPS (base1);
8762 if (TREE_CODE (base1) == ADDR_EXPR)
8763 {
8764 base1 = TREE_OPERAND (base1, 0);
8765 indirect_base1 = true;
8766 }
8767 offset1 = TREE_OPERAND (arg1, 1);
8768 if (tree_fits_shwi_p (offset1))
8769 {
8770 HOST_WIDE_INT off = size_low_cst (offset1);
8771 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8772 * BITS_PER_UNIT)
8773 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8774 {
8775 bitpos1 = off * BITS_PER_UNIT;
8776 offset1 = NULL_TREE;
8777 }
8778 }
8779 }
8780
8781 /* A local variable can never be pointed to by
8782 the default SSA name of an incoming parameter. */
8783 if ((TREE_CODE (arg0) == ADDR_EXPR
8784 && indirect_base0
8785 && TREE_CODE (base0) == VAR_DECL
8786 && auto_var_in_fn_p (base0, current_function_decl)
8787 && !indirect_base1
8788 && TREE_CODE (base1) == SSA_NAME
8789 && SSA_NAME_IS_DEFAULT_DEF (base1)
8790 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8791 || (TREE_CODE (arg1) == ADDR_EXPR
8792 && indirect_base1
8793 && TREE_CODE (base1) == VAR_DECL
8794 && auto_var_in_fn_p (base1, current_function_decl)
8795 && !indirect_base0
8796 && TREE_CODE (base0) == SSA_NAME
8797 && SSA_NAME_IS_DEFAULT_DEF (base0)
8798 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8799 {
8800 if (code == NE_EXPR)
8801 return constant_boolean_node (1, type);
8802 else if (code == EQ_EXPR)
8803 return constant_boolean_node (0, type);
8804 }
8805 /* If we have equivalent bases we might be able to simplify. */
8806 else if (indirect_base0 == indirect_base1
8807 && operand_equal_p (base0, base1, 0))
8808 {
8809 /* We can fold this expression to a constant if the non-constant
8810 offset parts are equal. */
8811 if ((offset0 == offset1
8812 || (offset0 && offset1
8813 && operand_equal_p (offset0, offset1, 0)))
8814 && (code == EQ_EXPR
8815 || code == NE_EXPR
8816 || (indirect_base0 && DECL_P (base0))
8817 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8818
8819 {
8820 if (!equality_code
8821 && bitpos0 != bitpos1
8822 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8823 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8824 fold_overflow_warning (("assuming pointer wraparound does not "
8825 "occur when comparing P +- C1 with "
8826 "P +- C2"),
8827 WARN_STRICT_OVERFLOW_CONDITIONAL);
8828
8829 switch (code)
8830 {
8831 case EQ_EXPR:
8832 return constant_boolean_node (bitpos0 == bitpos1, type);
8833 case NE_EXPR:
8834 return constant_boolean_node (bitpos0 != bitpos1, type);
8835 case LT_EXPR:
8836 return constant_boolean_node (bitpos0 < bitpos1, type);
8837 case LE_EXPR:
8838 return constant_boolean_node (bitpos0 <= bitpos1, type);
8839 case GE_EXPR:
8840 return constant_boolean_node (bitpos0 >= bitpos1, type);
8841 case GT_EXPR:
8842 return constant_boolean_node (bitpos0 > bitpos1, type);
8843 default:;
8844 }
8845 }
8846 /* We can simplify the comparison to a comparison of the variable
8847 offset parts if the constant offset parts are equal.
8848 Be careful to use signed sizetype here because otherwise we
8849 mess with array offsets in the wrong way. This is possible
8850 because pointer arithmetic is restricted to remain within an
8851 object and overflow on pointer differences is undefined as of
8852 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8853 else if (bitpos0 == bitpos1
8854 && (equality_code
8855 || (indirect_base0 && DECL_P (base0))
8856 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8857 {
8858 /* By converting to signed sizetype we cover middle-end pointer
8859 arithmetic which operates on unsigned pointer types of size
8860 type size and ARRAY_REF offsets which are properly sign or
8861 zero extended from their type in case it is narrower than
8862 sizetype. */
8863 if (offset0 == NULL_TREE)
8864 offset0 = build_int_cst (ssizetype, 0);
8865 else
8866 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8867 if (offset1 == NULL_TREE)
8868 offset1 = build_int_cst (ssizetype, 0);
8869 else
8870 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8871
8872 if (!equality_code
8873 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8874 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8875 fold_overflow_warning (("assuming pointer wraparound does not "
8876 "occur when comparing P +- C1 with "
8877 "P +- C2"),
8878 WARN_STRICT_OVERFLOW_COMPARISON);
8879
8880 return fold_build2_loc (loc, code, type, offset0, offset1);
8881 }
8882 }
8883 /* For non-equal bases we can simplify if they are addresses
8884 of local binding decls or constants. */
8885 else if (indirect_base0 && indirect_base1
8886 /* We know that !operand_equal_p (base0, base1, 0)
8887 because the if condition was false. But make
8888 sure two decls are not the same. */
8889 && base0 != base1
8890 && TREE_CODE (arg0) == ADDR_EXPR
8891 && TREE_CODE (arg1) == ADDR_EXPR
8892 && (((TREE_CODE (base0) == VAR_DECL
8893 || TREE_CODE (base0) == PARM_DECL)
8894 && (targetm.binds_local_p (base0)
8895 || CONSTANT_CLASS_P (base1)))
8896 || CONSTANT_CLASS_P (base0))
8897 && (((TREE_CODE (base1) == VAR_DECL
8898 || TREE_CODE (base1) == PARM_DECL)
8899 && (targetm.binds_local_p (base1)
8900 || CONSTANT_CLASS_P (base0)))
8901 || CONSTANT_CLASS_P (base1)))
8902 {
8903 if (code == EQ_EXPR)
8904 return omit_two_operands_loc (loc, type, boolean_false_node,
8905 arg0, arg1);
8906 else if (code == NE_EXPR)
8907 return omit_two_operands_loc (loc, type, boolean_true_node,
8908 arg0, arg1);
8909 }
8910 /* For equal offsets we can simplify to a comparison of the
8911 base addresses. */
8912 else if (bitpos0 == bitpos1
8913 && (indirect_base0
8914 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8915 && (indirect_base1
8916 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8917 && ((offset0 == offset1)
8918 || (offset0 && offset1
8919 && operand_equal_p (offset0, offset1, 0))))
8920 {
8921 if (indirect_base0)
8922 base0 = build_fold_addr_expr_loc (loc, base0);
8923 if (indirect_base1)
8924 base1 = build_fold_addr_expr_loc (loc, base1);
8925 return fold_build2_loc (loc, code, type, base0, base1);
8926 }
8927 }
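
  /* Worked example (editorial): given `struct S { int f, g; } s;',
     the comparison &s.f < &s.g decomposes to the common base `s' with
     bitpos0 == 0 and bitpos1 == 32 on a target with 32-bit int, so
     the LT_EXPR case above folds the whole comparison to true at
     compile time. */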
8928
8929 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8930 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8931 the resulting offset is smaller in absolute value than the
8932 original one and has the same sign. */
8933 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8934 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8935 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8936 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8937 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8938 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8939 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8940 {
8941 tree const1 = TREE_OPERAND (arg0, 1);
8942 tree const2 = TREE_OPERAND (arg1, 1);
8943 tree variable1 = TREE_OPERAND (arg0, 0);
8944 tree variable2 = TREE_OPERAND (arg1, 0);
8945 tree cst;
8946 const char * const warnmsg = G_("assuming signed overflow does not "
8947 "occur when combining constants around "
8948 "a comparison");
8949
8950 /* Put the constant on the side where it doesn't overflow and is
8951 of lower absolute value than before, keeping the same sign. */
8952 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8953 ? MINUS_EXPR : PLUS_EXPR,
8954 const2, const1);
8955 if (!TREE_OVERFLOW (cst)
8956 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8957 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8958 {
8959 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8960 return fold_build2_loc (loc, code, type,
8961 variable1,
8962 fold_build2_loc (loc, TREE_CODE (arg1),
8963 TREE_TYPE (arg1),
8964 variable2, cst));
8965 }
8966
8967 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8968 ? MINUS_EXPR : PLUS_EXPR,
8969 const1, const2);
8970 if (!TREE_OVERFLOW (cst)
8971 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8972 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8973 {
8974 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8975 return fold_build2_loc (loc, code, type,
8976 fold_build2_loc (loc, TREE_CODE (arg0),
8977 TREE_TYPE (arg0),
8978 variable1, cst),
8979 variable2);
8980 }
8981 }
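
  /* Worked example (editorial): for signed x and y, x + 7 < y + 5
     first tries cst = 5 - 7 = -2, which flips the sign relative to 5
     and is rejected; the second attempt cst = 7 - 5 = 2 keeps the
     sign of 7 with smaller magnitude, so the comparison becomes
     x + 2 < y. */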
8982
8983 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8984 signed arithmetic case. That form is created by the compiler
8985 often enough for folding it to be of value. One example is in
8986 computing loop trip counts after Operator Strength Reduction. */
8987 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8988 && TREE_CODE (arg0) == MULT_EXPR
8989 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8990 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8991 && integer_zerop (arg1))
8992 {
8993 tree const1 = TREE_OPERAND (arg0, 1);
8994 tree const2 = arg1; /* zero */
8995 tree variable1 = TREE_OPERAND (arg0, 0);
8996 enum tree_code cmp_code = code;
8997
8998 /* Handle unfolded multiplication by zero. */
8999 if (integer_zerop (const1))
9000 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9001
9002 fold_overflow_warning (("assuming signed overflow does not occur when "
9003 "eliminating multiplication in comparison "
9004 "with zero"),
9005 WARN_STRICT_OVERFLOW_COMPARISON);
9006
9007 /* If const1 is negative we swap the sense of the comparison. */
9008 if (tree_int_cst_sgn (const1) < 0)
9009 cmp_code = swap_tree_comparison (cmp_code);
9010
9011 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9012 }
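
  /* Worked example (editorial): with undefined signed overflow,
     x * 4 > 0 folds to x > 0, while x * -4 > 0 swaps the comparison
     and folds to x < 0. */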
9013
9014 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9015 if (tem)
9016 return tem;
9017
9018 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9019 {
9020 tree targ0 = strip_float_extensions (arg0);
9021 tree targ1 = strip_float_extensions (arg1);
9022 tree newtype = TREE_TYPE (targ0);
9023
9024 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9025 newtype = TREE_TYPE (targ1);
9026
9027 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9028 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9029 return fold_build2_loc (loc, code, type,
9030 fold_convert_loc (loc, newtype, targ0),
9031 fold_convert_loc (loc, newtype, targ1));
9032
9033 /* (-a) CMP (-b) -> b CMP a */
9034 if (TREE_CODE (arg0) == NEGATE_EXPR
9035 && TREE_CODE (arg1) == NEGATE_EXPR)
9036 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9037 TREE_OPERAND (arg0, 0));
9038
9039 if (TREE_CODE (arg1) == REAL_CST)
9040 {
9041 REAL_VALUE_TYPE cst;
9042 cst = TREE_REAL_CST (arg1);
9043
9044 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9045 if (TREE_CODE (arg0) == NEGATE_EXPR)
9046 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9047 TREE_OPERAND (arg0, 0),
9048 build_real (TREE_TYPE (arg1),
9049 real_value_negate (&cst)));
9050
9051 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9052 /* a CMP (-0) -> a CMP 0 */
9053 if (REAL_VALUE_MINUS_ZERO (cst))
9054 return fold_build2_loc (loc, code, type, arg0,
9055 build_real (TREE_TYPE (arg1), dconst0));
9056
9057 /* x != NaN is always true, other ops are always false. */
9058 if (REAL_VALUE_ISNAN (cst)
9059 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9060 {
9061 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9062 return omit_one_operand_loc (loc, type, tem, arg0);
9063 }
9064
9065 /* Fold comparisons against infinity. */
9066 if (REAL_VALUE_ISINF (cst)
9067 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9068 {
9069 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9070 if (tem != NULL_TREE)
9071 return tem;
9072 }
9073 }
9074
9075 /* If this is a comparison of a real constant with a PLUS_EXPR
9076 or a MINUS_EXPR of a real constant, we can convert it into a
9077 comparison with a revised real constant as long as no overflow
9078 occurs when unsafe_math_optimizations are enabled. */
9079 if (flag_unsafe_math_optimizations
9080 && TREE_CODE (arg1) == REAL_CST
9081 && (TREE_CODE (arg0) == PLUS_EXPR
9082 || TREE_CODE (arg0) == MINUS_EXPR)
9083 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9084 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9085 ? MINUS_EXPR : PLUS_EXPR,
9086 arg1, TREE_OPERAND (arg0, 1)))
9087 && !TREE_OVERFLOW (tem))
9088 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9089
9090 /* Likewise, we can simplify a comparison of a real constant with
9091 a MINUS_EXPR whose first operand is also a real constant, i.e.
9092 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9093 floating-point types only if -fassociative-math is set. */
9094 if (flag_associative_math
9095 && TREE_CODE (arg1) == REAL_CST
9096 && TREE_CODE (arg0) == MINUS_EXPR
9097 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9098 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9099 arg1))
9100 && !TREE_OVERFLOW (tem))
9101 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9102 TREE_OPERAND (arg0, 1), tem);
9103
9104 /* Fold comparisons against built-in math functions. */
9105 if (TREE_CODE (arg1) == REAL_CST
9106 && flag_unsafe_math_optimizations
9107 && ! flag_errno_math)
9108 {
9109 enum built_in_function fcode = builtin_mathfn_code (arg0);
9110
9111 if (fcode != END_BUILTINS)
9112 {
9113 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9114 if (tem != NULL_TREE)
9115 return tem;
9116 }
9117 }
9118 }
9119
9120 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9121 && CONVERT_EXPR_P (arg0))
9122 {
9123 /* If we are widening one operand of an integer comparison,
9124 see if the other operand is similarly being widened. Perhaps we
9125 can do the comparison in the narrower type. */
9126 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9127 if (tem)
9128 return tem;
9129
9130 /* Or if we are changing signedness. */
9131 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9132 if (tem)
9133 return tem;
9134 }
9135
9136 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9137 constant, we can simplify it. */
9138 if (TREE_CODE (arg1) == INTEGER_CST
9139 && (TREE_CODE (arg0) == MIN_EXPR
9140 || TREE_CODE (arg0) == MAX_EXPR)
9141 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9142 {
9143 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9144 if (tem)
9145 return tem;
9146 }
9147
9148 /* Simplify comparison of something with itself. (For IEEE
9149 floating-point, we can only do some of these simplifications.) */
9150 if (operand_equal_p (arg0, arg1, 0))
9151 {
9152 switch (code)
9153 {
9154 case EQ_EXPR:
9155 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9156 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9157 return constant_boolean_node (1, type);
9158 break;
9159
9160 case GE_EXPR:
9161 case LE_EXPR:
9162 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9163 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9164 return constant_boolean_node (1, type);
9165 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9166
9167 case NE_EXPR:
9168 /* For NE, we can only do this simplification if integer
9169 or we don't honor IEEE floating point NaNs. */
9170 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9171 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9172 break;
9173 /* ... fall through ... */
9174 case GT_EXPR:
9175 case LT_EXPR:
9176 return constant_boolean_node (0, type);
9177 default:
9178 gcc_unreachable ();
9179 }
9180 }
9181
9182 /* If we are comparing an expression that just has comparisons
9183 of two integer values, arithmetic expressions of those comparisons,
9184 and constants, we can simplify it. There are only three cases
9185 to check: the two values can either be equal, the first can be
9186 greater, or the second can be greater. Fold the expression for
9187 those three values. Since each value must be 0 or 1, we have
9188 eight possibilities, each of which corresponds to the constant 0
9189 or 1 or one of the six possible comparisons.
9190
9191 This handles common cases like (a > b) == 0 but also handles
9192 expressions like ((x > y) - (y > x)) > 0, which supposedly
9193 occur in macroized code. */
9194
9195 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9196 {
9197 tree cval1 = 0, cval2 = 0;
9198 int save_p = 0;
9199
9200 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9201 /* Don't handle degenerate cases here; they should already
9202 have been handled anyway. */
9203 && cval1 != 0 && cval2 != 0
9204 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9205 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9206 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9207 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9208 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9209 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9210 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9211 {
9212 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9213 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9214
9215 /* We can't just pass T to eval_subst in case cval1 or cval2
9216 was the same as ARG1. */
9217
9218 tree high_result
9219 = fold_build2_loc (loc, code, type,
9220 eval_subst (loc, arg0, cval1, maxval,
9221 cval2, minval),
9222 arg1);
9223 tree equal_result
9224 = fold_build2_loc (loc, code, type,
9225 eval_subst (loc, arg0, cval1, maxval,
9226 cval2, maxval),
9227 arg1);
9228 tree low_result
9229 = fold_build2_loc (loc, code, type,
9230 eval_subst (loc, arg0, cval1, minval,
9231 cval2, maxval),
9232 arg1);
9233
9234 /* All three of these results should be 0 or 1. Confirm they are.
9235 Then use those values to select the proper code to use. */
9236
9237 if (TREE_CODE (high_result) == INTEGER_CST
9238 && TREE_CODE (equal_result) == INTEGER_CST
9239 && TREE_CODE (low_result) == INTEGER_CST)
9240 {
9241 /* Make a 3-bit mask with the high-order bit being the
9242 value for `>', the next for '=', and the low for '<'. */
9243 switch ((integer_onep (high_result) * 4)
9244 + (integer_onep (equal_result) * 2)
9245 + integer_onep (low_result))
9246 {
9247 case 0:
9248 /* Always false. */
9249 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9250 case 1:
9251 code = LT_EXPR;
9252 break;
9253 case 2:
9254 code = EQ_EXPR;
9255 break;
9256 case 3:
9257 code = LE_EXPR;
9258 break;
9259 case 4:
9260 code = GT_EXPR;
9261 break;
9262 case 5:
9263 code = NE_EXPR;
9264 break;
9265 case 6:
9266 code = GE_EXPR;
9267 break;
9268 case 7:
9269 /* Always true. */
9270 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9271 }
9272
9273 if (save_p)
9274 {
9275 tem = save_expr (build2 (code, type, cval1, cval2));
9276 SET_EXPR_LOCATION (tem, loc);
9277 return tem;
9278 }
9279 return fold_build2_loc (loc, code, type, cval1, cval2);
9280 }
9281 }
9282 }
9283
9284 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9285 into a single range test. */
9286 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9287 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9288 && TREE_CODE (arg1) == INTEGER_CST
9289 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9290 && !integer_zerop (TREE_OPERAND (arg0, 1))
9291 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9292 && !TREE_OVERFLOW (arg1))
9293 {
9294 tem = fold_div_compare (loc, code, type, arg0, arg1);
9295 if (tem != NULL_TREE)
9296 return tem;
9297 }
9298
9299 /* Fold ~X op ~Y as Y op X. */
9300 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9301 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9302 {
9303 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9304 return fold_build2_loc (loc, code, type,
9305 fold_convert_loc (loc, cmp_type,
9306 TREE_OPERAND (arg1, 0)),
9307 TREE_OPERAND (arg0, 0));
9308 }
9309
9310 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9311 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9312 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9313 {
9314 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9315 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9316 TREE_OPERAND (arg0, 0),
9317 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9318 fold_convert_loc (loc, cmp_type, arg1)));
9319 }
9320
9321 return NULL_TREE;
9322 }
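
/* Editorial sketch, not part of the original source: a standalone
   check (compile separately; the name is hypothetical) of the two
   BIT_NOT_EXPR rules at the end of fold_comparison, ~X op ~Y is
   Y op X and ~X op C is X op' ~C with op' the swapped comparison. */

#include <assert.h>

static void
example_bit_not_compare (void)
{
  for (int x = -4; x <= 4; x++)
    for (int y = -4; y <= 4; y++)
      {
        assert ((~x < ~y) == (y < x));  /* ~X < ~Y  ->  Y < X */
        assert ((~x < 5) == (x > ~5));  /* ~X < C   ->  X > ~C */
      }
}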
9323
9324
9325 /* Subroutine of fold_binary. Optimize complex multiplications of the
9326 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9327 argument EXPR represents the expression "z" of type TYPE. */
9328
9329 static tree
9330 fold_mult_zconjz (location_t loc, tree type, tree expr)
9331 {
9332 tree itype = TREE_TYPE (type);
9333 tree rpart, ipart, tem;
9334
9335 if (TREE_CODE (expr) == COMPLEX_EXPR)
9336 {
9337 rpart = TREE_OPERAND (expr, 0);
9338 ipart = TREE_OPERAND (expr, 1);
9339 }
9340 else if (TREE_CODE (expr) == COMPLEX_CST)
9341 {
9342 rpart = TREE_REALPART (expr);
9343 ipart = TREE_IMAGPART (expr);
9344 }
9345 else
9346 {
9347 expr = save_expr (expr);
9348 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9349 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9350 }
9351
9352 rpart = save_expr (rpart);
9353 ipart = save_expr (ipart);
9354 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9355 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9356 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9357 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9358 build_zero_cst (itype));
9359 }
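
/* Editorial sketch, not part of the original source: the identity
   fold_mult_zconjz implements, checked on a concrete value (compile
   separately; the name is hypothetical). For z = a + bi,
   z * conj(z) is (a*a + b*b) + 0i. */

#include <assert.h>
#include <complex.h>

static void
example_zconjz (void)
{
  double complex z = 3.0 + 4.0 * I;
  double complex p = z * conj (z);
  assert (creal (p) == 3.0 * 3.0 + 4.0 * 4.0);
  assert (cimag (p) == 0.0);
}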
9360
9361
9362 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9363 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9364 guarantees that P and N have the same least significant log2(M) bits.
9365 N is not otherwise constrained. In particular, N is not normalized to
9366 0 <= N < M as is common. In general, the precise value of P is unknown.
9367 M is chosen as large as possible such that constant N can be determined.
9368
9369 Returns M and sets *RESIDUE to N.
9370
9371 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9372 account. This is not always possible due to PR 35705.
9373 */
9374
9375 static unsigned HOST_WIDE_INT
9376 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9377 bool allow_func_align)
9378 {
9379 enum tree_code code;
9380
9381 *residue = 0;
9382
9383 code = TREE_CODE (expr);
9384 if (code == ADDR_EXPR)
9385 {
9386 unsigned int bitalign;
9387 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9388 *residue /= BITS_PER_UNIT;
9389 return bitalign / BITS_PER_UNIT;
9390 }
9391 else if (code == POINTER_PLUS_EXPR)
9392 {
9393 tree op0, op1;
9394 unsigned HOST_WIDE_INT modulus;
9395 enum tree_code inner_code;
9396
9397 op0 = TREE_OPERAND (expr, 0);
9398 STRIP_NOPS (op0);
9399 modulus = get_pointer_modulus_and_residue (op0, residue,
9400 allow_func_align);
9401
9402 op1 = TREE_OPERAND (expr, 1);
9403 STRIP_NOPS (op1);
9404 inner_code = TREE_CODE (op1);
9405 if (inner_code == INTEGER_CST)
9406 {
9407 *residue += TREE_INT_CST_LOW (op1);
9408 return modulus;
9409 }
9410 else if (inner_code == MULT_EXPR)
9411 {
9412 op1 = TREE_OPERAND (op1, 1);
9413 if (TREE_CODE (op1) == INTEGER_CST)
9414 {
9415 unsigned HOST_WIDE_INT align;
9416
9417 /* Compute the greatest power-of-2 divisor of op1. */
9418 align = TREE_INT_CST_LOW (op1);
9419 align &= -align;
9420
9421 /* If align is non-zero and less than modulus, replace
9422 modulus with align. If align is 0, then either op1 is 0
9423 or the greatest power-of-2 divisor of op1 doesn't fit in an
9424 unsigned HOST_WIDE_INT. In either case, no additional
9425 constraint is imposed. */
9426 if (align)
9427 modulus = MIN (modulus, align);
9428
9429 return modulus;
9430 }
9431 }
9432 }
9433
9434 /* If we get here, we were unable to determine anything useful about the
9435 expression. */
9436 return 1;
9437 }
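
/* Editorial sketch, not part of the original source: the
   greatest-power-of-two-divisor computation used above, standalone
   (compile separately; the name is hypothetical). If P = M*k + N,
   adding a constant C changes the residue to N + C for the same
   modulus, while adding an unknown multiple of op1 is only reliable
   up to op1's largest power-of-two factor. */

static unsigned long long
example_pow2_divisor (unsigned long long v)
{
  /* Isolate the lowest set bit: 24 -> 8, 40 -> 8, 0 -> 0; this is
     exactly the `align &= -align' step above. */
  return v & -v;
}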
9438
9439 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9440 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9441
9442 static bool
9443 vec_cst_ctor_to_array (tree arg, tree *elts)
9444 {
9445 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9446
9447 if (TREE_CODE (arg) == VECTOR_CST)
9448 {
9449 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9450 elts[i] = VECTOR_CST_ELT (arg, i);
9451 }
9452 else if (TREE_CODE (arg) == CONSTRUCTOR)
9453 {
9454 constructor_elt *elt;
9455
9456 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9457 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9458 return false;
9459 else
9460 elts[i] = elt->value;
9461 }
9462 else
9463 return false;
9464 for (; i < nelts; i++)
9465 elts[i]
9466 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9467 return true;
9468 }
9469
9470 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9471 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9472 NULL_TREE otherwise. */
9473
9474 static tree
9475 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9476 {
9477 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9478 tree *elts;
9479 bool need_ctor = false;
9480
9481 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9482 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9483 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9484 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9485 return NULL_TREE;
9486
9487 elts = XALLOCAVEC (tree, nelts * 3);
9488 if (!vec_cst_ctor_to_array (arg0, elts)
9489 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9490 return NULL_TREE;
9491
9492 for (i = 0; i < nelts; i++)
9493 {
9494 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9495 need_ctor = true;
9496 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9497 }
9498
9499 if (need_ctor)
9500 {
9501 vec<constructor_elt, va_gc> *v;
9502 vec_alloc (v, nelts);
9503 for (i = 0; i < nelts; i++)
9504 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9505 return build_constructor (type, v);
9506 }
9507 else
9508 return build_vector (type, &elts[2 * nelts]);
9509 }
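
/* Editorial sketch, not part of the original source: the selection
   rule fold_vec_perm applies, restated on plain arrays (compile
   separately; the name is hypothetical). Element i of the result
   comes from the concatenation of ARG0 and ARG1, indexed by SEL[i]. */

static void
example_vec_perm (const int *arg0, const int *arg1,
                  const unsigned char *sel, unsigned int nelts,
                  int *out)
{
  for (unsigned int i = 0; i < nelts; i++)
    out[i] = sel[i] < nelts ? arg0[sel[i]] : arg1[sel[i] - nelts];
}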
9510
9511 /* Try to fold a pointer difference of type TYPE between two address expressions of
9512 array references AREF0 and AREF1 using location LOC. Return a
9513 simplified expression for the difference or NULL_TREE. */
9514
9515 static tree
9516 fold_addr_of_array_ref_difference (location_t loc, tree type,
9517 tree aref0, tree aref1)
9518 {
9519 tree base0 = TREE_OPERAND (aref0, 0);
9520 tree base1 = TREE_OPERAND (aref1, 0);
9521 tree base_offset = build_int_cst (type, 0);
9522
9523 /* If the bases are array references as well, recurse. If the bases
9524 are pointer indirections compute the difference of the pointers.
9525 If the bases are equal, we are set. */
9526 if ((TREE_CODE (base0) == ARRAY_REF
9527 && TREE_CODE (base1) == ARRAY_REF
9528 && (base_offset
9529 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9530 || (INDIRECT_REF_P (base0)
9531 && INDIRECT_REF_P (base1)
9532 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9533 TREE_OPERAND (base0, 0),
9534 TREE_OPERAND (base1, 0))))
9535 || operand_equal_p (base0, base1, 0))
9536 {
9537 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9538 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9539 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9540 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9541 return fold_build2_loc (loc, PLUS_EXPR, type,
9542 base_offset,
9543 fold_build2_loc (loc, MULT_EXPR, type,
9544 diff, esz));
9545 }
9546 return NULL_TREE;
9547 }
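
/* Worked example (editorial): for `int a[10];' the byte difference
   (char *) &a[i] - (char *) &a[j] has equal bases and folds to
   (i - j) * sizeof (int); for `int m[4][4];' the bases m[i] and m[k]
   are themselves ARRAY_REFs, so the recursion contributes
   (i - k) * sizeof (m[0]) as base_offset. */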
9548
9549 /* If the real or vector real constant CST of type TYPE has an exact
9550 inverse, return it, else return NULL. */
9551
9552 static tree
9553 exact_inverse (tree type, tree cst)
9554 {
9555 REAL_VALUE_TYPE r;
9556 tree unit_type, *elts;
9557 machine_mode mode;
9558 unsigned vec_nelts, i;
9559
9560 switch (TREE_CODE (cst))
9561 {
9562 case REAL_CST:
9563 r = TREE_REAL_CST (cst);
9564
9565 if (exact_real_inverse (TYPE_MODE (type), &r))
9566 return build_real (type, r);
9567
9568 return NULL_TREE;
9569
9570 case VECTOR_CST:
9571 vec_nelts = VECTOR_CST_NELTS (cst);
9572 elts = XALLOCAVEC (tree, vec_nelts);
9573 unit_type = TREE_TYPE (type);
9574 mode = TYPE_MODE (unit_type);
9575
9576 for (i = 0; i < vec_nelts; i++)
9577 {
9578 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9579 if (!exact_real_inverse (mode, &r))
9580 return NULL_TREE;
9581 elts[i] = build_real (unit_type, r);
9582 }
9583
9584 return build_vector (type, elts);
9585
9586 default:
9587 return NULL_TREE;
9588 }
9589 }
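
/* Editorial sketch, not part of the original source: in binary
   floating point a finite, nonzero constant has an exactly
   representable reciprocal precisely when it is a power of two, e.g.
   0.25 -> 4.0 but not 3.0; frexp exposes that as a significand of
   exactly +/-0.5 (compile separately; the name is hypothetical and
   denormal/range edge cases are ignored). */

#include <math.h>

static int
example_has_exact_inverse (double c)
{
  int exp;
  double m = frexp (c, &exp);
  return m == 0.5 || m == -0.5;
}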
9590
9591 /* Mask out the tz least significant bits of X of type TYPE where
9592 tz is the number of trailing zeroes in Y. */
9593 static wide_int
9594 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9595 {
9596 int tz = wi::ctz (y);
9597 if (tz > 0)
9598 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9599 return x;
9600 }
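
/* Editorial sketch, not part of the original source: the same masking
   on a 32-bit unsigned value (compile separately; the name is
   hypothetical, and __builtin_ctz assumes a GCC-compatible compiler).
   With y = 24 (three trailing zeros), x loses its low three bits. */

static unsigned int
example_mask_with_tz (unsigned int x, unsigned int y)
{
  if (y == 0)
    return x;                   /* no trailing-zero information */
  int tz = __builtin_ctz (y);
  return tz > 0 ? x & ~((1u << tz) - 1) : x;
}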
9601
9602 /* Return true when T is an address and is known to be nonzero.
9603 For floating point we further ensure that T is not denormal.
9604 Similar logic is present in nonzero_address in rtlanal.h.
9605
9606 If the return value is based on the assumption that signed overflow
9607 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9608 change *STRICT_OVERFLOW_P. */
9609
9610 static bool
9611 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9612 {
9613 tree type = TREE_TYPE (t);
9614 enum tree_code code;
9615
9616 /* Doing something useful for floating point would need more work. */
9617 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9618 return false;
9619
9620 code = TREE_CODE (t);
9621 switch (TREE_CODE_CLASS (code))
9622 {
9623 case tcc_unary:
9624 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9625 strict_overflow_p);
9626 case tcc_binary:
9627 case tcc_comparison:
9628 return tree_binary_nonzero_warnv_p (code, type,
9629 TREE_OPERAND (t, 0),
9630 TREE_OPERAND (t, 1),
9631 strict_overflow_p);
9632 case tcc_constant:
9633 case tcc_declaration:
9634 case tcc_reference:
9635 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9636
9637 default:
9638 break;
9639 }
9640
9641 switch (code)
9642 {
9643 case TRUTH_NOT_EXPR:
9644 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9645 strict_overflow_p);
9646
9647 case TRUTH_AND_EXPR:
9648 case TRUTH_OR_EXPR:
9649 case TRUTH_XOR_EXPR:
9650 return tree_binary_nonzero_warnv_p (code, type,
9651 TREE_OPERAND (t, 0),
9652 TREE_OPERAND (t, 1),
9653 strict_overflow_p);
9654
9655 case COND_EXPR:
9656 case CONSTRUCTOR:
9657 case OBJ_TYPE_REF:
9658 case ASSERT_EXPR:
9659 case ADDR_EXPR:
9660 case WITH_SIZE_EXPR:
9661 case SSA_NAME:
9662 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9663
9664 case COMPOUND_EXPR:
9665 case MODIFY_EXPR:
9666 case BIND_EXPR:
9667 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9668 strict_overflow_p);
9669
9670 case SAVE_EXPR:
9671 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9672 strict_overflow_p);
9673
9674 case CALL_EXPR:
9675 {
9676 tree fndecl = get_callee_fndecl (t);
9677 if (!fndecl) return false;
9678 if (flag_delete_null_pointer_checks && !flag_check_new
9679 && DECL_IS_OPERATOR_NEW (fndecl)
9680 && !TREE_NOTHROW (fndecl))
9681 return true;
9682 if (flag_delete_null_pointer_checks
9683 && lookup_attribute ("returns_nonnull",
9684 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9685 return true;
9686 return alloca_call_p (t);
9687 }
9688
9689 default:
9690 break;
9691 }
9692 return false;
9693 }
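
/* Worked example (editorial): the CALL_EXPR case above treats the
   result of a throwing `operator new' as nonzero under
   -fdelete-null-pointer-checks and without -fcheck-new, and likewise
   any call to a function declared with
   __attribute__((returns_nonnull)), as well as alloca. */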
9694
9695 /* Return true when T is an address and is known to be nonzero.
9696 Handle warnings about undefined signed overflow. */
9697
9698 static bool
9699 tree_expr_nonzero_p (tree t)
9700 {
9701 bool ret, strict_overflow_p;
9702
9703 strict_overflow_p = false;
9704 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9705 if (strict_overflow_p)
9706 fold_overflow_warning (("assuming signed overflow does not occur when "
9707 "determining that expression is always "
9708 "non-zero"),
9709 WARN_STRICT_OVERFLOW_MISC);
9710 return ret;
9711 }
9712
9713 /* Fold a binary expression of code CODE and type TYPE with operands
9714 OP0 and OP1. LOC is the location of the resulting expression.
9715 Return the folded expression if folding is successful. Otherwise,
9716 return NULL_TREE. */
9717
9718 tree
9719 fold_binary_loc (location_t loc,
9720 enum tree_code code, tree type, tree op0, tree op1)
9721 {
9722 enum tree_code_class kind = TREE_CODE_CLASS (code);
9723 tree arg0, arg1, tem;
9724 tree t1 = NULL_TREE;
9725 bool strict_overflow_p;
9726 unsigned int prec;
9727
9728 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9729 && TREE_CODE_LENGTH (code) == 2
9730 && op0 != NULL_TREE
9731 && op1 != NULL_TREE);
9732
9733 arg0 = op0;
9734 arg1 = op1;
9735
9736 /* Strip any conversions that don't change the mode. This is
9737 safe for every expression, except for a comparison expression
9738 because its signedness is derived from its operands. So, in
9739 the latter case, only strip conversions that don't change the
9740 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9741 preserved.
9742
9743 Note that this is done as an internal manipulation within the
9744 constant folder, in order to find the simplest representation
9745 of the arguments so that their form can be studied. In any
9746 case, the appropriate type conversions should be put back in
9747 the tree that will get out of the constant folder. */
9748
9749 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9750 {
9751 STRIP_SIGN_NOPS (arg0);
9752 STRIP_SIGN_NOPS (arg1);
9753 }
9754 else
9755 {
9756 STRIP_NOPS (arg0);
9757 STRIP_NOPS (arg1);
9758 }
9759
9760 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9761 constant but we can't do arithmetic on them. */
9762 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9763 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9764 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9765 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9766 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9767 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9768 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9769 {
9770 if (kind == tcc_binary)
9771 {
9772 /* Make sure type and arg0 have the same saturating flag. */
9773 gcc_assert (TYPE_SATURATING (type)
9774 == TYPE_SATURATING (TREE_TYPE (arg0)));
9775 tem = const_binop (code, arg0, arg1);
9776 }
9777 else if (kind == tcc_comparison)
9778 tem = fold_relational_const (code, type, arg0, arg1);
9779 else
9780 tem = NULL_TREE;
9781
9782 if (tem != NULL_TREE)
9783 {
9784 if (TREE_TYPE (tem) != type)
9785 tem = fold_convert_loc (loc, type, tem);
9786 return tem;
9787 }
9788 }
9789
9790 /* If this is a commutative operation, and ARG0 is a constant, move it
9791 to ARG1 to reduce the number of tests below. */
9792 if (commutative_tree_code (code)
9793 && tree_swap_operands_p (arg0, arg1, true))
9794 return fold_build2_loc (loc, code, type, op1, op0);
9795
9796 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9797 to ARG1 to reduce the number of tests below. */
9798 if (kind == tcc_comparison
9799 && tree_swap_operands_p (arg0, arg1, true))
9800 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9801
9802 tem = generic_simplify (loc, code, type, op0, op1);
9803 if (tem)
9804 return tem;
9805
9806 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9807
9808 First check for cases where an arithmetic operation is applied to a
9809 compound, conditional, or comparison operation. Push the arithmetic
9810 operation inside the compound or conditional to see if any folding
9811 can then be done. Convert comparison to conditional for this purpose.
9812 This also optimizes non-constant cases that used to be done in
9813 expand_expr.
9814
9815 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9816 one of the operands is a comparison and the other is a comparison, a
9817 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9818 code below would make the expression more complex. Change it to a
9819 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9820 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9821
9822 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9823 || code == EQ_EXPR || code == NE_EXPR)
9824 && TREE_CODE (type) != VECTOR_TYPE
9825 && ((truth_value_p (TREE_CODE (arg0))
9826 && (truth_value_p (TREE_CODE (arg1))
9827 || (TREE_CODE (arg1) == BIT_AND_EXPR
9828 && integer_onep (TREE_OPERAND (arg1, 1)))))
9829 || (truth_value_p (TREE_CODE (arg1))
9830 && (truth_value_p (TREE_CODE (arg0))
9831 || (TREE_CODE (arg0) == BIT_AND_EXPR
9832 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9833 {
9834 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9835 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9836 : TRUTH_XOR_EXPR,
9837 boolean_type_node,
9838 fold_convert_loc (loc, boolean_type_node, arg0),
9839 fold_convert_loc (loc, boolean_type_node, arg1));
9840
9841 if (code == EQ_EXPR)
9842 tem = invert_truthvalue_loc (loc, tem);
9843
9844 return fold_convert_loc (loc, type, tem);
9845 }
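
  /* Worked example (editorial): on boolean-valued operands,
     (a < b) != (c < d) becomes TRUTH_XOR_EXPR and (a < b) == (c < d)
     becomes the inversion of that XOR, both of which later folding
     handles better than the bitwise forms. */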
9846
9847 if (TREE_CODE_CLASS (code) == tcc_binary
9848 || TREE_CODE_CLASS (code) == tcc_comparison)
9849 {
9850 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9851 {
9852 tem = fold_build2_loc (loc, code, type,
9853 fold_convert_loc (loc, TREE_TYPE (op0),
9854 TREE_OPERAND (arg0, 1)), op1);
9855 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9856 tem);
9857 }
9858 if (TREE_CODE (arg1) == COMPOUND_EXPR
9859 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9860 {
9861 tem = fold_build2_loc (loc, code, type, op0,
9862 fold_convert_loc (loc, TREE_TYPE (op1),
9863 TREE_OPERAND (arg1, 1)));
9864 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9865 tem);
9866 }
9867
9868 if (TREE_CODE (arg0) == COND_EXPR
9869 || TREE_CODE (arg0) == VEC_COND_EXPR
9870 || COMPARISON_CLASS_P (arg0))
9871 {
9872 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9873 arg0, arg1,
9874 /*cond_first_p=*/1);
9875 if (tem != NULL_TREE)
9876 return tem;
9877 }
9878
9879 if (TREE_CODE (arg1) == COND_EXPR
9880 || TREE_CODE (arg1) == VEC_COND_EXPR
9881 || COMPARISON_CLASS_P (arg1))
9882 {
9883 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9884 arg1, arg0,
9885 /*cond_first_p=*/0);
9886 if (tem != NULL_TREE)
9887 return tem;
9888 }
9889 }
9890
9891 switch (code)
9892 {
9893 case MEM_REF:
9894 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9895 if (TREE_CODE (arg0) == ADDR_EXPR
9896 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9897 {
9898 tree iref = TREE_OPERAND (arg0, 0);
9899 return fold_build2 (MEM_REF, type,
9900 TREE_OPERAND (iref, 0),
9901 int_const_binop (PLUS_EXPR, arg1,
9902 TREE_OPERAND (iref, 1)));
9903 }
9904
9905 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9906 if (TREE_CODE (arg0) == ADDR_EXPR
9907 && handled_component_p (TREE_OPERAND (arg0, 0)))
9908 {
9909 tree base;
9910 HOST_WIDE_INT coffset;
9911 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9912 &coffset);
9913 if (!base)
9914 return NULL_TREE;
9915 return fold_build2 (MEM_REF, type,
9916 build_fold_addr_expr (base),
9917 int_const_binop (PLUS_EXPR, arg1,
9918 size_int (coffset)));
9919 }
9920
9921 return NULL_TREE;
9922
9923 case POINTER_PLUS_EXPR:
9924 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9925 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9926 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9927 return fold_convert_loc (loc, type,
9928 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9929 fold_convert_loc (loc, sizetype,
9930 arg1),
9931 fold_convert_loc (loc, sizetype,
9932 arg0)));
9933
9934 /* PTR_CST +p CST -> CST1, i.e. fold the two integer constants together. */
9935 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9936 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9937 fold_convert_loc (loc, type, arg1));
9938
9939 return NULL_TREE;
9940
9941 case PLUS_EXPR:
9942 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9943 {
9944 /* X + (X / CST) * -CST is X % CST. */
9945 if (TREE_CODE (arg1) == MULT_EXPR
9946 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9947 && operand_equal_p (arg0,
9948 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9949 {
9950 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9951 tree cst1 = TREE_OPERAND (arg1, 1);
9952 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9953 cst1, cst0);
9954 if (sum && integer_zerop (sum))
9955 return fold_convert_loc (loc, type,
9956 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9957 TREE_TYPE (arg0), arg0,
9958 cst0));
9959 }
9960 }
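
  /* Worked example (editorial): x + (x / 16) * -16 passes the
     sum-is-zero test above (-16 + 16 == 0) and folds to x % 16, the
     truncating remainder. */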
9961
9962 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9963 one. Make sure the type is not saturating and has the signedness of
9964 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9965 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9966 if ((TREE_CODE (arg0) == MULT_EXPR
9967 || TREE_CODE (arg1) == MULT_EXPR)
9968 && !TYPE_SATURATING (type)
9969 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9970 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9971 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9972 {
9973 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9974 if (tem)
9975 return tem;
9976 }
9977
9978 if (! FLOAT_TYPE_P (type))
9979 {
9980 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9981 with a constant, and the two constants have no bits in common,
9982 we should treat this as a BIT_IOR_EXPR since this may produce more
9983 simplifications. */
9984 if (TREE_CODE (arg0) == BIT_AND_EXPR
9985 && TREE_CODE (arg1) == BIT_AND_EXPR
9986 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9987 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9988 && wi::bit_and (TREE_OPERAND (arg0, 1),
9989 TREE_OPERAND (arg1, 1)) == 0)
9990 {
9991 code = BIT_IOR_EXPR;
9992 goto bit_ior;
9993 }
9994
9995 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9996 (plus (plus (mult) (mult)) (foo)) so that we can
9997 take advantage of the factoring cases below. */
9998 if (TYPE_OVERFLOW_WRAPS (type)
9999 && (((TREE_CODE (arg0) == PLUS_EXPR
10000 || TREE_CODE (arg0) == MINUS_EXPR)
10001 && TREE_CODE (arg1) == MULT_EXPR)
10002 || ((TREE_CODE (arg1) == PLUS_EXPR
10003 || TREE_CODE (arg1) == MINUS_EXPR)
10004 && TREE_CODE (arg0) == MULT_EXPR)))
10005 {
10006 tree parg0, parg1, parg, marg;
10007 enum tree_code pcode;
10008
10009 if (TREE_CODE (arg1) == MULT_EXPR)
10010 parg = arg0, marg = arg1;
10011 else
10012 parg = arg1, marg = arg0;
10013 pcode = TREE_CODE (parg);
10014 parg0 = TREE_OPERAND (parg, 0);
10015 parg1 = TREE_OPERAND (parg, 1);
10016 STRIP_NOPS (parg0);
10017 STRIP_NOPS (parg1);
10018
10019 if (TREE_CODE (parg0) == MULT_EXPR
10020 && TREE_CODE (parg1) != MULT_EXPR)
10021 return fold_build2_loc (loc, pcode, type,
10022 fold_build2_loc (loc, PLUS_EXPR, type,
10023 fold_convert_loc (loc, type,
10024 parg0),
10025 fold_convert_loc (loc, type,
10026 marg)),
10027 fold_convert_loc (loc, type, parg1));
10028 if (TREE_CODE (parg0) != MULT_EXPR
10029 && TREE_CODE (parg1) == MULT_EXPR)
10030 return
10031 fold_build2_loc (loc, PLUS_EXPR, type,
10032 fold_convert_loc (loc, type, parg0),
10033 fold_build2_loc (loc, pcode, type,
10034 fold_convert_loc (loc, type, marg),
10035 fold_convert_loc (loc, type,
10036 parg1)));
10037 }
10038 }
10039 else
10040 {
10041 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10042 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10043 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10044
10045 /* Likewise if the operands are reversed. */
10046 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10047 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10048
10049 /* Convert X + -C into X - C. */
10050 if (TREE_CODE (arg1) == REAL_CST
10051 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10052 {
10053 tem = fold_negate_const (arg1, type);
10054 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10055 return fold_build2_loc (loc, MINUS_EXPR, type,
10056 fold_convert_loc (loc, type, arg0),
10057 fold_convert_loc (loc, type, tem));
10058 }
10059
10060 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10061 to __complex__ ( x, y ). This is not the same for SNaNs or
10062 if signed zeros are involved. */
10063 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10064 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10065 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10066 {
10067 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10068 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10069 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10070 bool arg0rz = false, arg0iz = false;
10071 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10072 || (arg0i && (arg0iz = real_zerop (arg0i))))
10073 {
10074 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10075 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10076 if (arg0rz && arg1i && real_zerop (arg1i))
10077 {
10078 tree rp = arg1r ? arg1r
10079 : build1 (REALPART_EXPR, rtype, arg1);
10080 tree ip = arg0i ? arg0i
10081 : build1 (IMAGPART_EXPR, rtype, arg0);
10082 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10083 }
10084 else if (arg0iz && arg1r && real_zerop (arg1r))
10085 {
10086 tree rp = arg0r ? arg0r
10087 : build1 (REALPART_EXPR, rtype, arg0);
10088 tree ip = arg1i ? arg1i
10089 : build1 (IMAGPART_EXPR, rtype, arg1);
10090 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10091 }
10092 }
10093 }
10094
10095 if (flag_unsafe_math_optimizations
10096 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10097 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10098 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10099 return tem;
10100
10101 /* Convert x+x into x*2.0. */
10102 if (operand_equal_p (arg0, arg1, 0)
10103 && SCALAR_FLOAT_TYPE_P (type))
10104 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10105 build_real (type, dconst2));
10106
10107 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10108 We associate floats only if the user has specified
10109 -fassociative-math. */
10110 if (flag_associative_math
10111 && TREE_CODE (arg1) == PLUS_EXPR
10112 && TREE_CODE (arg0) != MULT_EXPR)
10113 {
10114 tree tree10 = TREE_OPERAND (arg1, 0);
10115 tree tree11 = TREE_OPERAND (arg1, 1);
10116 if (TREE_CODE (tree11) == MULT_EXPR
10117 && TREE_CODE (tree10) == MULT_EXPR)
10118 {
10119 tree tree0;
10120 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10121 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10122 }
10123 }
10124 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10125 We associate floats only if the user has specified
10126 -fassociative-math. */
10127 if (flag_associative_math
10128 && TREE_CODE (arg0) == PLUS_EXPR
10129 && TREE_CODE (arg1) != MULT_EXPR)
10130 {
10131 tree tree00 = TREE_OPERAND (arg0, 0);
10132 tree tree01 = TREE_OPERAND (arg0, 1);
10133 if (TREE_CODE (tree01) == MULT_EXPR
10134 && TREE_CODE (tree00) == MULT_EXPR)
10135 {
10136 tree tree0;
10137 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10138 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10139 }
10140 }
10141 }
10142
10143 bit_rotate:
10144 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10145 is a rotate of A by C1 bits. */
10146 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10147 is a rotate of A by B bits. */
10148 {
10149 enum tree_code code0, code1;
10150 tree rtype;
10151 code0 = TREE_CODE (arg0);
10152 code1 = TREE_CODE (arg1);
10153 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10154 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10155 && operand_equal_p (TREE_OPERAND (arg0, 0),
10156 TREE_OPERAND (arg1, 0), 0)
10157 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10158 TYPE_UNSIGNED (rtype))
10159 /* Only create rotates in complete modes. Other cases are not
10160 expanded properly. */
10161 && (element_precision (rtype)
10162 == element_precision (TYPE_MODE (rtype))))
10163 {
10164 tree tree01, tree11;
10165 enum tree_code code01, code11;
10166
10167 tree01 = TREE_OPERAND (arg0, 1);
10168 tree11 = TREE_OPERAND (arg1, 1);
10169 STRIP_NOPS (tree01);
10170 STRIP_NOPS (tree11);
10171 code01 = TREE_CODE (tree01);
10172 code11 = TREE_CODE (tree11);
10173 if (code01 == INTEGER_CST
10174 && code11 == INTEGER_CST
10175 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10176 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10177 {
10178 tem = build2_loc (loc, LROTATE_EXPR,
10179 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10180 TREE_OPERAND (arg0, 0),
10181 code0 == LSHIFT_EXPR ? tree01 : tree11);
10182 return fold_convert_loc (loc, type, tem);
10183 }
10184 else if (code11 == MINUS_EXPR)
10185 {
10186 tree tree110, tree111;
10187 tree110 = TREE_OPERAND (tree11, 0);
10188 tree111 = TREE_OPERAND (tree11, 1);
10189 STRIP_NOPS (tree110);
10190 STRIP_NOPS (tree111);
10191 if (TREE_CODE (tree110) == INTEGER_CST
10192 && 0 == compare_tree_int (tree110,
10193 element_precision
10194 (TREE_TYPE (TREE_OPERAND
10195 (arg0, 0))))
10196 && operand_equal_p (tree01, tree111, 0))
10197 return
10198 fold_convert_loc (loc, type,
10199 build2 ((code0 == LSHIFT_EXPR
10200 ? LROTATE_EXPR
10201 : RROTATE_EXPR),
10202 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10203 TREE_OPERAND (arg0, 0), tree01));
10204 }
10205 else if (code01 == MINUS_EXPR)
10206 {
10207 tree tree010, tree011;
10208 tree010 = TREE_OPERAND (tree01, 0);
10209 tree011 = TREE_OPERAND (tree01, 1);
10210 STRIP_NOPS (tree010);
10211 STRIP_NOPS (tree011);
10212 if (TREE_CODE (tree010) == INTEGER_CST
10213 && 0 == compare_tree_int (tree010,
10214 element_precision
10215 (TREE_TYPE (TREE_OPERAND
10216 (arg0, 0))))
10217 && operand_equal_p (tree11, tree011, 0))
10218 return fold_convert_loc
10219 (loc, type,
10220 build2 ((code0 != LSHIFT_EXPR
10221 ? LROTATE_EXPR
10222 : RROTATE_EXPR),
10223 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10224 TREE_OPERAND (arg0, 0), tree11));
10225 }
10226 }
10227 }
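
  /* Worked example (editorial): for unsigned 32-bit A,
     (A << 3) + (A >> 29) satisfies 3 + 29 == 32 and is rewritten as a
     rotate left by 3, while (A << B) + (A >> (32 - B)) matches the
     MINUS_EXPR form and becomes a rotate by B. */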
10228
10229 associate:
10230 /* In most languages, we can't associate operations on floats through
10231 parentheses. Rather than remember where the parentheses were, we
10232 don't associate floats at all, unless the user has specified
10233 -fassociative-math.
10234 And, we need to make sure type is not saturating. */
10235
10236 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10237 && !TYPE_SATURATING (type))
10238 {
10239 tree var0, con0, lit0, minus_lit0;
10240 tree var1, con1, lit1, minus_lit1;
10241 tree atype = type;
10242 bool ok = true;
10243
10244 /* Split both trees into variables, constants, and literals. Then
10245 associate each group together, the constants with literals,
10246 then the result with variables. This increases the chances of
10247 literals being recombined later and of generating relocatable
10248 expressions for the sum of a constant and literal. */
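/* For example, (x + 3) + (y + 5) splits into variable parts x and y
   and literal parts 3 and 5; recombining the literals first yields
   (x + y) + 8.  */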
10249 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10250 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10251 code == MINUS_EXPR);
10252
10253 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10254 if (code == MINUS_EXPR)
10255 code = PLUS_EXPR;
10256
10257 /* With undefined overflow prefer doing association in a type
10258 which wraps on overflow, if that is one of the operand types. */
10259 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10260 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10261 {
10262 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10263 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10264 atype = TREE_TYPE (arg0);
10265 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10266 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10267 atype = TREE_TYPE (arg1);
10268 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10269 }
10270
10271 /* With undefined overflow we can only associate constants with one
10272 variable, and constants whose association doesn't overflow. */
10273 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10274 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10275 {
10276 if (var0 && var1)
10277 {
10278 tree tmp0 = var0;
10279 tree tmp1 = var1;
10280
10281 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10282 tmp0 = TREE_OPERAND (tmp0, 0);
10283 if (CONVERT_EXPR_P (tmp0)
10284 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10285 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10286 <= TYPE_PRECISION (atype)))
10287 tmp0 = TREE_OPERAND (tmp0, 0);
10288 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10289 tmp1 = TREE_OPERAND (tmp1, 0);
10290 if (CONVERT_EXPR_P (tmp1)
10291 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10292 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10293 <= TYPE_PRECISION (atype)))
10294 tmp1 = TREE_OPERAND (tmp1, 0);
10295 /* The only case we can still associate with two variables
10296 is if they are the same, modulo negation and bit-pattern
10297 preserving conversions. */
10298 if (!operand_equal_p (tmp0, tmp1, 0))
10299 ok = false;
10300 }
10301 }
10302
10303 /* Only do something if we found more than two objects. Otherwise,
10304 nothing has changed and we risk infinite recursion. */
10305 if (ok
10306 && (2 < ((var0 != 0) + (var1 != 0)
10307 + (con0 != 0) + (con1 != 0)
10308 + (lit0 != 0) + (lit1 != 0)
10309 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10310 {
10311 bool any_overflows = false;
10312 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10313 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10314 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10315 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10316 var0 = associate_trees (loc, var0, var1, code, atype);
10317 con0 = associate_trees (loc, con0, con1, code, atype);
10318 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10319 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10320 code, atype);
10321
10322 /* Preserve the MINUS_EXPR if the negative part of the literal is
10323 greater than the positive part. Otherwise, the multiplicative
10324 folding code (i.e. extract_muldiv) may be fooled when unsigned
10325 constants are subtracted, as in the following
10326 example: ((X*2 + 4) - 8U)/2. */
10327 if (minus_lit0 && lit0)
10328 {
10329 if (TREE_CODE (lit0) == INTEGER_CST
10330 && TREE_CODE (minus_lit0) == INTEGER_CST
10331 && tree_int_cst_lt (lit0, minus_lit0))
10332 {
10333 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10334 MINUS_EXPR, atype);
10335 lit0 = 0;
10336 }
10337 else
10338 {
10339 lit0 = associate_trees (loc, lit0, minus_lit0,
10340 MINUS_EXPR, atype);
10341 minus_lit0 = 0;
10342 }
10343 }
10344
10345 /* Don't introduce overflows through reassociation. */
10346 if (!any_overflows
10347 && ((lit0 && TREE_OVERFLOW (lit0))
10348 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10349 return NULL_TREE;
10350
10351 if (minus_lit0)
10352 {
10353 if (con0 == 0)
10354 return
10355 fold_convert_loc (loc, type,
10356 associate_trees (loc, var0, minus_lit0,
10357 MINUS_EXPR, atype));
10358 else
10359 {
10360 con0 = associate_trees (loc, con0, minus_lit0,
10361 MINUS_EXPR, atype);
10362 return
10363 fold_convert_loc (loc, type,
10364 associate_trees (loc, var0, con0,
10365 PLUS_EXPR, atype));
10366 }
10367 }
10368
10369 con0 = associate_trees (loc, con0, lit0, code, atype);
10370 return
10371 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10372 code, atype));
10373 }
10374 }
10375
10376 return NULL_TREE;
10377
10378 case MINUS_EXPR:
10379 /* Pointer simplifications for subtraction, simple reassociations. */
10380 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10381 {
10382 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10383 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10384 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10385 {
10386 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10387 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10388 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10389 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10390 return fold_build2_loc (loc, PLUS_EXPR, type,
10391 fold_build2_loc (loc, MINUS_EXPR, type,
10392 arg00, arg10),
10393 fold_build2_loc (loc, MINUS_EXPR, type,
10394 arg01, arg11));
10395 }
10396 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10397 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10398 {
10399 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10400 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10401 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10402 fold_convert_loc (loc, type, arg1));
10403 if (tmp)
10404 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10405 }
10406 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10407 simplifies. */
10408 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10409 {
10410 tree arg10 = fold_convert_loc (loc, type,
10411 TREE_OPERAND (arg1, 0));
10412 tree arg11 = fold_convert_loc (loc, type,
10413 TREE_OPERAND (arg1, 1));
10414 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10415 fold_convert_loc (loc, type, arg0),
10416 arg10);
10417 if (tmp)
10418 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10419 }
10420 }
10421 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10422 if (TREE_CODE (arg0) == NEGATE_EXPR
10423 && negate_expr_p (arg1)
10424 && reorder_operands_p (arg0, arg1))
10425 return fold_build2_loc (loc, MINUS_EXPR, type,
10426 fold_convert_loc (loc, type,
10427 negate_expr (arg1)),
10428 fold_convert_loc (loc, type,
10429 TREE_OPERAND (arg0, 0)));
10430 /* Convert -A - 1 to ~A. */
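/* This rests on the two's complement identity ~A == -A - 1, which
   holds whenever overflow wraps instead of trapping.  */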
10431 if (TREE_CODE (arg0) == NEGATE_EXPR
10432 && integer_each_onep (arg1)
10433 && !TYPE_OVERFLOW_TRAPS (type))
10434 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10435 fold_convert_loc (loc, type,
10436 TREE_OPERAND (arg0, 0)));
10437
10438 /* Convert -1 - A to ~A. */
10439 if (TREE_CODE (type) != COMPLEX_TYPE
10440 && integer_all_onesp (arg0))
10441 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10442
10444 /* X - (X / Y) * Y is X % Y. */
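/* For example, with X == 7 and Y == 3, truncating division gives
   7 - (7 / 3) * 3 == 7 - 6 == 1, which equals 7 % 3.  */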
10445 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10446 && TREE_CODE (arg1) == MULT_EXPR
10447 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10448 && operand_equal_p (arg0,
10449 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10450 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10451 TREE_OPERAND (arg1, 1), 0))
10452 return
10453 fold_convert_loc (loc, type,
10454 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10455 arg0, TREE_OPERAND (arg1, 1)));
10456
10457 if (! FLOAT_TYPE_P (type))
10458 {
10459 if (integer_zerop (arg0))
10460 return negate_expr (fold_convert_loc (loc, type, arg1));
10461
10462 /* Fold A - (A & B) into ~B & A. */
10463 if (!TREE_SIDE_EFFECTS (arg0)
10464 && TREE_CODE (arg1) == BIT_AND_EXPR)
10465 {
10466 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10467 {
10468 tree arg10 = fold_convert_loc (loc, type,
10469 TREE_OPERAND (arg1, 0));
10470 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10471 fold_build1_loc (loc, BIT_NOT_EXPR,
10472 type, arg10),
10473 fold_convert_loc (loc, type, arg0));
10474 }
10475 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10476 {
10477 tree arg11 = fold_convert_loc (loc,
10478 type, TREE_OPERAND (arg1, 1));
10479 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10480 fold_build1_loc (loc, BIT_NOT_EXPR,
10481 type, arg11),
10482 fold_convert_loc (loc, type, arg0));
10483 }
10484 }
10485
10486 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10487 any power of 2 minus 1. */
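/* For example, with A == 22 and B == 15:
   (22 & ~15) - (22 & 15) == 16 - 6 == 10, and
   (22 ^ 15) - 15 == 25 - 15 == 10.  */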
10488 if (TREE_CODE (arg0) == BIT_AND_EXPR
10489 && TREE_CODE (arg1) == BIT_AND_EXPR
10490 && operand_equal_p (TREE_OPERAND (arg0, 0),
10491 TREE_OPERAND (arg1, 0), 0))
10492 {
10493 tree mask0 = TREE_OPERAND (arg0, 1);
10494 tree mask1 = TREE_OPERAND (arg1, 1);
10495 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10496
10497 if (operand_equal_p (tem, mask1, 0))
10498 {
10499 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10500 TREE_OPERAND (arg0, 0), mask1);
10501 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10502 }
10503 }
10504 }
10505
10506 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10507 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10508 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10509
10510 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10511 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10512 (-ARG1 + ARG0) reduces to -ARG1. */
10513 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10514 return negate_expr (fold_convert_loc (loc, type, arg1));
10515
10516 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10517 __complex__ ( x, -y ). This is not the same for SNaNs or if
10518 signed zeros are involved. */
10519 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10520 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10521 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10522 {
10523 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10524 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10525 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10526 bool arg0rz = false, arg0iz = false;
10527 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10528 || (arg0i && (arg0iz = real_zerop (arg0i))))
10529 {
10530 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10531 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10532 if (arg0rz && arg1i && real_zerop (arg1i))
10533 {
10534 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10535 arg1r ? arg1r
10536 : build1 (REALPART_EXPR, rtype, arg1));
10537 tree ip = arg0i ? arg0i
10538 : build1 (IMAGPART_EXPR, rtype, arg0);
10539 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10540 }
10541 else if (arg0iz && arg1r && real_zerop (arg1r))
10542 {
10543 tree rp = arg0r ? arg0r
10544 : build1 (REALPART_EXPR, rtype, arg0);
10545 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10546 arg1i ? arg1i
10547 : build1 (IMAGPART_EXPR, rtype, arg1));
10548 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10549 }
10550 }
10551 }
10552
10553 /* A - B -> A + (-B) if B is easily negatable. */
10554 if (negate_expr_p (arg1)
10555 && ((FLOAT_TYPE_P (type)
10556 /* Avoid this transformation if B is a positive REAL_CST. */
10557 && (TREE_CODE (arg1) != REAL_CST
10558 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10559 || INTEGRAL_TYPE_P (type)))
10560 return fold_build2_loc (loc, PLUS_EXPR, type,
10561 fold_convert_loc (loc, type, arg0),
10562 fold_convert_loc (loc, type,
10563 negate_expr (arg1)));
10564
10565 /* Try folding difference of addresses. */
10566 {
10567 HOST_WIDE_INT diff;
10568
10569 if ((TREE_CODE (arg0) == ADDR_EXPR
10570 || TREE_CODE (arg1) == ADDR_EXPR)
10571 && ptr_difference_const (arg0, arg1, &diff))
10572 return build_int_cst_type (type, diff);
10573 }
10574
10575 /* Fold &a[i] - &a[j] to i-j. */
10576 if (TREE_CODE (arg0) == ADDR_EXPR
10577 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10578 && TREE_CODE (arg1) == ADDR_EXPR
10579 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10580 {
10581 tree tem = fold_addr_of_array_ref_difference (loc, type,
10582 TREE_OPERAND (arg0, 0),
10583 TREE_OPERAND (arg1, 0));
10584 if (tem)
10585 return tem;
10586 }
10587
10588 if (FLOAT_TYPE_P (type)
10589 && flag_unsafe_math_optimizations
10590 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10591 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10592 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10593 return tem;
10594
10595 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10596 one. Make sure the type is not saturating and has the signedness of
10597 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10598 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10599 if ((TREE_CODE (arg0) == MULT_EXPR
10600 || TREE_CODE (arg1) == MULT_EXPR)
10601 && !TYPE_SATURATING (type)
10602 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10603 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10604 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10605 {
10606 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10607 if (tem)
10608 return tem;
10609 }
10610
10611 goto associate;
10612
10613 case MULT_EXPR:
10614 /* (-A) * (-B) -> A * B */
10615 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10616 return fold_build2_loc (loc, MULT_EXPR, type,
10617 fold_convert_loc (loc, type,
10618 TREE_OPERAND (arg0, 0)),
10619 fold_convert_loc (loc, type,
10620 negate_expr (arg1)));
10621 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10622 return fold_build2_loc (loc, MULT_EXPR, type,
10623 fold_convert_loc (loc, type,
10624 negate_expr (arg0)),
10625 fold_convert_loc (loc, type,
10626 TREE_OPERAND (arg1, 0)));
10627
10628 if (! FLOAT_TYPE_P (type))
10629 {
10630 /* Transform x * -1 into -x. Make sure to do the negation
10631 on the original operand with conversions not stripped
10632 because we can only strip non-sign-changing conversions. */
10633 if (integer_minus_onep (arg1))
10634 return fold_convert_loc (loc, type, negate_expr (op0));
10635 /* Transform x * -C into -x * C if x is easily negatable. */
10636 if (TREE_CODE (arg1) == INTEGER_CST
10637 && tree_int_cst_sgn (arg1) == -1
10638 && negate_expr_p (arg0)
10639 && (tem = negate_expr (arg1)) != arg1
10640 && !TREE_OVERFLOW (tem))
10641 return fold_build2_loc (loc, MULT_EXPR, type,
10642 fold_convert_loc (loc, type,
10643 negate_expr (arg0)),
10644 tem);
10645
10646 /* (a * (1 << b)) is (a << b) */
10647 if (TREE_CODE (arg1) == LSHIFT_EXPR
10648 && integer_onep (TREE_OPERAND (arg1, 0)))
10649 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10650 TREE_OPERAND (arg1, 1));
10651 if (TREE_CODE (arg0) == LSHIFT_EXPR
10652 && integer_onep (TREE_OPERAND (arg0, 0)))
10653 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10654 TREE_OPERAND (arg0, 1));
10655
10656 /* (A + A) * C -> A * 2 * C */
10657 if (TREE_CODE (arg0) == PLUS_EXPR
10658 && TREE_CODE (arg1) == INTEGER_CST
10659 && operand_equal_p (TREE_OPERAND (arg0, 0),
10660 TREE_OPERAND (arg0, 1), 0))
10661 return fold_build2_loc (loc, MULT_EXPR, type,
10662 omit_one_operand_loc (loc, type,
10663 TREE_OPERAND (arg0, 0),
10664 TREE_OPERAND (arg0, 1)),
10665 fold_build2_loc (loc, MULT_EXPR, type,
10666 build_int_cst (type, 2), arg1));
10667
10668 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10669 sign-changing only. */
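/* Here /[ex] denotes EXACT_DIV_EXPR, a division known to leave no
   remainder; e.g. for signed X, (unsigned) (X /[ex] 4) * 4 folds to
   (unsigned) X.  */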
10670 if (TREE_CODE (arg1) == INTEGER_CST
10671 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10672 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10673 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10674
10675 strict_overflow_p = false;
10676 if (TREE_CODE (arg1) == INTEGER_CST
10677 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10678 &strict_overflow_p)))
10679 {
10680 if (strict_overflow_p)
10681 fold_overflow_warning (("assuming signed overflow does not "
10682 "occur when simplifying "
10683 "multiplication"),
10684 WARN_STRICT_OVERFLOW_MISC);
10685 return fold_convert_loc (loc, type, tem);
10686 }
10687
10688 /* Optimize z * conj(z) for integer complex numbers. */
10689 if (TREE_CODE (arg0) == CONJ_EXPR
10690 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10691 return fold_mult_zconjz (loc, type, arg1);
10692 if (TREE_CODE (arg1) == CONJ_EXPR
10693 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10694 return fold_mult_zconjz (loc, type, arg0);
10695 }
10696 else
10697 {
10698 /* Maybe fold x * 0 to 0. The expressions aren't the same
10699 when x is NaN, since x * 0 is also NaN. Nor are they the
10700 same in modes with signed zeros, since multiplying a
10701 negative value by 0 gives -0, not +0. */
10702 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10703 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10704 && real_zerop (arg1))
10705 return omit_one_operand_loc (loc, type, arg1, arg0);
10706 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10707 Likewise for complex arithmetic with signed zeros. */
10708 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10709 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10710 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10711 && real_onep (arg1))
10712 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10713
10714 /* Transform x * -1.0 into -x. */
10715 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10716 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10717 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10718 && real_minus_onep (arg1))
10719 return fold_convert_loc (loc, type, negate_expr (arg0));
10720
10721 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10722 the result for floating-point types due to rounding, so it is applied
10723 only if -fassociative-math is specified. */
10724 if (flag_associative_math
10725 && TREE_CODE (arg0) == RDIV_EXPR
10726 && TREE_CODE (arg1) == REAL_CST
10727 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10728 {
10729 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10730 arg1);
10731 if (tem)
10732 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10733 TREE_OPERAND (arg0, 1));
10734 }
10735
10736 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10737 if (operand_equal_p (arg0, arg1, 0))
10738 {
10739 tree tem = fold_strip_sign_ops (arg0);
10740 if (tem != NULL_TREE)
10741 {
10742 tem = fold_convert_loc (loc, type, tem);
10743 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10744 }
10745 }
10746
10747 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10748 This is not the same for NaNs or if signed zeros are
10749 involved. */
10750 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10751 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10752 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10753 && TREE_CODE (arg1) == COMPLEX_CST
10754 && real_zerop (TREE_REALPART (arg1)))
10755 {
10756 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10757 if (real_onep (TREE_IMAGPART (arg1)))
10758 return
10759 fold_build2_loc (loc, COMPLEX_EXPR, type,
10760 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10761 rtype, arg0)),
10762 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10763 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10764 return
10765 fold_build2_loc (loc, COMPLEX_EXPR, type,
10766 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10767 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10768 rtype, arg0)));
10769 }
10770
10771 /* Optimize z * conj(z) for floating point complex numbers.
10772 Guarded by flag_unsafe_math_optimizations as non-finite
10773 imaginary components don't produce scalar results. */
10774 if (flag_unsafe_math_optimizations
10775 && TREE_CODE (arg0) == CONJ_EXPR
10776 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10777 return fold_mult_zconjz (loc, type, arg1);
10778 if (flag_unsafe_math_optimizations
10779 && TREE_CODE (arg1) == CONJ_EXPR
10780 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10781 return fold_mult_zconjz (loc, type, arg0);
10782
10783 if (flag_unsafe_math_optimizations)
10784 {
10785 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10786 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10787
10788 /* Optimizations of root(...)*root(...). */
10789 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10790 {
10791 tree rootfn, arg;
10792 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10793 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10794
10795 /* Optimize sqrt(x)*sqrt(x) as x. */
10796 if (BUILTIN_SQRT_P (fcode0)
10797 && operand_equal_p (arg00, arg10, 0)
10798 && ! HONOR_SNANS (TYPE_MODE (type)))
10799 return arg00;
10800
10801 /* Optimize root(x)*root(y) as root(x*y). */
10802 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10803 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10804 return build_call_expr_loc (loc, rootfn, 1, arg);
10805 }
10806
10807 /* Optimize expN(x)*expN(y) as expN(x+y). */
10808 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10809 {
10810 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10811 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10812 CALL_EXPR_ARG (arg0, 0),
10813 CALL_EXPR_ARG (arg1, 0));
10814 return build_call_expr_loc (loc, expfn, 1, arg);
10815 }
10816
10817 /* Optimizations of pow(...)*pow(...). */
10818 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10819 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10820 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10821 {
10822 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10823 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10824 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10825 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10826
10827 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10828 if (operand_equal_p (arg01, arg11, 0))
10829 {
10830 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10831 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10832 arg00, arg10);
10833 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10834 }
10835
10836 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10837 if (operand_equal_p (arg00, arg10, 0))
10838 {
10839 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10840 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10841 arg01, arg11);
10842 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10843 }
10844 }
10845
10846 /* Optimize tan(x)*cos(x) as sin(x). */
10847 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10848 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10849 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10850 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10851 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10852 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10853 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10854 CALL_EXPR_ARG (arg1, 0), 0))
10855 {
10856 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10857
10858 if (sinfn != NULL_TREE)
10859 return build_call_expr_loc (loc, sinfn, 1,
10860 CALL_EXPR_ARG (arg0, 0));
10861 }
10862
10863 /* Optimize x*pow(x,c) as pow(x,c+1). */
10864 if (fcode1 == BUILT_IN_POW
10865 || fcode1 == BUILT_IN_POWF
10866 || fcode1 == BUILT_IN_POWL)
10867 {
10868 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10869 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10870 if (TREE_CODE (arg11) == REAL_CST
10871 && !TREE_OVERFLOW (arg11)
10872 && operand_equal_p (arg0, arg10, 0))
10873 {
10874 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10875 REAL_VALUE_TYPE c;
10876 tree arg;
10877
10878 c = TREE_REAL_CST (arg11);
10879 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10880 arg = build_real (type, c);
10881 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10882 }
10883 }
10884
10885 /* Optimize pow(x,c)*x as pow(x,c+1). */
10886 if (fcode0 == BUILT_IN_POW
10887 || fcode0 == BUILT_IN_POWF
10888 || fcode0 == BUILT_IN_POWL)
10889 {
10890 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10891 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10892 if (TREE_CODE (arg01) == REAL_CST
10893 && !TREE_OVERFLOW (arg01)
10894 && operand_equal_p (arg1, arg00, 0))
10895 {
10896 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10897 REAL_VALUE_TYPE c;
10898 tree arg;
10899
10900 c = TREE_REAL_CST (arg01);
10901 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10902 arg = build_real (type, c);
10903 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10904 }
10905 }
10906
10907 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10908 if (!in_gimple_form
10909 && optimize
10910 && operand_equal_p (arg0, arg1, 0))
10911 {
10912 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10913
10914 if (powfn)
10915 {
10916 tree arg = build_real (type, dconst2);
10917 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10918 }
10919 }
10920 }
10921 }
10922 goto associate;
10923
10924 case BIT_IOR_EXPR:
10925 bit_ior:
10926 /* ~X | X is -1. */
10927 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10928 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10929 {
10930 t1 = build_zero_cst (type);
10931 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10932 return omit_one_operand_loc (loc, type, t1, arg1);
10933 }
10934
10935 /* X | ~X is -1. */
10936 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10937 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10938 {
10939 t1 = build_zero_cst (type);
10940 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10941 return omit_one_operand_loc (loc, type, t1, arg0);
10942 }
10943
10944 /* Canonicalize (X & C1) | C2. */
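/* For example, (X & 0x0f) | 0x03 is canonicalized to (X & 0x0c) | 0x03:
   mask bits already forced on by C2 are dropped from C1, unless C1 can
   instead be widened to a full mode mask, which helps later folding.  */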
10945 if (TREE_CODE (arg0) == BIT_AND_EXPR
10946 && TREE_CODE (arg1) == INTEGER_CST
10947 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10948 {
10949 int width = TYPE_PRECISION (type), w;
10950 wide_int c1 = TREE_OPERAND (arg0, 1);
10951 wide_int c2 = arg1;
10952
10953 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10954 if ((c1 & c2) == c1)
10955 return omit_one_operand_loc (loc, type, arg1,
10956 TREE_OPERAND (arg0, 0));
10957
10958 wide_int msk = wi::mask (width, false,
10959 TYPE_PRECISION (TREE_TYPE (arg1)));
10960
10961 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10962 if (msk.and_not (c1 | c2) == 0)
10963 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10964 TREE_OPERAND (arg0, 0), arg1);
10965
10966 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10967 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10968 mode which allows further optimizations. */
10969 c1 &= msk;
10970 c2 &= msk;
10971 wide_int c3 = c1.and_not (c2);
10972 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10973 {
10974 wide_int mask = wi::mask (w, false,
10975 TYPE_PRECISION (type));
10976 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10977 {
10978 c3 = mask;
10979 break;
10980 }
10981 }
10982
10983 if (c3 != c1)
10984 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10985 fold_build2_loc (loc, BIT_AND_EXPR, type,
10986 TREE_OPERAND (arg0, 0),
10987 wide_int_to_tree (type,
10988 c3)),
10989 arg1);
10990 }
10991
10992 /* (X & ~Y) | (~X & Y) is X ^ Y */
10993 if (TREE_CODE (arg0) == BIT_AND_EXPR
10994 && TREE_CODE (arg1) == BIT_AND_EXPR)
10995 {
10996 tree a0, a1, l0, l1, n0, n1;
10997
10998 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10999 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11000
11001 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11002 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11003
11004 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11005 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11006
11007 if ((operand_equal_p (n0, a0, 0)
11008 && operand_equal_p (n1, a1, 0))
11009 || (operand_equal_p (n0, a1, 0)
11010 && operand_equal_p (n1, a0, 0)))
11011 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11012 }
11013
11014 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11015 if (t1 != NULL_TREE)
11016 return t1;
11017
11018 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11019
11020 This results in more efficient code for machines without a NAND
11021 instruction. Combine will canonicalize to the first form
11022 which will allow use of NAND instructions provided by the
11023 backend if they exist. */
11024 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11025 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11026 {
11027 return
11028 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11029 build2 (BIT_AND_EXPR, type,
11030 fold_convert_loc (loc, type,
11031 TREE_OPERAND (arg0, 0)),
11032 fold_convert_loc (loc, type,
11033 TREE_OPERAND (arg1, 0))));
11034 }
11035
11036 /* See if this can be simplified into a rotate first. If that
11037 is unsuccessful, continue in the association code. */
11038 goto bit_rotate;
11039
11040 case BIT_XOR_EXPR:
11041 /* ~X ^ X is -1. */
11042 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11043 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11044 {
11045 t1 = build_zero_cst (type);
11046 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11047 return omit_one_operand_loc (loc, type, t1, arg1);
11048 }
11049
11050 /* X ^ ~X is -1. */
11051 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11052 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11053 {
11054 t1 = build_zero_cst (type);
11055 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11056 return omit_one_operand_loc (loc, type, t1, arg0);
11057 }
11058
11059 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11060 with a constant, and the two constants have no bits in common,
11061 we should treat this as a BIT_IOR_EXPR since this may produce more
11062 simplifications. */
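/* For example, (X & 0x0c) ^ (Y & 0x03) touches disjoint bits and is
   therefore equivalent to (X & 0x0c) | (Y & 0x03).  */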
11063 if (TREE_CODE (arg0) == BIT_AND_EXPR
11064 && TREE_CODE (arg1) == BIT_AND_EXPR
11065 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11066 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11067 && wi::bit_and (TREE_OPERAND (arg0, 1),
11068 TREE_OPERAND (arg1, 1)) == 0)
11069 {
11070 code = BIT_IOR_EXPR;
11071 goto bit_ior;
11072 }
11073
11074 /* (X | Y) ^ X -> Y & ~X */
11075 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11076 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11077 {
11078 tree t2 = TREE_OPERAND (arg0, 1);
11079 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11080 arg1);
11081 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11082 fold_convert_loc (loc, type, t2),
11083 fold_convert_loc (loc, type, t1));
11084 return t1;
11085 }
11086
11087 /* (Y | X) ^ X -> Y & ~X */
11088 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11089 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11090 {
11091 tree t2 = TREE_OPERAND (arg0, 0);
11092 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11093 arg1);
11094 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11095 fold_convert_loc (loc, type, t2),
11096 fold_convert_loc (loc, type, t1));
11097 return t1;
11098 }
11099
11100 /* X ^ (X | Y) -> Y & ~X */
11101 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11102 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11103 {
11104 tree t2 = TREE_OPERAND (arg1, 1);
11105 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11106 arg0);
11107 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11108 fold_convert_loc (loc, type, t2),
11109 fold_convert_loc (loc, type, t1));
11110 return t1;
11111 }
11112
11113 /* X ^ (Y | X) -> Y & ~X */
11114 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11115 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11116 {
11117 tree t2 = TREE_OPERAND (arg1, 0);
11118 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11119 arg0);
11120 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11121 fold_convert_loc (loc, type, t2),
11122 fold_convert_loc (loc, type, t1));
11123 return t1;
11124 }
11125
11126 /* Convert ~X ^ ~Y to X ^ Y. */
11127 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11128 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11129 return fold_build2_loc (loc, code, type,
11130 fold_convert_loc (loc, type,
11131 TREE_OPERAND (arg0, 0)),
11132 fold_convert_loc (loc, type,
11133 TREE_OPERAND (arg1, 0)));
11134
11135 /* Convert ~X ^ C to X ^ ~C. */
11136 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11137 && TREE_CODE (arg1) == INTEGER_CST)
11138 return fold_build2_loc (loc, code, type,
11139 fold_convert_loc (loc, type,
11140 TREE_OPERAND (arg0, 0)),
11141 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11142
11143 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11144 if (TREE_CODE (arg0) == BIT_AND_EXPR
11145 && INTEGRAL_TYPE_P (type)
11146 && integer_onep (TREE_OPERAND (arg0, 1))
11147 && integer_onep (arg1))
11148 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11149 build_zero_cst (TREE_TYPE (arg0)));
11150
11151 /* Fold (X & Y) ^ Y as ~X & Y. */
11152 if (TREE_CODE (arg0) == BIT_AND_EXPR
11153 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11154 {
11155 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11156 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11157 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11158 fold_convert_loc (loc, type, arg1));
11159 }
11160 /* Fold (X & Y) ^ X as ~Y & X. */
11161 if (TREE_CODE (arg0) == BIT_AND_EXPR
11162 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11163 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11164 {
11165 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11166 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11167 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11168 fold_convert_loc (loc, type, arg1));
11169 }
11170 /* Fold X ^ (X & Y) as X & ~Y. */
11171 if (TREE_CODE (arg1) == BIT_AND_EXPR
11172 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11173 {
11174 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11175 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11176 fold_convert_loc (loc, type, arg0),
11177 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11178 }
11179 /* Fold X ^ (Y & X) as ~Y & X. */
11180 if (TREE_CODE (arg1) == BIT_AND_EXPR
11181 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11182 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11183 {
11184 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11185 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11186 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11187 fold_convert_loc (loc, type, arg0));
11188 }
11189
11190 /* See if this can be simplified into a rotate first. If that
11191 is unsuccessful, continue in the association code. */
11192 goto bit_rotate;
11193
11194 case BIT_AND_EXPR:
11195 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11196 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11197 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11198 || (TREE_CODE (arg0) == EQ_EXPR
11199 && integer_zerop (TREE_OPERAND (arg0, 1))))
11200 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11201 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11202
11203 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11204 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11205 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11206 || (TREE_CODE (arg1) == EQ_EXPR
11207 && integer_zerop (TREE_OPERAND (arg1, 1))))
11208 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11209 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11210
11211 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11212 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11213 && INTEGRAL_TYPE_P (type)
11214 && integer_onep (TREE_OPERAND (arg0, 1))
11215 && integer_onep (arg1))
11216 {
11217 tree tem2;
11218 tem = TREE_OPERAND (arg0, 0);
11219 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11220 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11221 tem, tem2);
11222 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11223 build_zero_cst (TREE_TYPE (tem)));
11224 }
11225 /* Fold ~X & 1 as (X & 1) == 0. */
11226 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11227 && INTEGRAL_TYPE_P (type)
11228 && integer_onep (arg1))
11229 {
11230 tree tem2;
11231 tem = TREE_OPERAND (arg0, 0);
11232 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11233 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11234 tem, tem2);
11235 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11236 build_zero_cst (TREE_TYPE (tem)));
11237 }
11238 /* Fold !X & 1 as X == 0. */
11239 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11240 && integer_onep (arg1))
11241 {
11242 tem = TREE_OPERAND (arg0, 0);
11243 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11244 build_zero_cst (TREE_TYPE (tem)));
11245 }
11246
11247 /* Fold (X ^ Y) & Y as ~X & Y. */
11248 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11249 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11250 {
11251 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11252 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11253 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11254 fold_convert_loc (loc, type, arg1));
11255 }
11256 /* Fold (X ^ Y) & X as ~Y & X. */
11257 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11258 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11259 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11260 {
11261 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11262 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11263 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11264 fold_convert_loc (loc, type, arg1));
11265 }
11266 /* Fold X & (X ^ Y) as X & ~Y. */
11267 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11268 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11269 {
11270 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11271 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11272 fold_convert_loc (loc, type, arg0),
11273 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11274 }
11275 /* Fold X & (Y ^ X) as ~Y & X. */
11276 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11277 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11278 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11279 {
11280 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11281 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11282 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11283 fold_convert_loc (loc, type, arg0));
11284 }
11285
11286 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11287 multiple of 1 << CST. */
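/* For example, (X * 8) & -8 folds to X * 8: the product is always a
   multiple of 8, so its low three bits are zero to begin with.  */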
11288 if (TREE_CODE (arg1) == INTEGER_CST)
11289 {
11290 wide_int cst1 = arg1;
11291 wide_int ncst1 = -cst1;
11292 if ((cst1 & ncst1) == ncst1
11293 && multiple_of_p (type, arg0,
11294 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11295 return fold_convert_loc (loc, type, arg0);
11296 }
11297
11298 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11299 bits from CST2. */
11300 if (TREE_CODE (arg1) == INTEGER_CST
11301 && TREE_CODE (arg0) == MULT_EXPR
11302 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11303 {
11304 wide_int warg1 = arg1;
11305 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11306
11307 if (masked == 0)
11308 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11309 arg0, arg1);
11310 else if (masked != warg1)
11311 {
11312 /* Avoid the transform if arg1 is a mask of some
11313 mode which allows further optimizations. */
11314 int pop = wi::popcount (warg1);
11315 if (!(pop >= BITS_PER_UNIT
11316 && exact_log2 (pop) != -1
11317 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11318 return fold_build2_loc (loc, code, type, op0,
11319 wide_int_to_tree (type, masked));
11320 }
11321 }
11322
11323 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11324 ((A & N) + B) & M -> (A + B) & M
11325 Similarly if (N & M) == 0,
11326 ((A | N) + B) & M -> (A + B) & M
11327 and for - instead of + (or unary - instead of +)
11328 and/or ^ instead of |.
11329 If B is constant and (B & M) == 0, fold into A & M. */
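/* For example, with M == 0xff and N == 0xff, ((A & 0xff) + B) & 0xff
   folds to (A + B) & 0xff: bits of A above the mask cannot affect the
   masked low bits of the sum.  */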
11330 if (TREE_CODE (arg1) == INTEGER_CST)
11331 {
11332 wide_int cst1 = arg1;
11333 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11334 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11335 && (TREE_CODE (arg0) == PLUS_EXPR
11336 || TREE_CODE (arg0) == MINUS_EXPR
11337 || TREE_CODE (arg0) == NEGATE_EXPR)
11338 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11339 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11340 {
11341 tree pmop[2];
11342 int which = 0;
11343 wide_int cst0;
11344
11345 /* Now we know that arg0 is (C + D) or (C - D) or
11346 -C and that arg1 (M) equals (1LL << cst) - 1.
11347 Store C into PMOP[0] and D into PMOP[1]. */
11348 pmop[0] = TREE_OPERAND (arg0, 0);
11349 pmop[1] = NULL;
11350 if (TREE_CODE (arg0) != NEGATE_EXPR)
11351 {
11352 pmop[1] = TREE_OPERAND (arg0, 1);
11353 which = 1;
11354 }
11355
11356 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11357 which = -1;
11358
11359 for (; which >= 0; which--)
11360 switch (TREE_CODE (pmop[which]))
11361 {
11362 case BIT_AND_EXPR:
11363 case BIT_IOR_EXPR:
11364 case BIT_XOR_EXPR:
11365 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11366 != INTEGER_CST)
11367 break;
11368 cst0 = TREE_OPERAND (pmop[which], 1);
11369 cst0 &= cst1;
11370 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11371 {
11372 if (cst0 != cst1)
11373 break;
11374 }
11375 else if (cst0 != 0)
11376 break;
11377 /* If C or D is of the form (A & N) where
11378 (N & M) == M, or of the form (A | N) or
11379 (A ^ N) where (N & M) == 0, replace it with A. */
11380 pmop[which] = TREE_OPERAND (pmop[which], 0);
11381 break;
11382 case INTEGER_CST:
11383 /* If C or D is a N where (N & M) == 0, it can be
11384 omitted (assumed 0). */
11385 if ((TREE_CODE (arg0) == PLUS_EXPR
11386 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11387 && (cst1 & pmop[which]) == 0)
11388 pmop[which] = NULL;
11389 break;
11390 default:
11391 break;
11392 }
11393
11394 /* Only build anything new if we optimized one or both arguments
11395 above. */
11396 if (pmop[0] != TREE_OPERAND (arg0, 0)
11397 || (TREE_CODE (arg0) != NEGATE_EXPR
11398 && pmop[1] != TREE_OPERAND (arg0, 1)))
11399 {
11400 tree utype = TREE_TYPE (arg0);
11401 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11402 {
11403 /* Perform the operations in a type that has defined
11404 overflow behavior. */
11405 utype = unsigned_type_for (TREE_TYPE (arg0));
11406 if (pmop[0] != NULL)
11407 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11408 if (pmop[1] != NULL)
11409 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11410 }
11411
11412 if (TREE_CODE (arg0) == NEGATE_EXPR)
11413 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11414 else if (TREE_CODE (arg0) == PLUS_EXPR)
11415 {
11416 if (pmop[0] != NULL && pmop[1] != NULL)
11417 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11418 pmop[0], pmop[1]);
11419 else if (pmop[0] != NULL)
11420 tem = pmop[0];
11421 else if (pmop[1] != NULL)
11422 tem = pmop[1];
11423 else
11424 return build_int_cst (type, 0);
11425 }
11426 else if (pmop[0] == NULL)
11427 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11428 else
11429 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11430 pmop[0], pmop[1]);
11431 /* TEM is now the new binary +, - or unary - replacement. */
11432 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11433 fold_convert_loc (loc, utype, arg1));
11434 return fold_convert_loc (loc, type, tem);
11435 }
11436 }
11437 }
11438
11439 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11440 if (t1 != NULL_TREE)
11441 return t1;
11442 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11443 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11444 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11445 {
11446 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11447
11448 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11449 if (mask == -1)
11450 return
11451 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11452 }
11453
11454 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11455
11456 This results in more efficient code for machines without a NOR
11457 instruction. Combine will canonicalize to the first form
11458 which will allow use of NOR instructions provided by the
11459 backend if they exist. */
11460 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11461 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11462 {
11463 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11464 build2 (BIT_IOR_EXPR, type,
11465 fold_convert_loc (loc, type,
11466 TREE_OPERAND (arg0, 0)),
11467 fold_convert_loc (loc, type,
11468 TREE_OPERAND (arg1, 0))));
11469 }
11470
11471 /* If arg0 is derived from the address of an object or function, we may
11472 be able to fold this expression using the object or function's
11473 alignment. */
11474 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11475 {
11476 unsigned HOST_WIDE_INT modulus, residue;
11477 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11478
11479 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11480 integer_onep (arg1));
11481
11482 /* This works because modulus is a power of 2. If this weren't the
11483 case, we'd have to replace it by its greatest power-of-2
11484 divisor: modulus & -modulus. */
11485 if (low < modulus)
11486 return build_int_cst (type, residue & low);
11487 }
11488
11489 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11490 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11491 if the new mask might be further optimized. */
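/* For example, (X << 8) & 0xff00: the shift guarantees that the low
   eight bits are zero, so the mask can be widened to 0xffff, which is
   recognizable as a 16-bit mode mask.  */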
11492 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11493 || TREE_CODE (arg0) == RSHIFT_EXPR)
11494 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11495 && TREE_CODE (arg1) == INTEGER_CST
11496 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11497 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11498 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11499 < TYPE_PRECISION (TREE_TYPE (arg0))))
11500 {
11501 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11502 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11503 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11504 tree shift_type = TREE_TYPE (arg0);
11505
11506 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11507 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11508 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11509 && TYPE_PRECISION (TREE_TYPE (arg0))
11510 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11511 {
11512 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11513 tree arg00 = TREE_OPERAND (arg0, 0);
11514 /* See if more bits can be proven as zero because of
11515 zero extension. */
11516 if (TREE_CODE (arg00) == NOP_EXPR
11517 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11518 {
11519 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11520 if (TYPE_PRECISION (inner_type)
11521 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11522 && TYPE_PRECISION (inner_type) < prec)
11523 {
11524 prec = TYPE_PRECISION (inner_type);
11525 /* See if we can shorten the right shift. */
11526 if (shiftc < prec)
11527 shift_type = inner_type;
11528 /* Otherwise X >> C1 is all zeros, so we'll optimize
11529 it into (X, 0) later on by making sure zerobits
11530 is all ones. */
11531 }
11532 }
11533 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11534 if (shiftc < prec)
11535 {
11536 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11537 zerobits <<= prec - shiftc;
11538 }
11539 /* For an arithmetic shift, if the sign bit could be set, zerobits
11540 may actually contain sign bits, so no transformation is
11541 possible, unless MASK masks them all away. In that
11542 case the shift needs to be converted into a logical shift. */
11543 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11544 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11545 {
11546 if ((mask & zerobits) == 0)
11547 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11548 else
11549 zerobits = 0;
11550 }
11551 }
11552
11553 /* ((X << 16) & 0xff00) is (X, 0). */
11554 if ((mask & zerobits) == mask)
11555 return omit_one_operand_loc (loc, type,
11556 build_int_cst (type, 0), arg0);
11557
11558 newmask = mask | zerobits;
11559 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11560 {
11561 /* Only do the transformation if NEWMASK is some integer
11562 mode's mask. */
11563 for (prec = BITS_PER_UNIT;
11564 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11565 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11566 break;
11567 if (prec < HOST_BITS_PER_WIDE_INT
11568 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11569 {
11570 tree newmaskt;
11571
11572 if (shift_type != TREE_TYPE (arg0))
11573 {
11574 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11575 fold_convert_loc (loc, shift_type,
11576 TREE_OPERAND (arg0, 0)),
11577 TREE_OPERAND (arg0, 1));
11578 tem = fold_convert_loc (loc, type, tem);
11579 }
11580 else
11581 tem = op0;
11582 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11583 if (!tree_int_cst_equal (newmaskt, arg1))
11584 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11585 }
11586 }
11587 }
11588
11589 goto associate;
11590
11591 case RDIV_EXPR:
11592 /* Don't touch a floating-point divide by zero unless the mode
11593 of the constant can represent infinity. */
11594 if (TREE_CODE (arg1) == REAL_CST
11595 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11596 && real_zerop (arg1))
11597 return NULL_TREE;
11598
11599 /* Optimize A / A to 1.0 if we don't care about
11600 NaNs or Infinities. Skip the transformation
11601 for non-real operands. */
11602 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11603 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11604 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11605 && operand_equal_p (arg0, arg1, 0))
11606 {
11607 tree r = build_real (TREE_TYPE (arg0), dconst1);
11608
11609 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11610 }
11611
11612 /* The complex version of the above A / A optimization. */
11613 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11614 && operand_equal_p (arg0, arg1, 0))
11615 {
11616 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11617 if (! HONOR_NANS (TYPE_MODE (elem_type))
11618 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11619 {
11620 tree r = build_real (elem_type, dconst1);
11621 /* omit_two_operands will call fold_convert for us. */
11622 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11623 }
11624 }
11625
11626 /* (-A) / (-B) -> A / B */
11627 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11628 return fold_build2_loc (loc, RDIV_EXPR, type,
11629 TREE_OPERAND (arg0, 0),
11630 negate_expr (arg1));
11631 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11632 return fold_build2_loc (loc, RDIV_EXPR, type,
11633 negate_expr (arg0),
11634 TREE_OPERAND (arg1, 0));
11635
11636 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11637 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11638 && real_onep (arg1))
11639 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11640
11641 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11642 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11643 && real_minus_onep (arg1))
11644 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11645 negate_expr (arg0)));
11646
11647 /* If ARG1 is a constant, we can convert this to a multiply by the
11648 reciprocal. This does not have the same rounding properties,
11649 so only do this if -freciprocal-math is enabled. We can actually
11650 always safely do it if ARG1 is a power of two, but it's hard to
11651 tell if it is or not in a portable manner. */
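/* For example, X / 5.0 becomes X * 0.2 only under -freciprocal-math,
   while X / 4.0 can always become X * 0.25, because 0.25 is an exact
   power-of-two reciprocal.  */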
11652 if (optimize
11653 && (TREE_CODE (arg1) == REAL_CST
11654 || (TREE_CODE (arg1) == COMPLEX_CST
11655 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11656 || (TREE_CODE (arg1) == VECTOR_CST
11657 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11658 {
11659 if (flag_reciprocal_math
11660 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11661 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11662 /* Find the reciprocal if optimizing and the result is exact.
11663 TODO: Complex reciprocal not implemented. */
11664 if (TREE_CODE (arg1) != COMPLEX_CST)
11665 {
11666 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11667
11668 if (inverse)
11669 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11670 }
11671 }
11672 /* Convert A/B/C to A/(B*C). */
11673 if (flag_reciprocal_math
11674 && TREE_CODE (arg0) == RDIV_EXPR)
11675 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11676 fold_build2_loc (loc, MULT_EXPR, type,
11677 TREE_OPERAND (arg0, 1), arg1));
11678
11679 /* Convert A/(B/C) to (A/B)*C. */
11680 if (flag_reciprocal_math
11681 && TREE_CODE (arg1) == RDIV_EXPR)
11682 return fold_build2_loc (loc, MULT_EXPR, type,
11683 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11684 TREE_OPERAND (arg1, 0)),
11685 TREE_OPERAND (arg1, 1));
11686
11687 /* Convert C1/(X*C2) into (C1/C2)/X. */
11688 if (flag_reciprocal_math
11689 && TREE_CODE (arg1) == MULT_EXPR
11690 && TREE_CODE (arg0) == REAL_CST
11691 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11692 {
11693 tree tem = const_binop (RDIV_EXPR, arg0,
11694 TREE_OPERAND (arg1, 1));
11695 if (tem)
11696 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11697 TREE_OPERAND (arg1, 0));
11698 }
11699
11700 if (flag_unsafe_math_optimizations)
11701 {
11702 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11703 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11704
11705 /* Optimize sin(x)/cos(x) as tan(x). */
11706 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11707 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11708 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11709 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11710 CALL_EXPR_ARG (arg1, 0), 0))
11711 {
11712 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11713
11714 if (tanfn != NULL_TREE)
11715 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11716 }
11717
11718 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11719 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11720 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11721 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11722 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11723 CALL_EXPR_ARG (arg1, 0), 0))
11724 {
11725 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11726
11727 if (tanfn != NULL_TREE)
11728 {
11729 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11730 CALL_EXPR_ARG (arg0, 0));
11731 return fold_build2_loc (loc, RDIV_EXPR, type,
11732 build_real (type, dconst1), tmp);
11733 }
11734 }
11735
11736 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11737 NaNs or Infinities. */
11738 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11739 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11740 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11741 {
11742 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11743 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11744
11745 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11746 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11747 && operand_equal_p (arg00, arg01, 0))
11748 {
11749 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11750
11751 if (cosfn != NULL_TREE)
11752 return build_call_expr_loc (loc, cosfn, 1, arg00);
11753 }
11754 }
11755
11756 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11757 NaNs or Infinities. */
11758 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11759 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11760 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11761 {
11762 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11763 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11764
11765 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11766 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11767 && operand_equal_p (arg00, arg01, 0))
11768 {
11769 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11770
11771 if (cosfn != NULL_TREE)
11772 {
11773 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11774 return fold_build2_loc (loc, RDIV_EXPR, type,
11775 build_real (type, dconst1),
11776 tmp);
11777 }
11778 }
11779 }
11780
11781 /* Optimize pow(x,c)/x as pow(x,c-1). */
11782 if (fcode0 == BUILT_IN_POW
11783 || fcode0 == BUILT_IN_POWF
11784 || fcode0 == BUILT_IN_POWL)
11785 {
11786 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11787 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11788 if (TREE_CODE (arg01) == REAL_CST
11789 && !TREE_OVERFLOW (arg01)
11790 && operand_equal_p (arg1, arg00, 0))
11791 {
11792 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11793 REAL_VALUE_TYPE c;
11794 tree arg;
11795
11796 c = TREE_REAL_CST (arg01);
11797 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11798 arg = build_real (type, c);
11799 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11800 }
11801 }
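/* E.g. pow (x, 3.0) / x folds to pow (x, 2.0), dropping the
   division ("x" is an illustrative operand).  */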
11802
11803 /* Optimize a/root(b/c) into a*root(c/b). */
11804 if (BUILTIN_ROOT_P (fcode1))
11805 {
11806 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11807
11808 if (TREE_CODE (rootarg) == RDIV_EXPR)
11809 {
11810 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11811 tree b = TREE_OPERAND (rootarg, 0);
11812 tree c = TREE_OPERAND (rootarg, 1);
11813
11814 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11815
11816 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11817 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11818 }
11819 }
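/* E.g. a / sqrt (b / c) becomes a * sqrt (c / b), trading the outer
   division for a multiplication; BUILTIN_ROOT_P also matches the
   cbrt family (operand names illustrative).  */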
11820
11821 /* Optimize x/expN(y) into x*expN(-y). */
11822 if (BUILTIN_EXPONENT_P (fcode1))
11823 {
11824 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11825 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11826 arg1 = build_call_expr_loc (loc,
11827 expfn, 1,
11828 fold_convert_loc (loc, type, arg));
11829 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11830 }
11831
11832 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11833 if (fcode1 == BUILT_IN_POW
11834 || fcode1 == BUILT_IN_POWF
11835 || fcode1 == BUILT_IN_POWL)
11836 {
11837 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11838 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11839 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11840 tree neg11 = fold_convert_loc (loc, type,
11841 negate_expr (arg11));
11842 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11843 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11844 }
11845 }
11846 return NULL_TREE;
11847
11848 case TRUNC_DIV_EXPR:
11849 /* Optimize (X & (-A)) / A where A is a power of 2,
11850 to X >> log2(A). */
11851 if (TREE_CODE (arg0) == BIT_AND_EXPR
11852 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11853 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11854 {
11855 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11856 arg1, TREE_OPERAND (arg0, 1));
11857 if (sum && integer_zerop (sum)) {
11858 tree pow2 = build_int_cst (integer_type_node,
11859 wi::exact_log2 (arg1));
11860 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11861 TREE_OPERAND (arg0, 0), pow2);
11862 }
11863 }
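/* Worked instance (names illustrative): for signed x,
   (x & -16) / 16 passes the zero-sum check above because
   16 + (-16) == 0, and so folds to x >> 4.  */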
11864
11865 /* Fall through */
11866
11867 case FLOOR_DIV_EXPR:
11868 /* Simplify A / (B << N) where A and B are positive and B is
11869 a power of 2, to A >> (N + log2(B)). */
11870 strict_overflow_p = false;
11871 if (TREE_CODE (arg1) == LSHIFT_EXPR
11872 && (TYPE_UNSIGNED (type)
11873 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11874 {
11875 tree sval = TREE_OPERAND (arg1, 0);
11876 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11877 {
11878 tree sh_cnt = TREE_OPERAND (arg1, 1);
11879 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11880 wi::exact_log2 (sval));
11881
11882 if (strict_overflow_p)
11883 fold_overflow_warning (("assuming signed overflow does not "
11884 "occur when simplifying A / (B << N)"),
11885 WARN_STRICT_OVERFLOW_MISC);
11886
11887 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11888 sh_cnt, pow2);
11889 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11890 fold_convert_loc (loc, type, arg0), sh_cnt);
11891 }
11892 }
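/* E.g. for unsigned a, a / (4 << n) folds to a >> (n + 2), since
   log2 (4) == 2 (names illustrative).  */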
11893
11894 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11895 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11896 if (INTEGRAL_TYPE_P (type)
11897 && TYPE_UNSIGNED (type)
11898 && code == FLOOR_DIV_EXPR)
11899 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11900
11901 /* Fall through */
11902
11903 case ROUND_DIV_EXPR:
11904 case CEIL_DIV_EXPR:
11905 case EXACT_DIV_EXPR:
11906 if (integer_zerop (arg1))
11907 return NULL_TREE;
11908 /* X / -1 is -X. */
11909 if (!TYPE_UNSIGNED (type)
11910 && TREE_CODE (arg1) == INTEGER_CST
11911 && wi::eq_p (arg1, -1))
11912 return fold_convert_loc (loc, type, negate_expr (arg0));
11913
11914 /* Convert -A / -B to A / B when the type is signed and overflow is
11915 undefined. */
11916 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11917 && TREE_CODE (arg0) == NEGATE_EXPR
11918 && negate_expr_p (arg1))
11919 {
11920 if (INTEGRAL_TYPE_P (type))
11921 fold_overflow_warning (("assuming signed overflow does not occur "
11922 "when distributing negation across "
11923 "division"),
11924 WARN_STRICT_OVERFLOW_MISC);
11925 return fold_build2_loc (loc, code, type,
11926 fold_convert_loc (loc, type,
11927 TREE_OPERAND (arg0, 0)),
11928 fold_convert_loc (loc, type,
11929 negate_expr (arg1)));
11930 }
11931 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11932 && TREE_CODE (arg1) == NEGATE_EXPR
11933 && negate_expr_p (arg0))
11934 {
11935 if (INTEGRAL_TYPE_P (type))
11936 fold_overflow_warning (("assuming signed overflow does not occur "
11937 "when distributing negation across "
11938 "division"),
11939 WARN_STRICT_OVERFLOW_MISC);
11940 return fold_build2_loc (loc, code, type,
11941 fold_convert_loc (loc, type,
11942 negate_expr (arg0)),
11943 fold_convert_loc (loc, type,
11944 TREE_OPERAND (arg1, 0)));
11945 }
11946
11947 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11948 operation, EXACT_DIV_EXPR.
11949
11950 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11951 At one time others generated faster code, but it's not clear whether
11952 they do after the last round of changes to the DIV code in expmed.c. */
11953 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11954 && multiple_of_p (type, arg0, arg1))
11955 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
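/* E.g. a FLOOR_DIV_EXPR of x * 8 by 8 is known to divide exactly,
   so it is rewritten as an EXACT_DIV_EXPR, which expands to the
   cheapest division form (example ours).  */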
11956
11957 strict_overflow_p = false;
11958 if (TREE_CODE (arg1) == INTEGER_CST
11959 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11960 &strict_overflow_p)))
11961 {
11962 if (strict_overflow_p)
11963 fold_overflow_warning (("assuming signed overflow does not occur "
11964 "when simplifying division"),
11965 WARN_STRICT_OVERFLOW_MISC);
11966 return fold_convert_loc (loc, type, tem);
11967 }
11968
11969 return NULL_TREE;
11970
11971 case CEIL_MOD_EXPR:
11972 case FLOOR_MOD_EXPR:
11973 case ROUND_MOD_EXPR:
11974 case TRUNC_MOD_EXPR:
11975 /* X % -1 is zero. */
11976 if (!TYPE_UNSIGNED (type)
11977 && TREE_CODE (arg1) == INTEGER_CST
11978 && wi::eq_p (arg1, -1))
11979 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11980
11981 /* X % -C is the same as X % C. */
11982 if (code == TRUNC_MOD_EXPR
11983 && TYPE_SIGN (type) == SIGNED
11984 && TREE_CODE (arg1) == INTEGER_CST
11985 && !TREE_OVERFLOW (arg1)
11986 && wi::neg_p (arg1)
11987 && !TYPE_OVERFLOW_TRAPS (type)
11988 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11989 && !sign_bit_p (arg1, arg1))
11990 return fold_build2_loc (loc, code, type,
11991 fold_convert_loc (loc, type, arg0),
11992 fold_convert_loc (loc, type,
11993 negate_expr (arg1)));
11994
11995 /* X % -Y is the same as X % Y. */
11996 if (code == TRUNC_MOD_EXPR
11997 && !TYPE_UNSIGNED (type)
11998 && TREE_CODE (arg1) == NEGATE_EXPR
11999 && !TYPE_OVERFLOW_TRAPS (type))
12000 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12001 fold_convert_loc (loc, type,
12002 TREE_OPERAND (arg1, 0)));
12003
12004 strict_overflow_p = false;
12005 if (TREE_CODE (arg1) == INTEGER_CST
12006 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12007 &strict_overflow_p)))
12008 {
12009 if (strict_overflow_p)
12010 fold_overflow_warning (("assuming signed overflow does not occur "
12011 "when simplifying modulus"),
12012 WARN_STRICT_OVERFLOW_MISC);
12013 return fold_convert_loc (loc, type, tem);
12014 }
12015
12016 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12017 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12018 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12019 && (TYPE_UNSIGNED (type)
12020 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12021 {
12022 tree c = arg1;
12023 /* Also optimize A % (C << N) where C is a power of 2,
12024 to A & ((C << N) - 1). */
12025 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12026 c = TREE_OPERAND (arg1, 0);
12027
12028 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12029 {
12030 tree mask
12031 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12032 build_int_cst (TREE_TYPE (arg1), 1));
12033 if (strict_overflow_p)
12034 fold_overflow_warning (("assuming signed overflow does not "
12035 "occur when simplifying "
12036 "X % (power of two)"),
12037 WARN_STRICT_OVERFLOW_MISC);
12038 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12039 fold_convert_loc (loc, type, arg0),
12040 fold_convert_loc (loc, type, mask));
12041 }
12042 }
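/* E.g. for unsigned x, x % 16 folds to x & 15, and x % (2 << n)
   folds to x & ((2 << n) - 1) via the LSHIFT_EXPR case above
   (names illustrative).  */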
12043
12044 return NULL_TREE;
12045
12046 case LROTATE_EXPR:
12047 case RROTATE_EXPR:
12048 if (integer_all_onesp (arg0))
12049 return omit_one_operand_loc (loc, type, arg0, arg1);
12050 goto shift;
12051
12052 case RSHIFT_EXPR:
12053 /* Optimize -1 >> x for arithmetic right shifts. */
12054 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12055 && tree_expr_nonnegative_p (arg1))
12056 return omit_one_operand_loc (loc, type, arg0, arg1);
12057 /* ... fall through ... */
12058
12059 case LSHIFT_EXPR:
12060 shift:
12061 if (integer_zerop (arg1))
12062 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12063 if (integer_zerop (arg0))
12064 return omit_one_operand_loc (loc, type, arg0, arg1);
12065
12066 /* Prefer vector1 << scalar to vector1 << vector2
12067 if vector2 is uniform. */
12068 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12069 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12070 return fold_build2_loc (loc, code, type, op0, tem);
12071
12072 /* Since a negative shift count is not well-defined,
12073 don't try to compute it in the compiler. */
12074 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12075 return NULL_TREE;
12076
12077 prec = element_precision (type);
12078
12079 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12080 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12081 && tree_to_uhwi (arg1) < prec
12082 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12083 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12084 {
12085 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12086 + tree_to_uhwi (arg1));
12087
12088 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12089 being well defined. */
12090 if (low >= prec)
12091 {
12092 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12093 low = low % prec;
12094 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12095 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12096 TREE_OPERAND (arg0, 0));
12097 else
12098 low = prec - 1;
12099 }
12100
12101 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12102 build_int_cst (TREE_TYPE (arg1), low));
12103 }
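/* E.g. (x << 3) << 5 on a 32-bit type becomes x << 8, whereas
   (x << 20) << 20 overshoots the precision and folds to zero
   (examples ours).  */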
12104
12105 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12106 into x & ((unsigned)-1 >> c) for unsigned types. */
12107 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12108 || (TYPE_UNSIGNED (type)
12109 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12110 && tree_fits_uhwi_p (arg1)
12111 && tree_to_uhwi (arg1) < prec
12112 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12113 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12114 {
12115 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12116 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12117 tree lshift;
12118 tree arg00;
12119
12120 if (low0 == low1)
12121 {
12122 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12123
12124 lshift = build_minus_one_cst (type);
12125 lshift = const_binop (code, lshift, arg1);
12126
12127 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12128 }
12129 }
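/* E.g. for 32-bit unsigned x, (x >> 4) << 4 becomes
   x & 0xfffffff0, i.e. x with its low four bits cleared.  */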
12130
12131 /* Rewrite an LROTATE_EXPR by a constant into an
12132 RROTATE_EXPR by a new constant. */
12133 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12134 {
12135 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12136 tem = const_binop (MINUS_EXPR, tem, arg1);
12137 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12138 }
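/* E.g. a left rotate of a 32-bit value by 8 is canonicalized as a
   right rotate by 24.  */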
12139
12140 /* If we have a rotate of a bit operation with the rotate count and
12141 the second operand of the bit operation both constant,
12142 permute the two operations. */
12143 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12144 && (TREE_CODE (arg0) == BIT_AND_EXPR
12145 || TREE_CODE (arg0) == BIT_IOR_EXPR
12146 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12147 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12148 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12149 fold_build2_loc (loc, code, type,
12150 TREE_OPERAND (arg0, 0), arg1),
12151 fold_build2_loc (loc, code, type,
12152 TREE_OPERAND (arg0, 1), arg1));
12153
12154 /* Two consecutive rotates adding up to some integer
12155 multiple of the precision of the type can be ignored. */
12156 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12157 && TREE_CODE (arg0) == RROTATE_EXPR
12158 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12159 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12160 prec) == 0)
12161 return TREE_OPERAND (arg0, 0);
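/* E.g. two right rotates of a 32-bit value by 10 and then by 22
   rotate by a full 32 bits in total and leave the value unchanged
   (example ours).  */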
12162
12163 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12164 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12165 if the latter can be further optimized. */
12166 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12167 && TREE_CODE (arg0) == BIT_AND_EXPR
12168 && TREE_CODE (arg1) == INTEGER_CST
12169 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12170 {
12171 tree mask = fold_build2_loc (loc, code, type,
12172 fold_convert_loc (loc, type,
12173 TREE_OPERAND (arg0, 1)),
12174 arg1);
12175 tree shift = fold_build2_loc (loc, code, type,
12176 fold_convert_loc (loc, type,
12177 TREE_OPERAND (arg0, 0)),
12178 arg1);
12179 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12180 if (tem)
12181 return tem;
12182 }
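/* E.g. (x & 0xff00) >> 8 is tried as (x >> 8) & 0xff and is only
   kept when that BIT_AND itself folds further, per the comment
   above (example ours).  */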
12183
12184 return NULL_TREE;
12185
12186 case MIN_EXPR:
12187 if (operand_equal_p (arg0, arg1, 0))
12188 return omit_one_operand_loc (loc, type, arg0, arg1);
12189 if (INTEGRAL_TYPE_P (type)
12190 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12191 return omit_one_operand_loc (loc, type, arg1, arg0);
12192 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12193 if (tem)
12194 return tem;
12195 goto associate;
12196
12197 case MAX_EXPR:
12198 if (operand_equal_p (arg0, arg1, 0))
12199 return omit_one_operand_loc (loc, type, arg0, arg1);
12200 if (INTEGRAL_TYPE_P (type)
12201 && TYPE_MAX_VALUE (type)
12202 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12203 return omit_one_operand_loc (loc, type, arg1, arg0);
12204 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12205 if (tem)
12206 return tem;
12207 goto associate;
12208
12209 case TRUTH_ANDIF_EXPR:
12210 /* Note that the operands of this must be ints
12211 and their values must be 0 or 1.
12212 ("true" is a fixed value perhaps depending on the language.) */
12213 /* If first arg is constant zero, return it. */
12214 if (integer_zerop (arg0))
12215 return fold_convert_loc (loc, type, arg0);
12216 case TRUTH_AND_EXPR:
12217 /* If either arg is constant true, drop it. */
12218 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12219 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12220 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12221 /* Preserve sequence points. */
12222 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12223 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12224 /* If second arg is constant zero, result is zero, but first arg
12225 must be evaluated. */
12226 if (integer_zerop (arg1))
12227 return omit_one_operand_loc (loc, type, arg1, arg0);
12228 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12229 case will be handled here. */
12230 if (integer_zerop (arg0))
12231 return omit_one_operand_loc (loc, type, arg0, arg1);
12232
12233 /* !X && X is always false. */
12234 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12235 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12236 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12237 /* X && !X is always false. */
12238 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12239 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12240 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12241
12242 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12243 means A >= Y && A != MAX, but in this case we know that
12244 A < X <= MAX. */
12245
12246 if (!TREE_SIDE_EFFECTS (arg0)
12247 && !TREE_SIDE_EFFECTS (arg1))
12248 {
12249 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12250 if (tem && !operand_equal_p (tem, arg0, 0))
12251 return fold_build2_loc (loc, code, type, tem, arg1);
12252
12253 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12254 if (tem && !operand_equal_p (tem, arg1, 0))
12255 return fold_build2_loc (loc, code, type, arg0, tem);
12256 }
12257
12258 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12259 != NULL_TREE)
12260 return tem;
12261
12262 return NULL_TREE;
12263
12264 case TRUTH_ORIF_EXPR:
12265 /* Note that the operands of this must be ints
12266 and their values must be 0 or true.
12267 ("true" is a fixed value perhaps depending on the language.) */
12268 /* If first arg is constant true, return it. */
12269 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12270 return fold_convert_loc (loc, type, arg0);
12271 case TRUTH_OR_EXPR:
12272 /* If either arg is constant zero, drop it. */
12273 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12274 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12275 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12276 /* Preserve sequence points. */
12277 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12278 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12279 /* If second arg is constant true, result is true, but we must
12280 evaluate first arg. */
12281 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12282 return omit_one_operand_loc (loc, type, arg1, arg0);
12283 /* Likewise for first arg, but note this only occurs here for
12284 TRUTH_OR_EXPR. */
12285 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12286 return omit_one_operand_loc (loc, type, arg0, arg1);
12287
12288 /* !X || X is always true. */
12289 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12290 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12291 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12292 /* X || !X is always true. */
12293 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12294 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12295 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12296
12297 /* (X && !Y) || (!X && Y) is X ^ Y. */
12298 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12299 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12300 {
12301 tree a0, a1, l0, l1, n0, n1;
12302
12303 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12304 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12305
12306 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12307 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12308
12309 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12310 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12311
12312 if ((operand_equal_p (n0, a0, 0)
12313 && operand_equal_p (n1, a1, 0))
12314 || (operand_equal_p (n0, a1, 0)
12315 && operand_equal_p (n1, a0, 0)))
12316 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12317 }
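/* E.g. (p && !q) || (!p && q) is recognized here and rewritten as
   p ^ q on the truth type (names illustrative).  */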
12318
12319 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12320 != NULL_TREE)
12321 return tem;
12322
12323 return NULL_TREE;
12324
12325 case TRUTH_XOR_EXPR:
12326 /* If the second arg is constant zero, drop it. */
12327 if (integer_zerop (arg1))
12328 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12329 /* If the second arg is constant true, this is a logical inversion. */
12330 if (integer_onep (arg1))
12331 {
12332 tem = invert_truthvalue_loc (loc, arg0);
12333 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12334 }
12335 /* Identical arguments cancel to zero. */
12336 if (operand_equal_p (arg0, arg1, 0))
12337 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12338
12339 /* !X ^ X is always true. */
12340 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12341 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12342 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12343
12344 /* X ^ !X is always true. */
12345 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12346 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12347 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12348
12349 return NULL_TREE;
12350
12351 case EQ_EXPR:
12352 case NE_EXPR:
12353 STRIP_NOPS (arg0);
12354 STRIP_NOPS (arg1);
12355
12356 tem = fold_comparison (loc, code, type, op0, op1);
12357 if (tem != NULL_TREE)
12358 return tem;
12359
12360 /* bool_var != 0 becomes bool_var. */
12361 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12362 && code == NE_EXPR)
12363 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12364
12365 /* bool_var == 1 becomes bool_var. */
12366 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12367 && code == EQ_EXPR)
12368 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12369
12370 /* bool_var != 1 becomes !bool_var. */
12371 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12372 && code == NE_EXPR)
12373 return fold_convert_loc (loc, type,
12374 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12375 TREE_TYPE (arg0), arg0));
12376
12377 /* bool_var == 0 becomes !bool_var. */
12378 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12379 && code == EQ_EXPR)
12380 return fold_convert_loc (loc, type,
12381 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12382 TREE_TYPE (arg0), arg0));
12383
12384 /* !exp != 0 becomes !exp. */
12385 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12386 && code == NE_EXPR)
12387 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12388
12389 /* If this is an equality comparison of the address of two non-weak,
12390 unaliased symbols neither of which are extern (since we do not
12391 have access to attributes for externs), then we know the result. */
12392 if (TREE_CODE (arg0) == ADDR_EXPR
12393 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12394 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12395 && ! lookup_attribute ("alias",
12396 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12397 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12398 && TREE_CODE (arg1) == ADDR_EXPR
12399 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12400 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12401 && ! lookup_attribute ("alias",
12402 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12403 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12404 {
12405 /* We know that we're looking at the address of two
12406 non-weak, unaliased, static _DECL nodes.
12407
12408 It is both wasteful and incorrect to call operand_equal_p
12409 to compare the two ADDR_EXPR nodes. It is wasteful in that
12410 all we need to do is test pointer equality for the arguments
12411 to the two ADDR_EXPR nodes. It is incorrect to use
12412 operand_equal_p as that function is NOT equivalent to a
12413 C equality test. It can in fact return false for two
12414 objects which would test as equal using the C equality
12415 operator. */
12416 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12417 return constant_boolean_node (equal
12418 ? code == EQ_EXPR : code != EQ_EXPR,
12419 type);
12420 }
12421
12422 /* Similarly for a NEGATE_EXPR. */
12423 if (TREE_CODE (arg0) == NEGATE_EXPR
12424 && TREE_CODE (arg1) == INTEGER_CST
12425 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12426 arg1)))
12427 && TREE_CODE (tem) == INTEGER_CST
12428 && !TREE_OVERFLOW (tem))
12429 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12430
12431 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12432 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12433 && TREE_CODE (arg1) == INTEGER_CST
12434 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12435 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12436 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12437 fold_convert_loc (loc,
12438 TREE_TYPE (arg0),
12439 arg1),
12440 TREE_OPERAND (arg0, 1)));
12441
12442 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12443 if ((TREE_CODE (arg0) == PLUS_EXPR
12444 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12445 || TREE_CODE (arg0) == MINUS_EXPR)
12446 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12447 0)),
12448 arg1, 0)
12449 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12450 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12451 {
12452 tree val = TREE_OPERAND (arg0, 1);
12453 return omit_two_operands_loc (loc, type,
12454 fold_build2_loc (loc, code, type,
12455 val,
12456 build_int_cst (TREE_TYPE (val),
12457 0)),
12458 TREE_OPERAND (arg0, 0), arg1);
12459 }
12460
12461 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12462 if (TREE_CODE (arg0) == MINUS_EXPR
12463 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12464 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12465 1)),
12466 arg1, 0)
12467 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12468 {
12469 return omit_two_operands_loc (loc, type,
12470 code == NE_EXPR
12471 ? boolean_true_node : boolean_false_node,
12472 TREE_OPERAND (arg0, 1), arg1);
12473 }
12474
12475 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12476 if (TREE_CODE (arg0) == ABS_EXPR
12477 && (integer_zerop (arg1) || real_zerop (arg1)))
12478 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12479
12480 /* If this is an EQ or NE comparison with zero and ARG0 is
12481 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12482 two operations, but the latter can be done in one less insn
12483 on machines that have only two-operand insns or on which a
12484 constant cannot be the first operand. */
12485 if (TREE_CODE (arg0) == BIT_AND_EXPR
12486 && integer_zerop (arg1))
12487 {
12488 tree arg00 = TREE_OPERAND (arg0, 0);
12489 tree arg01 = TREE_OPERAND (arg0, 1);
12490 if (TREE_CODE (arg00) == LSHIFT_EXPR
12491 && integer_onep (TREE_OPERAND (arg00, 0)))
12492 {
12493 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12494 arg01, TREE_OPERAND (arg00, 1));
12495 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12496 build_int_cst (TREE_TYPE (arg0), 1));
12497 return fold_build2_loc (loc, code, type,
12498 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12499 arg1);
12500 }
12501 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12502 && integer_onep (TREE_OPERAND (arg01, 0)))
12503 {
12504 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12505 arg00, TREE_OPERAND (arg01, 1));
12506 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12507 build_int_cst (TREE_TYPE (arg0), 1));
12508 return fold_build2_loc (loc, code, type,
12509 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12510 arg1);
12511 }
12512 }
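/* E.g. ((1 << n) & flags) == 0 becomes ((flags >> n) & 1) == 0,
   avoiding a separate constant load on two-operand targets (names
   illustrative).  */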
12513
12514 /* If this is an NE or EQ comparison of zero against the result of a
12515 signed MOD operation whose second operand is a power of 2, make
12516 the MOD operation unsigned since it is simpler and equivalent. */
12517 if (integer_zerop (arg1)
12518 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12519 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12520 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12521 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12522 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12523 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12524 {
12525 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12526 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12527 fold_convert_loc (loc, newtype,
12528 TREE_OPERAND (arg0, 0)),
12529 fold_convert_loc (loc, newtype,
12530 TREE_OPERAND (arg0, 1)));
12531
12532 return fold_build2_loc (loc, code, type, newmod,
12533 fold_convert_loc (loc, newtype, arg1));
12534 }
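/* E.g. for signed x, x % 8 == 0 becomes (unsigned) x % 8 == 0;
   this is safe because only equality with zero is tested, and the
   unsigned modulus can then expand as a plain mask (example ours).  */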
12535
12536 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12537 C1 is a valid shift constant, and C2 is a power of two, i.e.
12538 a single bit. */
12539 if (TREE_CODE (arg0) == BIT_AND_EXPR
12540 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12541 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12542 == INTEGER_CST
12543 && integer_pow2p (TREE_OPERAND (arg0, 1))
12544 && integer_zerop (arg1))
12545 {
12546 tree itype = TREE_TYPE (arg0);
12547 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12548 prec = TYPE_PRECISION (itype);
12549
12550 /* Check for a valid shift count. */
12551 if (wi::ltu_p (arg001, prec))
12552 {
12553 tree arg01 = TREE_OPERAND (arg0, 1);
12554 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12555 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12556 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12557 can be rewritten as (X & (C2 << C1)) != 0. */
12558 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12559 {
12560 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12561 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12562 return fold_build2_loc (loc, code, type, tem,
12563 fold_convert_loc (loc, itype, arg1));
12564 }
12565 /* Otherwise, for signed (arithmetic) shifts,
12566 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12567 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12568 else if (!TYPE_UNSIGNED (itype))
12569 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12570 arg000, build_int_cst (itype, 0));
12571 /* Otherwise, for unsigned (logical) shifts,
12572 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12573 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12574 else
12575 return omit_one_operand_loc (loc, type,
12576 code == EQ_EXPR ? integer_one_node
12577 : integer_zero_node,
12578 arg000);
12579 }
12580 }
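/* Worked instances (ours): ((x >> 2) & 4) != 0 becomes
   (x & 16) != 0 because 4 << 2 does not overflow, while
   ((x >> 30) & 4) != 0 on a signed 32-bit x becomes x < 0 via the
   sign-test branch.  */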
12581
12582 /* If we have (A & C) == C where C is a power of 2, convert this into
12583 (A & C) != 0. Similarly for NE_EXPR. */
12584 if (TREE_CODE (arg0) == BIT_AND_EXPR
12585 && integer_pow2p (TREE_OPERAND (arg0, 1))
12586 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12587 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12588 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12589 integer_zero_node));
12590
12591 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12592 bit, then fold the expression into A < 0 or A >= 0. */
12593 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12594 if (tem)
12595 return tem;
12596
12597 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12598 Similarly for NE_EXPR. */
12599 if (TREE_CODE (arg0) == BIT_AND_EXPR
12600 && TREE_CODE (arg1) == INTEGER_CST
12601 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12602 {
12603 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12604 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12605 TREE_OPERAND (arg0, 1));
12606 tree dandnotc
12607 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12608 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12609 notc);
12610 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12611 if (integer_nonzerop (dandnotc))
12612 return omit_one_operand_loc (loc, type, rslt, arg0);
12613 }
12614
12615 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12616 Similarly for NE_EXPR. */
12617 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12618 && TREE_CODE (arg1) == INTEGER_CST
12619 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12620 {
12621 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12622 tree candnotd
12623 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12624 TREE_OPERAND (arg0, 1),
12625 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12626 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12627 if (integer_nonzerop (candnotd))
12628 return omit_one_operand_loc (loc, type, rslt, arg0);
12629 }
12630
12631 /* If this is a comparison of a field, we may be able to simplify it. */
12632 if ((TREE_CODE (arg0) == COMPONENT_REF
12633 || TREE_CODE (arg0) == BIT_FIELD_REF)
12634 /* Handle the constant case even without -O
12635 to make sure the warnings are given. */
12636 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12637 {
12638 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12639 if (t1)
12640 return t1;
12641 }
12642
12643 /* Optimize comparisons of strlen vs zero to a compare of the
12644 first character of the string vs zero. To wit,
12645 strlen(ptr) == 0 => *ptr == 0
12646 strlen(ptr) != 0 => *ptr != 0
12647 Other cases should reduce to one of these two (or a constant)
12648 due to the return value of strlen being unsigned. */
12649 if (TREE_CODE (arg0) == CALL_EXPR
12650 && integer_zerop (arg1))
12651 {
12652 tree fndecl = get_callee_fndecl (arg0);
12653
12654 if (fndecl
12655 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12656 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12657 && call_expr_nargs (arg0) == 1
12658 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12659 {
12660 tree iref = build_fold_indirect_ref_loc (loc,
12661 CALL_EXPR_ARG (arg0, 0));
12662 return fold_build2_loc (loc, code, type, iref,
12663 build_int_cst (TREE_TYPE (iref), 0));
12664 }
12665 }
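/* E.g. strlen (s) == 0 folds to *s == 0, avoiding the library call
   ("s" is an illustrative pointer operand).  */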
12666
12667 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12668 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12669 if (TREE_CODE (arg0) == RSHIFT_EXPR
12670 && integer_zerop (arg1)
12671 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12672 {
12673 tree arg00 = TREE_OPERAND (arg0, 0);
12674 tree arg01 = TREE_OPERAND (arg0, 1);
12675 tree itype = TREE_TYPE (arg00);
12676 if (wi::eq_p (arg01, element_precision (itype) - 1))
12677 {
12678 if (TYPE_UNSIGNED (itype))
12679 {
12680 itype = signed_type_for (itype);
12681 arg00 = fold_convert_loc (loc, itype, arg00);
12682 }
12683 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12684 type, arg00, build_zero_cst (itype));
12685 }
12686 }
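/* E.g. for 32-bit int x, (x >> 31) != 0 becomes x < 0 and
   (x >> 31) == 0 becomes x >= 0; an unsigned operand is first
   viewed in the corresponding signed type.  */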
12687
12688 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12689 if (integer_zerop (arg1)
12690 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12691 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12692 TREE_OPERAND (arg0, 1));
12693
12694 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12695 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12696 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12697 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12698 build_zero_cst (TREE_TYPE (arg0)));
12699 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12700 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12701 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12702 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12703 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12704 build_zero_cst (TREE_TYPE (arg0)));
12705
12706 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12707 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12708 && TREE_CODE (arg1) == INTEGER_CST
12709 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12710 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12711 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12712 TREE_OPERAND (arg0, 1), arg1));
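/* E.g. (x ^ 3) == 5 becomes x == 6, folding the two constants into
   one (example ours).  */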
12713
12714 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12715 (X & C) == 0 when C is a single bit. */
12716 if (TREE_CODE (arg0) == BIT_AND_EXPR
12717 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12718 && integer_zerop (arg1)
12719 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12720 {
12721 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12722 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12723 TREE_OPERAND (arg0, 1));
12724 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12725 type, tem,
12726 fold_convert_loc (loc, TREE_TYPE (arg0),
12727 arg1));
12728 }
12729
12730 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12731 constant C is a power of two, i.e. a single bit. */
12732 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12733 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12734 && integer_zerop (arg1)
12735 && integer_pow2p (TREE_OPERAND (arg0, 1))
12736 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12737 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12738 {
12739 tree arg00 = TREE_OPERAND (arg0, 0);
12740 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12741 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12742 }
12743
12744 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12745 when C is a power of two, i.e. a single bit. */
12746 if (TREE_CODE (arg0) == BIT_AND_EXPR
12747 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12748 && integer_zerop (arg1)
12749 && integer_pow2p (TREE_OPERAND (arg0, 1))
12750 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12751 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12752 {
12753 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12754 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12755 arg000, TREE_OPERAND (arg0, 1));
12756 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12757 tem, build_int_cst (TREE_TYPE (tem), 0));
12758 }
12759
12760 if (integer_zerop (arg1)
12761 && tree_expr_nonzero_p (arg0))
12762 {
12763 tree res = constant_boolean_node (code == NE_EXPR, type);
12764 return omit_one_operand_loc (loc, type, res, arg0);
12765 }
12766
12767 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12768 if (TREE_CODE (arg0) == NEGATE_EXPR
12769 && TREE_CODE (arg1) == NEGATE_EXPR)
12770 return fold_build2_loc (loc, code, type,
12771 TREE_OPERAND (arg0, 0),
12772 fold_convert_loc (loc, TREE_TYPE (arg0),
12773 TREE_OPERAND (arg1, 0)));
12774
12775 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
12776 if (TREE_CODE (arg0) == BIT_AND_EXPR
12777 && TREE_CODE (arg1) == BIT_AND_EXPR)
12778 {
12779 tree arg00 = TREE_OPERAND (arg0, 0);
12780 tree arg01 = TREE_OPERAND (arg0, 1);
12781 tree arg10 = TREE_OPERAND (arg1, 0);
12782 tree arg11 = TREE_OPERAND (arg1, 1);
12783 tree itype = TREE_TYPE (arg0);
12784
12785 if (operand_equal_p (arg01, arg11, 0))
12786 return fold_build2_loc (loc, code, type,
12787 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12788 fold_build2_loc (loc,
12789 BIT_XOR_EXPR, itype,
12790 arg00, arg10),
12791 arg01),
12792 build_zero_cst (itype));
12793
12794 if (operand_equal_p (arg01, arg10, 0))
12795 return fold_build2_loc (loc, code, type,
12796 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12797 fold_build2_loc (loc,
12798 BIT_XOR_EXPR, itype,
12799 arg00, arg11),
12800 arg01),
12801 build_zero_cst (itype));
12802
12803 if (operand_equal_p (arg00, arg11, 0))
12804 return fold_build2_loc (loc, code, type,
12805 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12806 fold_build2_loc (loc,
12807 BIT_XOR_EXPR, itype,
12808 arg01, arg10),
12809 arg00),
12810 build_zero_cst (itype));
12811
12812 if (operand_equal_p (arg00, arg10, 0))
12813 return fold_build2_loc (loc, code, type,
12814 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12815 fold_build2_loc (loc,
12816 BIT_XOR_EXPR, itype,
12817 arg01, arg11),
12818 arg00),
12819 build_zero_cst (itype));
12820 }
12821
12822 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12823 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12824 {
12825 tree arg00 = TREE_OPERAND (arg0, 0);
12826 tree arg01 = TREE_OPERAND (arg0, 1);
12827 tree arg10 = TREE_OPERAND (arg1, 0);
12828 tree arg11 = TREE_OPERAND (arg1, 1);
12829 tree itype = TREE_TYPE (arg0);
12830
12831 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12832 operand_equal_p guarantees no side-effects so we don't need
12833 to use omit_one_operand on Z. */
12834 if (operand_equal_p (arg01, arg11, 0))
12835 return fold_build2_loc (loc, code, type, arg00,
12836 fold_convert_loc (loc, TREE_TYPE (arg00),
12837 arg10));
12838 if (operand_equal_p (arg01, arg10, 0))
12839 return fold_build2_loc (loc, code, type, arg00,
12840 fold_convert_loc (loc, TREE_TYPE (arg00),
12841 arg11));
12842 if (operand_equal_p (arg00, arg11, 0))
12843 return fold_build2_loc (loc, code, type, arg01,
12844 fold_convert_loc (loc, TREE_TYPE (arg01),
12845 arg10));
12846 if (operand_equal_p (arg00, arg10, 0))
12847 return fold_build2_loc (loc, code, type, arg01,
12848 fold_convert_loc (loc, TREE_TYPE (arg01),
12849 arg11));
12850
12851 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12852 if (TREE_CODE (arg01) == INTEGER_CST
12853 && TREE_CODE (arg11) == INTEGER_CST)
12854 {
12855 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12856 fold_convert_loc (loc, itype, arg11));
12857 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12858 return fold_build2_loc (loc, code, type, tem,
12859 fold_convert_loc (loc, itype, arg10));
12860 }
12861 }
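/* E.g. (x ^ 1) == (y ^ 2) becomes (x ^ 3) == y, leaving a single
   XOR on one side (example ours).  */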
12862
12863 /* Attempt to simplify equality/inequality comparisons of complex
12864 values. Only lower the comparison if the result is known or
12865 can be simplified to a single scalar comparison. */
12866 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12867 || TREE_CODE (arg0) == COMPLEX_CST)
12868 && (TREE_CODE (arg1) == COMPLEX_EXPR
12869 || TREE_CODE (arg1) == COMPLEX_CST))
12870 {
12871 tree real0, imag0, real1, imag1;
12872 tree rcond, icond;
12873
12874 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12875 {
12876 real0 = TREE_OPERAND (arg0, 0);
12877 imag0 = TREE_OPERAND (arg0, 1);
12878 }
12879 else
12880 {
12881 real0 = TREE_REALPART (arg0);
12882 imag0 = TREE_IMAGPART (arg0);
12883 }
12884
12885 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12886 {
12887 real1 = TREE_OPERAND (arg1, 0);
12888 imag1 = TREE_OPERAND (arg1, 1);
12889 }
12890 else
12891 {
12892 real1 = TREE_REALPART (arg1);
12893 imag1 = TREE_IMAGPART (arg1);
12894 }
12895
12896 rcond = fold_binary_loc (loc, code, type, real0, real1);
12897 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12898 {
12899 if (integer_zerop (rcond))
12900 {
12901 if (code == EQ_EXPR)
12902 return omit_two_operands_loc (loc, type, boolean_false_node,
12903 imag0, imag1);
12904 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12905 }
12906 else
12907 {
12908 if (code == NE_EXPR)
12909 return omit_two_operands_loc (loc, type, boolean_true_node,
12910 imag0, imag1);
12911 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12912 }
12913 }
12914
12915 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12916 if (icond && TREE_CODE (icond) == INTEGER_CST)
12917 {
12918 if (integer_zerop (icond))
12919 {
12920 if (code == EQ_EXPR)
12921 return omit_two_operands_loc (loc, type, boolean_false_node,
12922 real0, real1);
12923 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12924 }
12925 else
12926 {
12927 if (code == NE_EXPR)
12928 return omit_two_operands_loc (loc, type, boolean_true_node,
12929 real0, real1);
12930 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12931 }
12932 }
12933 }
12934
12935 return NULL_TREE;
12936
12937 case LT_EXPR:
12938 case GT_EXPR:
12939 case LE_EXPR:
12940 case GE_EXPR:
12941 tem = fold_comparison (loc, code, type, op0, op1);
12942 if (tem != NULL_TREE)
12943 return tem;
12944
12945 /* Transform comparisons of the form X +- C CMP X. */
12946 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12947 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12948 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12949 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12950 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12951 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12952 {
12953 tree arg01 = TREE_OPERAND (arg0, 1);
12954 enum tree_code code0 = TREE_CODE (arg0);
12955 int is_positive;
12956
12957 if (TREE_CODE (arg01) == REAL_CST)
12958 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12959 else
12960 is_positive = tree_int_cst_sgn (arg01);
12961
12962 /* (X - c) > X becomes false. */
12963 if (code == GT_EXPR
12964 && ((code0 == MINUS_EXPR && is_positive >= 0)
12965 || (code0 == PLUS_EXPR && is_positive <= 0)))
12966 {
12967 if (TREE_CODE (arg01) == INTEGER_CST
12968 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12969 fold_overflow_warning (("assuming signed overflow does not "
12970 "occur when assuming that (X - c) > X "
12971 "is always false"),
12972 WARN_STRICT_OVERFLOW_ALL);
12973 return constant_boolean_node (0, type);
12974 }
12975
12976 /* Likewise (X + c) < X becomes false. */
12977 if (code == LT_EXPR
12978 && ((code0 == PLUS_EXPR && is_positive >= 0)
12979 || (code0 == MINUS_EXPR && is_positive <= 0)))
12980 {
12981 if (TREE_CODE (arg01) == INTEGER_CST
12982 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12983 fold_overflow_warning (("assuming signed overflow does not "
12984 "occur when assuming that "
12985 "(X + c) < X is always false"),
12986 WARN_STRICT_OVERFLOW_ALL);
12987 return constant_boolean_node (0, type);
12988 }
12989
12990 /* Convert (X - c) <= X to true. */
12991 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12992 && code == LE_EXPR
12993 && ((code0 == MINUS_EXPR && is_positive >= 0)
12994 || (code0 == PLUS_EXPR && is_positive <= 0)))
12995 {
12996 if (TREE_CODE (arg01) == INTEGER_CST
12997 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12998 fold_overflow_warning (("assuming signed overflow does not "
12999 "occur when assuming that "
13000 "(X - c) <= X is always true"),
13001 WARN_STRICT_OVERFLOW_ALL);
13002 return constant_boolean_node (1, type);
13003 }
13004
13005 /* Convert (X + c) >= X to true. */
13006 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13007 && code == GE_EXPR
13008 && ((code0 == PLUS_EXPR && is_positive >= 0)
13009 || (code0 == MINUS_EXPR && is_positive <= 0)))
13010 {
13011 if (TREE_CODE (arg01) == INTEGER_CST
13012 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13013 fold_overflow_warning (("assuming signed overflow does not "
13014 "occur when assuming that "
13015 "(X + c) >= X is always true"),
13016 WARN_STRICT_OVERFLOW_ALL);
13017 return constant_boolean_node (1, type);
13018 }
13019
13020 if (TREE_CODE (arg01) == INTEGER_CST)
13021 {
13022 /* Convert X + c > X and X - c < X to true for integers. */
13023 if (code == GT_EXPR
13024 && ((code0 == PLUS_EXPR && is_positive > 0)
13025 || (code0 == MINUS_EXPR && is_positive < 0)))
13026 {
13027 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13028 fold_overflow_warning (("assuming signed overflow does "
13029 "not occur when assuming that "
13030 "(X + c) > X is always true"),
13031 WARN_STRICT_OVERFLOW_ALL);
13032 return constant_boolean_node (1, type);
13033 }
13034
13035 if (code == LT_EXPR
13036 && ((code0 == MINUS_EXPR && is_positive > 0)
13037 || (code0 == PLUS_EXPR && is_positive < 0)))
13038 {
13039 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13040 fold_overflow_warning (("assuming signed overflow does "
13041 "not occur when assuming that "
13042 "(X - c) < X is always true"),
13043 WARN_STRICT_OVERFLOW_ALL);
13044 return constant_boolean_node (1, type);
13045 }
13046
13047 /* Convert X + c <= X and X - c >= X to false for integers. */
13048 if (code == LE_EXPR
13049 && ((code0 == PLUS_EXPR && is_positive > 0)
13050 || (code0 == MINUS_EXPR && is_positive < 0)))
13051 {
13052 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13053 fold_overflow_warning (("assuming signed overflow does "
13054 "not occur when assuming that "
13055 "(X + c) <= X is always false"),
13056 WARN_STRICT_OVERFLOW_ALL);
13057 return constant_boolean_node (0, type);
13058 }
13059
13060 if (code == GE_EXPR
13061 && ((code0 == MINUS_EXPR && is_positive > 0)
13062 || (code0 == PLUS_EXPR && is_positive < 0)))
13063 {
13064 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13065 fold_overflow_warning (("assuming signed overflow does "
13066 "not occur when assuming that "
13067 "(X - c) >= X is always false"),
13068 WARN_STRICT_OVERFLOW_ALL);
13069 return constant_boolean_node (0, type);
13070 }
13071 }
13072 }
13073
13074 /* Comparisons with the highest or lowest possible integer of
13075 the specified precision will have known values. */
13076 {
13077 tree arg1_type = TREE_TYPE (arg1);
13078 unsigned int prec = TYPE_PRECISION (arg1_type);
13079
13080 if (TREE_CODE (arg1) == INTEGER_CST
13081 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13082 {
13083 wide_int max = wi::max_value (arg1_type);
13084 wide_int signed_max = wi::max_value (prec, SIGNED);
13085 wide_int min = wi::min_value (arg1_type);
13086
13087 if (wi::eq_p (arg1, max))
13088 switch (code)
13089 {
13090 case GT_EXPR:
13091 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13092
13093 case GE_EXPR:
13094 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13095
13096 case LE_EXPR:
13097 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13098
13099 case LT_EXPR:
13100 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13101
13102 /* The GE_EXPR and LT_EXPR cases above are not normally
13103 reached because of previous transformations. */
13104
13105 default:
13106 break;
13107 }
13108 else if (wi::eq_p (arg1, max - 1))
13109 switch (code)
13110 {
13111 case GT_EXPR:
13112 arg1 = const_binop (PLUS_EXPR, arg1,
13113 build_int_cst (TREE_TYPE (arg1), 1));
13114 return fold_build2_loc (loc, EQ_EXPR, type,
13115 fold_convert_loc (loc,
13116 TREE_TYPE (arg1), arg0),
13117 arg1);
13118 case LE_EXPR:
13119 arg1 = const_binop (PLUS_EXPR, arg1,
13120 build_int_cst (TREE_TYPE (arg1), 1));
13121 return fold_build2_loc (loc, NE_EXPR, type,
13122 fold_convert_loc (loc, TREE_TYPE (arg1),
13123 arg0),
13124 arg1);
13125 default:
13126 break;
13127 }
13128 else if (wi::eq_p (arg1, min))
13129 switch (code)
13130 {
13131 case LT_EXPR:
13132 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13133
13134 case LE_EXPR:
13135 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13136
13137 case GE_EXPR:
13138 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13139
13140 case GT_EXPR:
13141 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13142
13143 default:
13144 break;
13145 }
13146 else if (wi::eq_p (arg1, min + 1))
13147 switch (code)
13148 {
13149 case GE_EXPR:
13150 arg1 = const_binop (MINUS_EXPR, arg1,
13151 build_int_cst (TREE_TYPE (arg1), 1));
13152 return fold_build2_loc (loc, NE_EXPR, type,
13153 fold_convert_loc (loc,
13154 TREE_TYPE (arg1), arg0),
13155 arg1);
13156 case LT_EXPR:
13157 arg1 = const_binop (MINUS_EXPR, arg1,
13158 build_int_cst (TREE_TYPE (arg1), 1));
13159 return fold_build2_loc (loc, EQ_EXPR, type,
13160 fold_convert_loc (loc, TREE_TYPE (arg1),
13161 arg0),
13162 arg1);
13163 default:
13164 break;
13165 }
13166
13167 else if (wi::eq_p (arg1, signed_max)
13168 && TYPE_UNSIGNED (arg1_type)
13169 /* We will flip the signedness of the comparison operator
13170 associated with the mode of arg1, so the sign bit is
13171 specified by this mode. Check that arg1 is the signed
13172 max associated with this sign bit. */
13173 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13174 /* signed_type does not work on pointer types. */
13175 && INTEGRAL_TYPE_P (arg1_type))
13176 {
13177 /* The following case also applies to X < signed_max+1
13178 and X >= signed_max+1 because of previous transformations. */
13179 if (code == LE_EXPR || code == GT_EXPR)
13180 {
13181 tree st = signed_type_for (arg1_type);
13182 return fold_build2_loc (loc,
13183 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13184 type, fold_convert_loc (loc, st, arg0),
13185 build_int_cst (st, 0));
13186 }
13187 }
13188 }
13189 }
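/* Worked instances (ours): for an 8-bit unsigned operand x,
   x > 255 folds to false, x > 254 becomes x == 255, and x < 0
   folds to false via the minimum-value cases.  */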
13190
13191 /* If we are comparing an ABS_EXPR with a constant, we can
13192 convert all the cases into explicit comparisons, but they may
13193 well not be faster than doing the ABS and one comparison.
13194 But ABS (X) <= C is a range comparison, which becomes a subtraction
13195 and a comparison, and is probably faster. */
13196 if (code == LE_EXPR
13197 && TREE_CODE (arg1) == INTEGER_CST
13198 && TREE_CODE (arg0) == ABS_EXPR
13199 && ! TREE_SIDE_EFFECTS (arg0)
13200 && (0 != (tem = negate_expr (arg1)))
13201 && TREE_CODE (tem) == INTEGER_CST
13202 && !TREE_OVERFLOW (tem))
13203 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13204 build2 (GE_EXPR, type,
13205 TREE_OPERAND (arg0, 0), tem),
13206 build2 (LE_EXPR, type,
13207 TREE_OPERAND (arg0, 0), arg1));
13208
13209 /* Convert ABS_EXPR<x> >= 0 to true. */
13210 strict_overflow_p = false;
13211 if (code == GE_EXPR
13212 && (integer_zerop (arg1)
13213 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13214 && real_zerop (arg1)))
13215 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13216 {
13217 if (strict_overflow_p)
13218 fold_overflow_warning (("assuming signed overflow does not occur "
13219 "when simplifying comparison of "
13220 "absolute value and zero"),
13221 WARN_STRICT_OVERFLOW_CONDITIONAL);
13222 return omit_one_operand_loc (loc, type,
13223 constant_boolean_node (true, type),
13224 arg0);
13225 }
13226
13227 /* Convert ABS_EXPR<x> < 0 to false. */
13228 strict_overflow_p = false;
13229 if (code == LT_EXPR
13230 && (integer_zerop (arg1) || real_zerop (arg1))
13231 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13232 {
13233 if (strict_overflow_p)
13234 fold_overflow_warning (("assuming signed overflow does not occur "
13235 "when simplifying comparison of "
13236 "absolute value and zero"),
13237 WARN_STRICT_OVERFLOW_CONDITIONAL);
13238 return omit_one_operand_loc (loc, type,
13239 constant_boolean_node (false, type),
13240 arg0);
13241 }
13242
13243 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13244 and similarly for >= into !=. */
13245 if ((code == LT_EXPR || code == GE_EXPR)
13246 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13247 && TREE_CODE (arg1) == LSHIFT_EXPR
13248 && integer_onep (TREE_OPERAND (arg1, 0)))
13249 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13250 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13251 TREE_OPERAND (arg1, 1)),
13252 build_zero_cst (TREE_TYPE (arg0)));
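/* E.g. for unsigned x, x < (1 << y) becomes (x >> y) == 0 and
   x >= (1 << y) becomes (x >> y) != 0 (names illustrative).  */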
13253
13254 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13255 otherwise Y might be >= # of bits in X's type and thus e.g.
13256 (unsigned char) (1 << Y) for Y == 15 might be 0.
13257 If the cast is widening, then 1 << Y should have unsigned type,
13258 otherwise if Y is the number of bits in the signed shift type minus 1,
13259 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13260 Y == 31 might be 0xffffffff80000000. */
13261 if ((code == LT_EXPR || code == GE_EXPR)
13262 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13263 && CONVERT_EXPR_P (arg1)
13264 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13265 && (TYPE_PRECISION (TREE_TYPE (arg1))
13266 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13267 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13268 || (TYPE_PRECISION (TREE_TYPE (arg1))
13269 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13270 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13271 {
13272 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13273 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13274 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13275 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13276 build_zero_cst (TREE_TYPE (arg0)));
13277 }
13278
13279 return NULL_TREE;
13280
13281 case UNORDERED_EXPR:
13282 case ORDERED_EXPR:
13283 case UNLT_EXPR:
13284 case UNLE_EXPR:
13285 case UNGT_EXPR:
13286 case UNGE_EXPR:
13287 case UNEQ_EXPR:
13288 case LTGT_EXPR:
13289 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13290 {
13291 t1 = fold_relational_const (code, type, arg0, arg1);
13292 if (t1 != NULL_TREE)
13293 return t1;
13294 }
13295
13296 /* If the first operand is NaN, the result is constant. */
13297 if (TREE_CODE (arg0) == REAL_CST
13298 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13299 && (code != LTGT_EXPR || ! flag_trapping_math))
13300 {
13301 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13302 ? integer_zero_node
13303 : integer_one_node;
13304 return omit_one_operand_loc (loc, type, t1, arg1);
13305 }
13306
13307 /* If the second operand is NaN, the result is constant. */
13308 if (TREE_CODE (arg1) == REAL_CST
13309 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13310 && (code != LTGT_EXPR || ! flag_trapping_math))
13311 {
13312 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13313 ? integer_zero_node
13314 : integer_one_node;
13315 return omit_one_operand_loc (loc, type, t1, arg0);
13316 }
13317
13318 /* Simplify unordered comparison of something with itself. */
13319 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13320 && operand_equal_p (arg0, arg1, 0))
13321 return constant_boolean_node (1, type);
13322
13323 if (code == LTGT_EXPR
13324 && !flag_trapping_math
13325 && operand_equal_p (arg0, arg1, 0))
13326 return constant_boolean_node (0, type);
13327
13328 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13329 {
13330 tree targ0 = strip_float_extensions (arg0);
13331 tree targ1 = strip_float_extensions (arg1);
13332 tree newtype = TREE_TYPE (targ0);
13333
13334 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13335 newtype = TREE_TYPE (targ1);
13336
13337 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13338 return fold_build2_loc (loc, code, type,
13339 fold_convert_loc (loc, newtype, targ0),
13340 fold_convert_loc (loc, newtype, targ1));
13341 }
13342
13343 return NULL_TREE;
13344
13345 case COMPOUND_EXPR:
13346 /* When pedantic, a compound expression can be neither an lvalue
13347 nor an integer constant expression. */
13348 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13349 return NULL_TREE;
13350       /* Don't let (0, 0) be a null pointer constant.  */
13351 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13352 : fold_convert_loc (loc, type, arg1);
13353 return pedantic_non_lvalue_loc (loc, tem);
13354
13355 case COMPLEX_EXPR:
13356 if ((TREE_CODE (arg0) == REAL_CST
13357 && TREE_CODE (arg1) == REAL_CST)
13358 || (TREE_CODE (arg0) == INTEGER_CST
13359 && TREE_CODE (arg1) == INTEGER_CST))
13360 return build_complex (type, arg0, arg1);
13361 if (TREE_CODE (arg0) == REALPART_EXPR
13362 && TREE_CODE (arg1) == IMAGPART_EXPR
13363 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13364 && operand_equal_p (TREE_OPERAND (arg0, 0),
13365 TREE_OPERAND (arg1, 0), 0))
13366 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13367 TREE_OPERAND (arg1, 0));
13368 return NULL_TREE;
13369
13370 case ASSERT_EXPR:
13371 /* An ASSERT_EXPR should never be passed to fold_binary. */
13372 gcc_unreachable ();
13373
13374 case VEC_PACK_TRUNC_EXPR:
13375 case VEC_PACK_FIX_TRUNC_EXPR:
13376 {
13377 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13378 tree *elts;
13379
13380 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13381 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13382 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13383 return NULL_TREE;
13384
13385 elts = XALLOCAVEC (tree, nelts);
13386 if (!vec_cst_ctor_to_array (arg0, elts)
13387 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13388 return NULL_TREE;
13389
13390 for (i = 0; i < nelts; i++)
13391 {
13392 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13393 ? NOP_EXPR : FIX_TRUNC_EXPR,
13394 TREE_TYPE (type), elts[i]);
13395 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13396 return NULL_TREE;
13397 }
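	/* E.g., packing the two V2DI constants {1, 2} and {3, 4} with
	   VEC_PACK_TRUNC_EXPR folds to the V4SI constant {1, 2, 3, 4},
	   each element narrowed by fold_convert_const (an illustrative
	   sketch; the element counts must match as asserted above).  */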
13398
13399 return build_vector (type, elts);
13400 }
13401
13402 case VEC_WIDEN_MULT_LO_EXPR:
13403 case VEC_WIDEN_MULT_HI_EXPR:
13404 case VEC_WIDEN_MULT_EVEN_EXPR:
13405 case VEC_WIDEN_MULT_ODD_EXPR:
13406 {
13407 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13408 unsigned int out, ofs, scale;
13409 tree *elts;
13410
13411 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13412 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13413 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13414 return NULL_TREE;
13415
13416 elts = XALLOCAVEC (tree, nelts * 4);
13417 if (!vec_cst_ctor_to_array (arg0, elts)
13418 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13419 return NULL_TREE;
13420
13421 if (code == VEC_WIDEN_MULT_LO_EXPR)
13422 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13423 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13424 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13425 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13426 scale = 1, ofs = 0;
13427 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13428 scale = 1, ofs = 1;
13429
13430 for (out = 0; out < nelts; out++)
13431 {
13432 unsigned int in1 = (out << scale) + ofs;
13433 unsigned int in2 = in1 + nelts * 2;
13434 tree t1, t2;
13435
13436 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13437 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13438
13439 if (t1 == NULL_TREE || t2 == NULL_TREE)
13440 return NULL_TREE;
13441 elts[out] = const_binop (MULT_EXPR, t1, t2);
13442 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13443 return NULL_TREE;
13444 }
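	/* E.g., VEC_WIDEN_MULT_EVEN_EXPR of the V4SI constants
	   {1, 2, 3, 4} and {5, 6, 7, 8} multiplies the even-numbered
	   elements of each input and folds to the V2DI constant
	   {1*5, 3*7} = {5, 21} (an illustrative sketch).  */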
13445
13446 return build_vector (type, elts);
13447 }
13448
13449 default:
13450 return NULL_TREE;
13451 } /* switch (code) */
13452 }
13453
13454 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13455 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13456 of GOTO_EXPR. */
13457
13458 static tree
13459 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13460 {
13461 switch (TREE_CODE (*tp))
13462 {
13463 case LABEL_EXPR:
13464 return *tp;
13465
13466 case GOTO_EXPR:
13467 *walk_subtrees = 0;
13468
13469 /* ... fall through ... */
13470
13471 default:
13472 return NULL_TREE;
13473 }
13474 }
13475
13476 /* Return whether the sub-tree ST contains a label which is accessible from
13477 outside the sub-tree. */
13478
13479 static bool
13480 contains_label_p (tree st)
13481 {
13482 return
13483    (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13484 }
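/* For example (GNU C, an illustrative sketch): in

     c ? 0 : ({ lab: ; 1; })

   the dead arm may contain a label such as "lab" that a jump from
   outside the arm still targets, so the arm cannot simply be
   discarded; contains_label_p detects that situation.  */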
13485
13486 /* Fold a ternary expression of code CODE and type TYPE with operands
13487 OP0, OP1, and OP2. Return the folded expression if folding is
13488 successful. Otherwise, return NULL_TREE. */
13489
13490 tree
13491 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13492 tree op0, tree op1, tree op2)
13493 {
13494 tree tem;
13495 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13496 enum tree_code_class kind = TREE_CODE_CLASS (code);
13497
13498 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13499 && TREE_CODE_LENGTH (code) == 3);
13500
13501 /* If this is a commutative operation, and OP0 is a constant, move it
13502 to OP1 to reduce the number of tests below. */
13503 if (commutative_ternary_tree_code (code)
13504 && tree_swap_operands_p (op0, op1, true))
13505 return fold_build3_loc (loc, code, type, op1, op0, op2);
13506
13507 tem = generic_simplify (loc, code, type, op0, op1, op2);
13508 if (tem)
13509 return tem;
13510
13511 /* Strip any conversions that don't change the mode. This is safe
13512 for every expression, except for a comparison expression because
13513 its signedness is derived from its operands. So, in the latter
13514 case, only strip conversions that don't change the signedness.
13515
13516 Note that this is done as an internal manipulation within the
13517 constant folder, in order to find the simplest representation of
13518 	 the arguments so that their form can be studied.  In any case,
13519 	 the appropriate type conversions should be put back in the tree
13520 	 that comes out of the constant folder.  */
13521 if (op0)
13522 {
13523 arg0 = op0;
13524 STRIP_NOPS (arg0);
13525 }
13526
13527 if (op1)
13528 {
13529 arg1 = op1;
13530 STRIP_NOPS (arg1);
13531 }
13532
13533 if (op2)
13534 {
13535 arg2 = op2;
13536 STRIP_NOPS (arg2);
13537 }
13538
13539 switch (code)
13540 {
13541 case COMPONENT_REF:
13542 if (TREE_CODE (arg0) == CONSTRUCTOR
13543 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13544 {
13545 unsigned HOST_WIDE_INT idx;
13546 tree field, value;
13547 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13548 if (field == arg1)
13549 return value;
13550 }
13551 return NULL_TREE;
13552
13553 case COND_EXPR:
13554 case VEC_COND_EXPR:
13555 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13556 so all simple results must be passed through pedantic_non_lvalue. */
13557 if (TREE_CODE (arg0) == INTEGER_CST)
13558 {
13559 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13560 tem = integer_zerop (arg0) ? op2 : op1;
13561 /* Only optimize constant conditions when the selected branch
13562 has the same type as the COND_EXPR. This avoids optimizing
13563 away "c ? x : throw", where the throw has a void type.
13564 	     Also avoid throwing away an operand that contains a label.  */
13565 if ((!TREE_SIDE_EFFECTS (unused_op)
13566 || !contains_label_p (unused_op))
13567 && (! VOID_TYPE_P (TREE_TYPE (tem))
13568 || VOID_TYPE_P (type)))
13569 return pedantic_non_lvalue_loc (loc, tem);
13570 return NULL_TREE;
13571 }
13572 else if (TREE_CODE (arg0) == VECTOR_CST)
13573 {
13574 if (integer_all_onesp (arg0))
13575 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13576 if (integer_zerop (arg0))
13577 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13578
13579 if ((TREE_CODE (arg1) == VECTOR_CST
13580 || TREE_CODE (arg1) == CONSTRUCTOR)
13581 && (TREE_CODE (arg2) == VECTOR_CST
13582 || TREE_CODE (arg2) == CONSTRUCTOR))
13583 {
13584 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13585 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13586 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13587 for (i = 0; i < nelts; i++)
13588 {
13589 tree val = VECTOR_CST_ELT (arg0, i);
13590 if (integer_all_onesp (val))
13591 sel[i] = i;
13592 else if (integer_zerop (val))
13593 sel[i] = nelts + i;
13594 else /* Currently unreachable. */
13595 return NULL_TREE;
13596 }
13597 tree t = fold_vec_perm (type, arg1, arg2, sel);
13598 if (t != NULL_TREE)
13599 return t;
13600 }
13601 }
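	  /* E.g., a constant mask {-1, 0, -1, 0} selecting between
	     4-element vectors V0 and V1 becomes
	     VEC_PERM_EXPR <V0, V1, {0, 5, 2, 7}>: element I is taken
	     from V0 when the mask element is all ones, otherwise from
	     V1 (at offset NELTS + I).  */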
13602
13603 if (operand_equal_p (arg1, op2, 0))
13604 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13605
13606 /* If we have A op B ? A : C, we may be able to convert this to a
13607 simpler expression, depending on the operation and the values
13608 of B and C. Signed zeros prevent all of these transformations,
13609 for reasons given above each one.
13610
13611 Also try swapping the arguments and inverting the conditional. */
13612 if (COMPARISON_CLASS_P (arg0)
13613 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13614 arg1, TREE_OPERAND (arg0, 1))
13615 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13616 {
13617 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13618 if (tem)
13619 return tem;
13620 }
13621
13622 if (COMPARISON_CLASS_P (arg0)
13623 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13624 op2,
13625 TREE_OPERAND (arg0, 1))
13626 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13627 {
13628 location_t loc0 = expr_location_or (arg0, loc);
13629 tem = fold_invert_truthvalue (loc0, arg0);
13630 if (tem && COMPARISON_CLASS_P (tem))
13631 {
13632 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13633 if (tem)
13634 return tem;
13635 }
13636 }
13637
13638 /* If the second operand is simpler than the third, swap them
13639 since that produces better jump optimization results. */
13640 if (truth_value_p (TREE_CODE (arg0))
13641 && tree_swap_operands_p (op1, op2, false))
13642 {
13643 location_t loc0 = expr_location_or (arg0, loc);
13644 /* See if this can be inverted. If it can't, possibly because
13645 it was a floating-point inequality comparison, don't do
13646 anything. */
13647 tem = fold_invert_truthvalue (loc0, arg0);
13648 if (tem)
13649 return fold_build3_loc (loc, code, type, tem, op2, op1);
13650 }
13651
13652 /* Convert A ? 1 : 0 to simply A. */
13653 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13654 : (integer_onep (op1)
13655 && !VECTOR_TYPE_P (type)))
13656 && integer_zerop (op2)
13657 /* If we try to convert OP0 to our type, the
13658 call to fold will try to move the conversion inside
13659 a COND, which will recurse. In that case, the COND_EXPR
13660 is probably the best choice, so leave it alone. */
13661 && type == TREE_TYPE (arg0))
13662 return pedantic_non_lvalue_loc (loc, arg0);
13663
13664 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13665 over COND_EXPR in cases such as floating point comparisons. */
13666 if (integer_zerop (op1)
13667 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13668 : (integer_onep (op2)
13669 && !VECTOR_TYPE_P (type)))
13670 && truth_value_p (TREE_CODE (arg0)))
13671 return pedantic_non_lvalue_loc (loc,
13672 fold_convert_loc (loc, type,
13673 invert_truthvalue_loc (loc,
13674 arg0)));
13675
13676 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13677 if (TREE_CODE (arg0) == LT_EXPR
13678 && integer_zerop (TREE_OPERAND (arg0, 1))
13679 && integer_zerop (op2)
13680 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13681 {
13682 /* sign_bit_p looks through both zero and sign extensions,
13683 but for this optimization only sign extensions are
13684 usable. */
13685 tree tem2 = TREE_OPERAND (arg0, 0);
13686 while (tem != tem2)
13687 {
13688 if (TREE_CODE (tem2) != NOP_EXPR
13689 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13690 {
13691 tem = NULL_TREE;
13692 break;
13693 }
13694 tem2 = TREE_OPERAND (tem2, 0);
13695 }
13696 /* sign_bit_p only checks ARG1 bits within A's precision.
13697 If <sign bit of A> has wider type than A, bits outside
13698 of A's precision in <sign bit of A> need to be checked.
13699 If they are all 0, this optimization needs to be done
13700 	     in unsigned A's type; if they are all 1, in signed A's type;
13701 	     otherwise this can't be done.  */
13702 if (tem
13703 && TYPE_PRECISION (TREE_TYPE (tem))
13704 < TYPE_PRECISION (TREE_TYPE (arg1))
13705 && TYPE_PRECISION (TREE_TYPE (tem))
13706 < TYPE_PRECISION (type))
13707 {
13708 int inner_width, outer_width;
13709 tree tem_type;
13710
13711 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13712 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13713 if (outer_width > TYPE_PRECISION (type))
13714 outer_width = TYPE_PRECISION (type);
13715
13716 wide_int mask = wi::shifted_mask
13717 (inner_width, outer_width - inner_width, false,
13718 TYPE_PRECISION (TREE_TYPE (arg1)));
13719
13720 wide_int common = mask & arg1;
13721 if (common == mask)
13722 {
13723 tem_type = signed_type_for (TREE_TYPE (tem));
13724 tem = fold_convert_loc (loc, tem_type, tem);
13725 }
13726 else if (common == 0)
13727 {
13728 tem_type = unsigned_type_for (TREE_TYPE (tem));
13729 tem = fold_convert_loc (loc, tem_type, tem);
13730 }
13731 else
13732 tem = NULL;
13733 }
13734
13735 if (tem)
13736 return
13737 fold_convert_loc (loc, type,
13738 fold_build2_loc (loc, BIT_AND_EXPR,
13739 TREE_TYPE (tem), tem,
13740 fold_convert_loc (loc,
13741 TREE_TYPE (tem),
13742 arg1)));
13743 }
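	  /* A worked example, assuming 32-bit int:
	       A < 0 ? 0x80000000 : 0
	     recognizes 0x80000000 as A's sign bit and folds to
	     A & 0x80000000 (converted back to TYPE).  */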
13744
13745 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13746 already handled above. */
13747 if (TREE_CODE (arg0) == BIT_AND_EXPR
13748 && integer_onep (TREE_OPERAND (arg0, 1))
13749 && integer_zerop (op2)
13750 && integer_pow2p (arg1))
13751 {
13752 tree tem = TREE_OPERAND (arg0, 0);
13753 STRIP_NOPS (tem);
13754 if (TREE_CODE (tem) == RSHIFT_EXPR
13755 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13756 	    && ((unsigned HOST_WIDE_INT) tree_log2 (arg1)
13757 		== tree_to_uhwi (TREE_OPERAND (tem, 1))))
13758 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13759 TREE_OPERAND (tem, 0), arg1);
13760 }
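	  /* E.g., with N == 3:  ((A >> 3) & 1) ? 8 : 0  becomes  A & 8,
	     since the condition and the constant test the same bit.  */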
13761
13762 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13763 is probably obsolete because the first operand should be a
13764 truth value (that's why we have the two cases above), but let's
13765 leave it in until we can confirm this for all front-ends. */
13766 if (integer_zerop (op2)
13767 && TREE_CODE (arg0) == NE_EXPR
13768 && integer_zerop (TREE_OPERAND (arg0, 1))
13769 && integer_pow2p (arg1)
13770 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13771 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13772 arg1, OEP_ONLY_CONST))
13773 return pedantic_non_lvalue_loc (loc,
13774 fold_convert_loc (loc, type,
13775 TREE_OPERAND (arg0, 0)));
13776
13777 /* Disable the transformations below for vectors, since
13778 fold_binary_op_with_conditional_arg may undo them immediately,
13779 yielding an infinite loop. */
13780 if (code == VEC_COND_EXPR)
13781 return NULL_TREE;
13782
13783 /* Convert A ? B : 0 into A && B if A and B are truth values. */
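      /* E.g., (x > 0) ? (y < 2) : 0 becomes (x > 0) && (y < 2).  */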
13784 if (integer_zerop (op2)
13785 && truth_value_p (TREE_CODE (arg0))
13786 && truth_value_p (TREE_CODE (arg1))
13787 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13788 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13789 : TRUTH_ANDIF_EXPR,
13790 type, fold_convert_loc (loc, type, arg0), arg1);
13791
13792 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13793       if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13794 && truth_value_p (TREE_CODE (arg0))
13795 && truth_value_p (TREE_CODE (arg1))
13796 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13797 {
13798 location_t loc0 = expr_location_or (arg0, loc);
13799 /* Only perform transformation if ARG0 is easily inverted. */
13800 tem = fold_invert_truthvalue (loc0, arg0);
13801 if (tem)
13802 return fold_build2_loc (loc, code == VEC_COND_EXPR
13803 ? BIT_IOR_EXPR
13804 : TRUTH_ORIF_EXPR,
13805 type, fold_convert_loc (loc, type, tem),
13806 arg1);
13807 }
13808
13809 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13810 if (integer_zerop (arg1)
13811 && truth_value_p (TREE_CODE (arg0))
13812 && truth_value_p (TREE_CODE (op2))
13813 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13814 {
13815 location_t loc0 = expr_location_or (arg0, loc);
13816 /* Only perform transformation if ARG0 is easily inverted. */
13817 tem = fold_invert_truthvalue (loc0, arg0);
13818 if (tem)
13819 return fold_build2_loc (loc, code == VEC_COND_EXPR
13820 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13821 type, fold_convert_loc (loc, type, tem),
13822 op2);
13823 }
13824
13825 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13826       if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13827 && truth_value_p (TREE_CODE (arg0))
13828 && truth_value_p (TREE_CODE (op2))
13829 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13830 return fold_build2_loc (loc, code == VEC_COND_EXPR
13831 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13832 type, fold_convert_loc (loc, type, arg0), op2);
13833
13834 return NULL_TREE;
13835
13836 case CALL_EXPR:
13837 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13838 of fold_ternary on them. */
13839 gcc_unreachable ();
13840
13841 case BIT_FIELD_REF:
13842 if ((TREE_CODE (arg0) == VECTOR_CST
13843 || (TREE_CODE (arg0) == CONSTRUCTOR
13844 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13845 && (type == TREE_TYPE (TREE_TYPE (arg0))
13846 || (TREE_CODE (type) == VECTOR_TYPE
13847 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13848 {
13849 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13850 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13851 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13852 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13853
13854 if (n != 0
13855 && (idx % width) == 0
13856 && (n % width) == 0
13857 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13858 {
13859 idx = idx / width;
13860 n = n / width;
13861
13862 if (TREE_CODE (arg0) == VECTOR_CST)
13863 {
13864 if (n == 1)
13865 return VECTOR_CST_ELT (arg0, idx);
13866
13867 tree *vals = XALLOCAVEC (tree, n);
13868 for (unsigned i = 0; i < n; ++i)
13869 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13870 return build_vector (type, vals);
13871 }
13872
13873 /* Constructor elements can be subvectors. */
13874 unsigned HOST_WIDE_INT k = 1;
13875 if (CONSTRUCTOR_NELTS (arg0) != 0)
13876 {
13877 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13878 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13879 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13880 }
13881
13882 /* We keep an exact subset of the constructor elements. */
13883 if ((idx % k) == 0 && (n % k) == 0)
13884 {
13885 if (CONSTRUCTOR_NELTS (arg0) == 0)
13886 return build_constructor (type, NULL);
13887 idx /= k;
13888 n /= k;
13889 if (n == 1)
13890 {
13891 if (idx < CONSTRUCTOR_NELTS (arg0))
13892 return CONSTRUCTOR_ELT (arg0, idx)->value;
13893 return build_zero_cst (type);
13894 }
13895
13896 vec<constructor_elt, va_gc> *vals;
13897 vec_alloc (vals, n);
13898 for (unsigned i = 0;
13899 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13900 ++i)
13901 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13902 CONSTRUCTOR_ELT
13903 (arg0, idx + i)->value);
13904 return build_constructor (type, vals);
13905 }
13906 /* The bitfield references a single constructor element. */
13907 else if (idx + n <= (idx / k + 1) * k)
13908 {
13909 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13910 return build_zero_cst (type);
13911 else if (n == k)
13912 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13913 else
13914 return fold_build3_loc (loc, code, type,
13915 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13916 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13917 }
13918 }
13919 }
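	/* E.g., BIT_FIELD_REF <{1, 2, 3, 4}, 64, 64> of a V4SI constant
	   with 32-bit elements selects elements 2 and 3 (idx == 2,
	   n == 2) and folds to the V2SI constant {3, 4}.  */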
13920
13921       /* A bit-field-ref that references the full argument can be stripped.  */
13922 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13923 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13924 && integer_zerop (op2))
13925 return fold_convert_loc (loc, type, arg0);
13926
13927 /* On constants we can use native encode/interpret to constant
13928 fold (nearly) all BIT_FIELD_REFs. */
13929 if (CONSTANT_CLASS_P (arg0)
13930 && can_native_interpret_type_p (type)
13931 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13932 	  /* This limitation should not be necessary; we just need to
13933 round this up to mode size. */
13934 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13935 /* Need bit-shifting of the buffer to relax the following. */
13936 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13937 {
13938 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13939 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13940 unsigned HOST_WIDE_INT clen;
13941 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13942 /* ??? We cannot tell native_encode_expr to start at
13943 	     an arbitrary byte.  So limit ourselves to a reasonable
13944 	     amount of work.  */
13945 if (clen <= 4096)
13946 {
13947 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13948 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13949 if (len > 0
13950 && len * BITS_PER_UNIT >= bitpos + bitsize)
13951 {
13952 tree v = native_interpret_expr (type,
13953 b + bitpos / BITS_PER_UNIT,
13954 bitsize / BITS_PER_UNIT);
13955 if (v)
13956 return v;
13957 }
13958 }
13959 }
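	/* E.g., a BIT_FIELD_REF of bits 32..63 of a 64-bit integer
	   constant is folded by serializing the constant into a byte
	   buffer with native_encode_expr and reinterpreting bytes 4..7
	   (on a little-endian target) via native_interpret_expr.  */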
13960
13961 return NULL_TREE;
13962
13963 case FMA_EXPR:
13964 /* For integers we can decompose the FMA if possible. */
13965 if (TREE_CODE (arg0) == INTEGER_CST
13966 && TREE_CODE (arg1) == INTEGER_CST)
13967 return fold_build2_loc (loc, PLUS_EXPR, type,
13968 const_binop (MULT_EXPR, arg0, arg1), arg2);
13969 if (integer_zerop (arg2))
13970 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13971
13972 return fold_fma (loc, type, arg0, arg1, arg2);
13973
13974 case VEC_PERM_EXPR:
13975 if (TREE_CODE (arg2) == VECTOR_CST)
13976 {
13977 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13978 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13979 unsigned char *sel2 = sel + nelts;
13980 bool need_mask_canon = false;
13981 bool need_mask_canon2 = false;
13982 bool all_in_vec0 = true;
13983 bool all_in_vec1 = true;
13984 bool maybe_identity = true;
13985 bool single_arg = (op0 == op1);
13986 bool changed = false;
13987
13988 mask2 = 2 * nelts - 1;
13989 mask = single_arg ? (nelts - 1) : mask2;
13990 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13991 for (i = 0; i < nelts; i++)
13992 {
13993 tree val = VECTOR_CST_ELT (arg2, i);
13994 if (TREE_CODE (val) != INTEGER_CST)
13995 return NULL_TREE;
13996
13997 /* Make sure that the perm value is in an acceptable
13998 range. */
13999 wide_int t = val;
14000 need_mask_canon |= wi::gtu_p (t, mask);
14001 need_mask_canon2 |= wi::gtu_p (t, mask2);
14002 sel[i] = t.to_uhwi () & mask;
14003 sel2[i] = t.to_uhwi () & mask2;
14004
14005 if (sel[i] < nelts)
14006 all_in_vec1 = false;
14007 else
14008 all_in_vec0 = false;
14009
14010 if ((sel[i] & (nelts-1)) != i)
14011 maybe_identity = false;
14012 }
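	  /* At this point each SEL element has been reduced modulo the
	     selectable range: e.g. with nelts == 4, a mask value of 9
	     becomes 9 & 7 == 1 in the two-operand form (9 & 3 == 1 in
	     the single-operand form), and need_mask_canon records that
	     the mask operand must be rebuilt.  */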
14013
14014 if (maybe_identity)
14015 {
14016 if (all_in_vec0)
14017 return op0;
14018 if (all_in_vec1)
14019 return op1;
14020 }
14021
14022 if (all_in_vec0)
14023 op1 = op0;
14024 else if (all_in_vec1)
14025 {
14026 op0 = op1;
14027 for (i = 0; i < nelts; i++)
14028 sel[i] -= nelts;
14029 need_mask_canon = true;
14030 }
14031
14032 if ((TREE_CODE (op0) == VECTOR_CST
14033 || TREE_CODE (op0) == CONSTRUCTOR)
14034 && (TREE_CODE (op1) == VECTOR_CST
14035 || TREE_CODE (op1) == CONSTRUCTOR))
14036 {
14037 tree t = fold_vec_perm (type, op0, op1, sel);
14038 if (t != NULL_TREE)
14039 return t;
14040 }
14041
14042 if (op0 == op1 && !single_arg)
14043 changed = true;
14044
14045 /* Some targets are deficient and fail to expand a single
14046 argument permutation while still allowing an equivalent
14047 2-argument version. */
14048 if (need_mask_canon && arg2 == op2
14049 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
14050 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
14051 {
14052 need_mask_canon = need_mask_canon2;
14053 sel = sel2;
14054 }
14055
14056 if (need_mask_canon && arg2 == op2)
14057 {
14058 tree *tsel = XALLOCAVEC (tree, nelts);
14059 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14060 for (i = 0; i < nelts; i++)
14061 tsel[i] = build_int_cst (eltype, sel[i]);
14062 op2 = build_vector (TREE_TYPE (arg2), tsel);
14063 changed = true;
14064 }
14065
14066 if (changed)
14067 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14068 }
14069 return NULL_TREE;
14070
14071 default:
14072 return NULL_TREE;
14073 } /* switch (code) */
14074 }
14075
14076 /* Perform constant folding and related simplification of EXPR.
14077 The related simplifications include x*1 => x, x*0 => 0, etc.,
14078 and application of the associative law.
14079 NOP_EXPR conversions may be removed freely (as long as we
14080 are careful not to change the type of the overall expression).
14081 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14082 but we can constant-fold them if they have constant operands. */
14083
14084 #ifdef ENABLE_FOLD_CHECKING
14085 # define fold(x) fold_1 (x)
14086 static tree fold_1 (tree);
14087 static
14088 #endif
14089 tree
14090 fold (tree expr)
14091 {
14092 const tree t = expr;
14093 enum tree_code code = TREE_CODE (t);
14094 enum tree_code_class kind = TREE_CODE_CLASS (code);
14095 tree tem;
14096 location_t loc = EXPR_LOCATION (expr);
14097
14098 /* Return right away if a constant. */
14099 if (kind == tcc_constant)
14100 return t;
14101
14102 /* CALL_EXPR-like objects with variable numbers of operands are
14103 treated specially. */
14104 if (kind == tcc_vl_exp)
14105 {
14106 if (code == CALL_EXPR)
14107 {
14108 tem = fold_call_expr (loc, expr, false);
14109 return tem ? tem : expr;
14110 }
14111 return expr;
14112 }
14113
14114 if (IS_EXPR_CODE_CLASS (kind))
14115 {
14116 tree type = TREE_TYPE (t);
14117 tree op0, op1, op2;
14118
14119 switch (TREE_CODE_LENGTH (code))
14120 {
14121 case 1:
14122 op0 = TREE_OPERAND (t, 0);
14123 tem = fold_unary_loc (loc, code, type, op0);
14124 return tem ? tem : expr;
14125 case 2:
14126 op0 = TREE_OPERAND (t, 0);
14127 op1 = TREE_OPERAND (t, 1);
14128 tem = fold_binary_loc (loc, code, type, op0, op1);
14129 return tem ? tem : expr;
14130 case 3:
14131 op0 = TREE_OPERAND (t, 0);
14132 op1 = TREE_OPERAND (t, 1);
14133 op2 = TREE_OPERAND (t, 2);
14134 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14135 return tem ? tem : expr;
14136 default:
14137 break;
14138 }
14139 }
14140
14141 switch (code)
14142 {
14143 case ARRAY_REF:
14144 {
14145 tree op0 = TREE_OPERAND (t, 0);
14146 tree op1 = TREE_OPERAND (t, 1);
14147
14148 if (TREE_CODE (op1) == INTEGER_CST
14149 && TREE_CODE (op0) == CONSTRUCTOR
14150 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14151 {
14152 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14153 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14154 unsigned HOST_WIDE_INT begin = 0;
14155
14156 /* Find a matching index by means of a binary search. */
14157 while (begin != end)
14158 {
14159 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14160 tree index = (*elts)[middle].index;
14161
14162 if (TREE_CODE (index) == INTEGER_CST
14163 && tree_int_cst_lt (index, op1))
14164 begin = middle + 1;
14165 else if (TREE_CODE (index) == INTEGER_CST
14166 && tree_int_cst_lt (op1, index))
14167 end = middle;
14168 else if (TREE_CODE (index) == RANGE_EXPR
14169 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14170 begin = middle + 1;
14171 else if (TREE_CODE (index) == RANGE_EXPR
14172 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14173 end = middle;
14174 else
14175 return (*elts)[middle].value;
14176 }
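	  /* E.g., folding {[0] = 10, [5] = 20}[5] locates index 5 among
	     the sorted constructor indexes and returns 20; a RANGE_EXPR
	     index such as [1 ... 4] is matched against both of its
	     bounds.  */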
14177 }
14178
14179 return t;
14180 }
14181
14182 /* Return a VECTOR_CST if possible. */
14183 case CONSTRUCTOR:
14184 {
14185 tree type = TREE_TYPE (t);
14186 if (TREE_CODE (type) != VECTOR_TYPE)
14187 return t;
14188
14189 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14190 unsigned HOST_WIDE_INT idx, pos = 0;
14191 tree value;
14192
14193 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14194 {
14195 if (!CONSTANT_CLASS_P (value))
14196 return t;
14197 if (TREE_CODE (value) == VECTOR_CST)
14198 {
14199 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14200 vec[pos++] = VECTOR_CST_ELT (value, i);
14201 }
14202 else
14203 vec[pos++] = value;
14204 }
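      /* Pad the remaining elements with zeros: e.g. a constructor
	 {1, 2} for a 4-element integer vector type folds to the
	 VECTOR_CST {1, 2, 0, 0}.  */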
14205 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14206 vec[pos] = build_zero_cst (TREE_TYPE (type));
14207
14208 return build_vector (type, vec);
14209 }
14210
14211 case CONST_DECL:
14212 return fold (DECL_INITIAL (t));
14213
14214 default:
14215 return t;
14216 } /* switch (code) */
14217 }
14218
14219 #ifdef ENABLE_FOLD_CHECKING
14220 #undef fold
14221
14222 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14223 hash_table<pointer_hash<const tree_node> > *);
14224 static void fold_check_failed (const_tree, const_tree);
14225 void print_fold_checksum (const_tree);
14226
14227 /* When configured with --enable-checking=fold, compute a digest of
14228    EXPR before and after the actual fold call to verify that fold
14229    did not accidentally change the original expr.  */
14230
14231 tree
14232 fold (tree expr)
14233 {
14234 tree ret;
14235 struct md5_ctx ctx;
14236 unsigned char checksum_before[16], checksum_after[16];
14237 hash_table<pointer_hash<const tree_node> > ht (32);
14238
14239 md5_init_ctx (&ctx);
14240 fold_checksum_tree (expr, &ctx, &ht);
14241 md5_finish_ctx (&ctx, checksum_before);
14242 ht.empty ();
14243
14244 ret = fold_1 (expr);
14245
14246 md5_init_ctx (&ctx);
14247 fold_checksum_tree (expr, &ctx, &ht);
14248 md5_finish_ctx (&ctx, checksum_after);
14249
14250 if (memcmp (checksum_before, checksum_after, 16))
14251 fold_check_failed (expr, ret);
14252
14253 return ret;
14254 }
14255
14256 void
14257 print_fold_checksum (const_tree expr)
14258 {
14259 struct md5_ctx ctx;
14260 unsigned char checksum[16], cnt;
14261 hash_table<pointer_hash<const tree_node> > ht (32);
14262
14263 md5_init_ctx (&ctx);
14264 fold_checksum_tree (expr, &ctx, &ht);
14265 md5_finish_ctx (&ctx, checksum);
14266 for (cnt = 0; cnt < 16; ++cnt)
14267 fprintf (stderr, "%02x", checksum[cnt]);
14268 putc ('\n', stderr);
14269 }
14270
14271 static void
14272 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14273 {
14274 internal_error ("fold check: original tree changed by fold");
14275 }
14276
14277 static void
14278 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14279 hash_table<pointer_hash <const tree_node> > *ht)
14280 {
14281 const tree_node **slot;
14282 enum tree_code code;
14283 union tree_node buf;
14284 int i, len;
14285
14286 recursive_label:
14287 if (expr == NULL)
14288 return;
14289 slot = ht->find_slot (expr, INSERT);
14290 if (*slot != NULL)
14291 return;
14292 *slot = expr;
14293 code = TREE_CODE (expr);
14294 if (TREE_CODE_CLASS (code) == tcc_declaration
14295 && DECL_ASSEMBLER_NAME_SET_P (expr))
14296 {
14297 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14298 memcpy ((char *) &buf, expr, tree_size (expr));
14299 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14300 expr = (tree) &buf;
14301 }
14302 else if (TREE_CODE_CLASS (code) == tcc_type
14303 && (TYPE_POINTER_TO (expr)
14304 || TYPE_REFERENCE_TO (expr)
14305 || TYPE_CACHED_VALUES_P (expr)
14306 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14307 || TYPE_NEXT_VARIANT (expr)))
14308 {
14309 /* Allow these fields to be modified. */
14310 tree tmp;
14311 memcpy ((char *) &buf, expr, tree_size (expr));
14312 expr = tmp = (tree) &buf;
14313 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14314 TYPE_POINTER_TO (tmp) = NULL;
14315 TYPE_REFERENCE_TO (tmp) = NULL;
14316 TYPE_NEXT_VARIANT (tmp) = NULL;
14317 if (TYPE_CACHED_VALUES_P (tmp))
14318 {
14319 TYPE_CACHED_VALUES_P (tmp) = 0;
14320 TYPE_CACHED_VALUES (tmp) = NULL;
14321 }
14322 }
14323 md5_process_bytes (expr, tree_size (expr), ctx);
14324 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14325 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14326 if (TREE_CODE_CLASS (code) != tcc_type
14327 && TREE_CODE_CLASS (code) != tcc_declaration
14328 && code != TREE_LIST
14329 && code != SSA_NAME
14330 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14331 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14332 switch (TREE_CODE_CLASS (code))
14333 {
14334 case tcc_constant:
14335 switch (code)
14336 {
14337 case STRING_CST:
14338 md5_process_bytes (TREE_STRING_POINTER (expr),
14339 TREE_STRING_LENGTH (expr), ctx);
14340 break;
14341 case COMPLEX_CST:
14342 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14343 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14344 break;
14345 case VECTOR_CST:
14346 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14347 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14348 break;
14349 default:
14350 break;
14351 }
14352 break;
14353 case tcc_exceptional:
14354 switch (code)
14355 {
14356 case TREE_LIST:
14357 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14358 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14359 expr = TREE_CHAIN (expr);
14360 goto recursive_label;
14361 break;
14362 case TREE_VEC:
14363 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14364 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14365 break;
14366 default:
14367 break;
14368 }
14369 break;
14370 case tcc_expression:
14371 case tcc_reference:
14372 case tcc_comparison:
14373 case tcc_unary:
14374 case tcc_binary:
14375 case tcc_statement:
14376 case tcc_vl_exp:
14377 len = TREE_OPERAND_LENGTH (expr);
14378 for (i = 0; i < len; ++i)
14379 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14380 break;
14381 case tcc_declaration:
14382 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14383 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14384 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14385 {
14386 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14387 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14388 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14389 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14390 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14391 }
14392
14393 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14394 {
14395 if (TREE_CODE (expr) == FUNCTION_DECL)
14396 {
14397 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14398 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14399 }
14400 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14401 }
14402 break;
14403 case tcc_type:
14404 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14405 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14406 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14407 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14408 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14409 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14410 if (INTEGRAL_TYPE_P (expr)
14411 || SCALAR_FLOAT_TYPE_P (expr))
14412 {
14413 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14414 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14415 }
14416 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14417 if (TREE_CODE (expr) == RECORD_TYPE
14418 || TREE_CODE (expr) == UNION_TYPE
14419 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14420 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14421 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14422 break;
14423 default:
14424 break;
14425 }
14426 }
14427
14428 /* Helper function for outputting the checksum of a tree T. When
14429 debugging with gdb, you can "define mynext" to be "next" followed
14430 by "call debug_fold_checksum (op0)", then just trace down till the
14431 outputs differ. */
14432
14433 DEBUG_FUNCTION void
14434 debug_fold_checksum (const_tree t)
14435 {
14436 int i;
14437 unsigned char checksum[16];
14438 struct md5_ctx ctx;
14439 hash_table<pointer_hash<const tree_node> > ht (32);
14440
14441 md5_init_ctx (&ctx);
14442 fold_checksum_tree (t, &ctx, &ht);
14443 md5_finish_ctx (&ctx, checksum);
14444 ht.empty ();
14445
14446 for (i = 0; i < 16; i++)
14447 fprintf (stderr, "%d ", checksum[i]);
14448
14449 fprintf (stderr, "\n");
14450 }
14451
14452 #endif
14453
14454 /* Fold a unary tree expression with code CODE of type TYPE with an
14455 operand OP0. LOC is the location of the resulting expression.
14456 Return a folded expression if successful. Otherwise, return a tree
14457 expression with code CODE of type TYPE with an operand OP0. */
14458
14459 tree
14460 fold_build1_stat_loc (location_t loc,
14461 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14462 {
14463 tree tem;
14464 #ifdef ENABLE_FOLD_CHECKING
14465 unsigned char checksum_before[16], checksum_after[16];
14466 struct md5_ctx ctx;
14467 hash_table<pointer_hash<const tree_node> > ht (32);
14468
14469 md5_init_ctx (&ctx);
14470 fold_checksum_tree (op0, &ctx, &ht);
14471 md5_finish_ctx (&ctx, checksum_before);
14472 ht.empty ();
14473 #endif
14474
14475 tem = fold_unary_loc (loc, code, type, op0);
14476 if (!tem)
14477 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14478
14479 #ifdef ENABLE_FOLD_CHECKING
14480 md5_init_ctx (&ctx);
14481 fold_checksum_tree (op0, &ctx, &ht);
14482 md5_finish_ctx (&ctx, checksum_after);
14483
14484 if (memcmp (checksum_before, checksum_after, 16))
14485 fold_check_failed (op0, tem);
14486 #endif
14487 return tem;
14488 }
14489
14490 /* Fold a binary tree expression with code CODE of type TYPE with
14491 operands OP0 and OP1. LOC is the location of the resulting
14492 expression. Return a folded expression if successful. Otherwise,
14493 return a tree expression with code CODE of type TYPE with operands
14494 OP0 and OP1. */
14495
14496 tree
14497 fold_build2_stat_loc (location_t loc,
14498 enum tree_code code, tree type, tree op0, tree op1
14499 MEM_STAT_DECL)
14500 {
14501 tree tem;
14502 #ifdef ENABLE_FOLD_CHECKING
14503 unsigned char checksum_before_op0[16],
14504 checksum_before_op1[16],
14505 checksum_after_op0[16],
14506 checksum_after_op1[16];
14507 struct md5_ctx ctx;
14508 hash_table<pointer_hash<const tree_node> > ht (32);
14509
14510 md5_init_ctx (&ctx);
14511 fold_checksum_tree (op0, &ctx, &ht);
14512 md5_finish_ctx (&ctx, checksum_before_op0);
14513 ht.empty ();
14514
14515 md5_init_ctx (&ctx);
14516 fold_checksum_tree (op1, &ctx, &ht);
14517 md5_finish_ctx (&ctx, checksum_before_op1);
14518 ht.empty ();
14519 #endif
14520
14521 tem = fold_binary_loc (loc, code, type, op0, op1);
14522 if (!tem)
14523 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14524
14525 #ifdef ENABLE_FOLD_CHECKING
14526 md5_init_ctx (&ctx);
14527 fold_checksum_tree (op0, &ctx, &ht);
14528 md5_finish_ctx (&ctx, checksum_after_op0);
14529 ht.empty ();
14530
14531 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14532 fold_check_failed (op0, tem);
14533
14534 md5_init_ctx (&ctx);
14535 fold_checksum_tree (op1, &ctx, &ht);
14536 md5_finish_ctx (&ctx, checksum_after_op1);
14537
14538 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14539 fold_check_failed (op1, tem);
14540 #endif
14541 return tem;
14542 }
14543
14544 /* Fold a ternary tree expression with code CODE of type TYPE with
14545 operands OP0, OP1, and OP2. Return a folded expression if
14546 successful. Otherwise, return a tree expression with code CODE of
14547 type TYPE with operands OP0, OP1, and OP2. */
14548
14549 tree
14550 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14551 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14552 {
14553 tree tem;
14554 #ifdef ENABLE_FOLD_CHECKING
14555 unsigned char checksum_before_op0[16],
14556 checksum_before_op1[16],
14557 checksum_before_op2[16],
14558 checksum_after_op0[16],
14559 checksum_after_op1[16],
14560 checksum_after_op2[16];
14561 struct md5_ctx ctx;
14562 hash_table<pointer_hash<const tree_node> > ht (32);
14563
14564 md5_init_ctx (&ctx);
14565 fold_checksum_tree (op0, &ctx, &ht);
14566 md5_finish_ctx (&ctx, checksum_before_op0);
14567 ht.empty ();
14568
14569 md5_init_ctx (&ctx);
14570 fold_checksum_tree (op1, &ctx, &ht);
14571 md5_finish_ctx (&ctx, checksum_before_op1);
14572 ht.empty ();
14573
14574 md5_init_ctx (&ctx);
14575 fold_checksum_tree (op2, &ctx, &ht);
14576 md5_finish_ctx (&ctx, checksum_before_op2);
14577 ht.empty ();
14578 #endif
14579
14580 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14581 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14582 if (!tem)
14583 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14584
14585 #ifdef ENABLE_FOLD_CHECKING
14586 md5_init_ctx (&ctx);
14587 fold_checksum_tree (op0, &ctx, &ht);
14588 md5_finish_ctx (&ctx, checksum_after_op0);
14589 ht.empty ();
14590
14591 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14592 fold_check_failed (op0, tem);
14593
14594 md5_init_ctx (&ctx);
14595 fold_checksum_tree (op1, &ctx, &ht);
14596 md5_finish_ctx (&ctx, checksum_after_op1);
14597 ht.empty ();
14598
14599 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14600 fold_check_failed (op1, tem);
14601
14602 md5_init_ctx (&ctx);
14603 fold_checksum_tree (op2, &ctx, &ht);
14604 md5_finish_ctx (&ctx, checksum_after_op2);
14605
14606 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14607 fold_check_failed (op2, tem);
14608 #endif
14609 return tem;
14610 }
14611
14612 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14613 arguments in ARGARRAY, and a null static chain.
14614 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14615 of type TYPE from the given operands as constructed by build_call_array. */
14616
14617 tree
14618 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14619 int nargs, tree *argarray)
14620 {
14621 tree tem;
14622 #ifdef ENABLE_FOLD_CHECKING
14623 unsigned char checksum_before_fn[16],
14624 checksum_before_arglist[16],
14625 checksum_after_fn[16],
14626 checksum_after_arglist[16];
14627 struct md5_ctx ctx;
14628 hash_table<pointer_hash<const tree_node> > ht (32);
14629 int i;
14630
14631 md5_init_ctx (&ctx);
14632 fold_checksum_tree (fn, &ctx, &ht);
14633 md5_finish_ctx (&ctx, checksum_before_fn);
14634 ht.empty ();
14635
14636 md5_init_ctx (&ctx);
14637 for (i = 0; i < nargs; i++)
14638 fold_checksum_tree (argarray[i], &ctx, &ht);
14639 md5_finish_ctx (&ctx, checksum_before_arglist);
14640 ht.empty ();
14641 #endif
14642
14643 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14644
14645 #ifdef ENABLE_FOLD_CHECKING
14646 md5_init_ctx (&ctx);
14647 fold_checksum_tree (fn, &ctx, &ht);
14648 md5_finish_ctx (&ctx, checksum_after_fn);
14649 ht.empty ();
14650
14651 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14652 fold_check_failed (fn, tem);
14653
14654 md5_init_ctx (&ctx);
14655 for (i = 0; i < nargs; i++)
14656 fold_checksum_tree (argarray[i], &ctx, &ht);
14657 md5_finish_ctx (&ctx, checksum_after_arglist);
14658
14659 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14660 fold_check_failed (NULL_TREE, tem);
14661 #endif
14662 return tem;
14663 }
14664
14665 /* Perform constant folding and related simplification of initializer
14666 expression EXPR. These behave identically to "fold_buildN" but ignore
14667 potential run-time traps and exceptions that fold must preserve. */
14668
14669 #define START_FOLD_INIT \
14670 int saved_signaling_nans = flag_signaling_nans;\
14671 int saved_trapping_math = flag_trapping_math;\
14672 int saved_rounding_math = flag_rounding_math;\
14673 int saved_trapv = flag_trapv;\
14674 int saved_folding_initializer = folding_initializer;\
14675 flag_signaling_nans = 0;\
14676 flag_trapping_math = 0;\
14677 flag_rounding_math = 0;\
14678 flag_trapv = 0;\
14679 folding_initializer = 1;
14680
14681 #define END_FOLD_INIT \
14682 flag_signaling_nans = saved_signaling_nans;\
14683 flag_trapping_math = saved_trapping_math;\
14684 flag_rounding_math = saved_rounding_math;\
14685 flag_trapv = saved_trapv;\
14686 folding_initializer = saved_folding_initializer;
14687
14688 tree
14689 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14690 tree type, tree op)
14691 {
14692 tree result;
14693 START_FOLD_INIT;
14694
14695 result = fold_build1_loc (loc, code, type, op);
14696
14697 END_FOLD_INIT;
14698 return result;
14699 }
14700
14701 tree
14702 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14703 tree type, tree op0, tree op1)
14704 {
14705 tree result;
14706 START_FOLD_INIT;
14707
14708 result = fold_build2_loc (loc, code, type, op0, op1);
14709
14710 END_FOLD_INIT;
14711 return result;
14712 }
14713
14714 tree
14715 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14716 int nargs, tree *argarray)
14717 {
14718 tree result;
14719 START_FOLD_INIT;
14720
14721 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14722
14723 END_FOLD_INIT;
14724 return result;
14725 }
14726
14727 #undef START_FOLD_INIT
14728 #undef END_FOLD_INIT
14729
14730 /* Determine if the first argument is a multiple of the second argument.
14731    Return 0 if it is not, or if we cannot easily determine it to be.
14732
14733 An example of the sort of thing we care about (at this point; this routine
14734 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14735 fold cases do now) is discovering that
14736
14737 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14738
14739 is a multiple of
14740
14741 SAVE_EXPR (J * 8)
14742
14743 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14744
14745 This code also handles discovering that
14746
14747 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14748
14749 is a multiple of 8 so we don't have to worry about dealing with a
14750 possible remainder.
14751
14752 Note that we *look* inside a SAVE_EXPR only to determine how it was
14753 calculated; it is not safe for fold to do much of anything else with the
14754 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14755 at run time. For example, the latter example above *cannot* be implemented
14756 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14757 evaluation time of the original SAVE_EXPR is not necessarily the same at
14758 the time the new expression is evaluated. The only optimization of this
14759 sort that would be valid is changing
14760
14761 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14762
14763 divided by 8 to
14764
14765 SAVE_EXPR (I) * SAVE_EXPR (J)
14766
14767 (where the same SAVE_EXPR (J) is used in the original and the
14768 transformed version). */
14769
14770 int
14771 multiple_of_p (tree type, const_tree top, const_tree bottom)
14772 {
14773 if (operand_equal_p (top, bottom, 0))
14774 return 1;
14775
14776 if (TREE_CODE (type) != INTEGER_TYPE)
14777 return 0;
14778
14779 switch (TREE_CODE (top))
14780 {
14781 case BIT_AND_EXPR:
14782       /* Bitwise AND provides a power-of-two multiple: if BOTTOM is a power
14783 	 of two and either operand is a multiple of BOTTOM, then so is TOP.  */
14784 if (!integer_pow2p (bottom))
14785 return 0;
14786 /* FALLTHRU */
14787
14788 case MULT_EXPR:
14789 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14790 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14791
14792 case PLUS_EXPR:
14793 case MINUS_EXPR:
14794 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14795 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14796
14797 case LSHIFT_EXPR:
14798 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14799 {
14800 tree op1, t1;
14801
14802 op1 = TREE_OPERAND (top, 1);
14803 /* const_binop may not detect overflow correctly,
14804 so check for it explicitly here. */
14805 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14806 && 0 != (t1 = fold_convert (type,
14807 const_binop (LSHIFT_EXPR,
14808 size_one_node,
14809 op1)))
14810 && !TREE_OVERFLOW (t1))
14811 return multiple_of_p (type, t1, bottom);
14812 }
14813 return 0;
14814
14815 case NOP_EXPR:
14816       /* Can't handle conversions from a non-integral or wider integral type.  */
14817 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14818 || (TYPE_PRECISION (type)
14819 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14820 return 0;
14821
14822       /* ... fall through ... */
14823
14824 case SAVE_EXPR:
14825 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14826
14827 case COND_EXPR:
14828 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14829 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14830
14831 case INTEGER_CST:
14832 if (TREE_CODE (bottom) != INTEGER_CST
14833 || integer_zerop (bottom)
14834 || (TYPE_UNSIGNED (type)
14835 && (tree_int_cst_sgn (top) < 0
14836 || tree_int_cst_sgn (bottom) < 0)))
14837 return 0;
14838 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14839 SIGNED);
14840
14841 default:
14842 return 0;
14843 }
14844 }
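/* For example (illustrative only): multiple_of_p (sizetype, J * 8, 8)
   and multiple_of_p (sizetype, (J << 3) + 16, 8) both return 1, while
   multiple_of_p (sizetype, J, 8) returns 0 because nothing is known
   about J.  */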
14845
14846 /* Return true if CODE or TYPE is known to be non-negative. */
14847
14848 static bool
14849 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14850 {
14851 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14852 && truth_value_p (code))
14853 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14854        have a signed:1 type (where the values are -1 and 0).  */
14855 return true;
14856 return false;
14857 }
14858
14859 /* Return true if (CODE OP0) is known to be non-negative. If the return
14860 value is based on the assumption that signed overflow is undefined,
14861 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14862 *STRICT_OVERFLOW_P. */
14863
14864 bool
14865 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14866 bool *strict_overflow_p)
14867 {
14868 if (TYPE_UNSIGNED (type))
14869 return true;
14870
14871 switch (code)
14872 {
14873 case ABS_EXPR:
14874 /* We can't return 1 if flag_wrapv is set because
14875 ABS_EXPR<INT_MIN> = INT_MIN. */
14876 if (!INTEGRAL_TYPE_P (type))
14877 return true;
14878 if (TYPE_OVERFLOW_UNDEFINED (type))
14879 {
14880 *strict_overflow_p = true;
14881 return true;
14882 }
14883 break;
14884
14885 case NON_LVALUE_EXPR:
14886 case FLOAT_EXPR:
14887 case FIX_TRUNC_EXPR:
14888 return tree_expr_nonnegative_warnv_p (op0,
14889 strict_overflow_p);
14890
14891 case NOP_EXPR:
14892 {
14893 tree inner_type = TREE_TYPE (op0);
14894 tree outer_type = type;
14895
14896 if (TREE_CODE (outer_type) == REAL_TYPE)
14897 {
14898 if (TREE_CODE (inner_type) == REAL_TYPE)
14899 return tree_expr_nonnegative_warnv_p (op0,
14900 strict_overflow_p);
14901 if (INTEGRAL_TYPE_P (inner_type))
14902 {
14903 if (TYPE_UNSIGNED (inner_type))
14904 return true;
14905 return tree_expr_nonnegative_warnv_p (op0,
14906 strict_overflow_p);
14907 }
14908 }
14909 else if (INTEGRAL_TYPE_P (outer_type))
14910 {
14911 if (TREE_CODE (inner_type) == REAL_TYPE)
14912 return tree_expr_nonnegative_warnv_p (op0,
14913 strict_overflow_p);
14914 if (INTEGRAL_TYPE_P (inner_type))
14915 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14916 && TYPE_UNSIGNED (inner_type);
14917 }
14918 }
14919 break;
14920
14921 default:
14922 return tree_simple_nonnegative_warnv_p (code, type);
14923 }
14924
14925   /* We don't know the sign of `t', so be conservative and return false.  */
14926 return false;
14927 }
14928
14929 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14930 value is based on the assumption that signed overflow is undefined,
14931 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14932 *STRICT_OVERFLOW_P. */
14933
14934 bool
14935 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14936 tree op1, bool *strict_overflow_p)
14937 {
14938 if (TYPE_UNSIGNED (type))
14939 return true;
14940
14941 switch (code)
14942 {
14943 case POINTER_PLUS_EXPR:
14944 case PLUS_EXPR:
14945 if (FLOAT_TYPE_P (type))
14946 return (tree_expr_nonnegative_warnv_p (op0,
14947 strict_overflow_p)
14948 && tree_expr_nonnegative_warnv_p (op1,
14949 strict_overflow_p));
14950
14951 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14952 both unsigned and at least 2 bits shorter than the result. */
14953 if (TREE_CODE (type) == INTEGER_TYPE
14954 && TREE_CODE (op0) == NOP_EXPR
14955 && TREE_CODE (op1) == NOP_EXPR)
14956 {
14957 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14958 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14959 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14960 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14961 {
14962 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14963 TYPE_PRECISION (inner2)) + 1;
14964 return prec < TYPE_PRECISION (type);
14965 }
14966 }
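	  /* Reasoning: if x and y each fit in k bits, their sum is at
	     most 2 * (2**k - 1) < 2**(k + 1), so the result's sign bit
	     stays clear whenever k + 1 < TYPE_PRECISION (type).  */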
14967 break;
14968
14969 case MULT_EXPR:
14970 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14971 {
14972 	  /* x * x is always non-negative for floating point x,
14973 	     or for integers when overflow is undefined.  */
14974 if (operand_equal_p (op0, op1, 0)
14975 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14976 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14977 {
14978 if (TYPE_OVERFLOW_UNDEFINED (type))
14979 *strict_overflow_p = true;
14980 return true;
14981 }
14982 }
14983
14984 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14985 	 both unsigned and their combined bits are fewer than the result's.  */
14986 if (TREE_CODE (type) == INTEGER_TYPE
14987 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14988 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14989 {
14990 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14991 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14992 : TREE_TYPE (op0);
14993 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14994 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14995 : TREE_TYPE (op1);
14996
14997 bool unsigned0 = TYPE_UNSIGNED (inner0);
14998 bool unsigned1 = TYPE_UNSIGNED (inner1);
14999
15000 if (TREE_CODE (op0) == INTEGER_CST)
15001 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15002
15003 if (TREE_CODE (op1) == INTEGER_CST)
15004 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15005
15006 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15007 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15008 {
15009 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15010 ? tree_int_cst_min_precision (op0, UNSIGNED)
15011 : TYPE_PRECISION (inner0);
15012
15013 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15014 ? tree_int_cst_min_precision (op1, UNSIGNED)
15015 : TYPE_PRECISION (inner1);
15016
15017 return precision0 + precision1 < TYPE_PRECISION (type);
15018 }
15019 }
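	  /* E.g., (int) (unsigned char) c * 200 is known non-negative
	     for 32-bit int: 8 bits for the zero-extended operand plus
	     8 bits for the constant 200 total 16, below the 32-bit
	     result precision.  */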
15020 return false;
15021
15022 case BIT_AND_EXPR:
15023 case MAX_EXPR:
15024 return (tree_expr_nonnegative_warnv_p (op0,
15025 strict_overflow_p)
15026 || tree_expr_nonnegative_warnv_p (op1,
15027 strict_overflow_p));
15028
15029 case BIT_IOR_EXPR:
15030 case BIT_XOR_EXPR:
15031 case MIN_EXPR:
15032 case RDIV_EXPR:
15033 case TRUNC_DIV_EXPR:
15034 case CEIL_DIV_EXPR:
15035 case FLOOR_DIV_EXPR:
15036 case ROUND_DIV_EXPR:
15037 return (tree_expr_nonnegative_warnv_p (op0,
15038 strict_overflow_p)
15039 && tree_expr_nonnegative_warnv_p (op1,
15040 strict_overflow_p));
15041
15042 case TRUNC_MOD_EXPR:
15043 case CEIL_MOD_EXPR:
15044 case FLOOR_MOD_EXPR:
15045 case ROUND_MOD_EXPR:
15046 return tree_expr_nonnegative_warnv_p (op0,
15047 strict_overflow_p);
15048 default:
15049 return tree_simple_nonnegative_warnv_p (code, type);
15050 }
15051
15052   /* We don't know the sign of `t', so be conservative and return false.  */
15053 return false;
15054 }
15055
15056 /* Return true if T is known to be non-negative. If the return
15057 value is based on the assumption that signed overflow is undefined,
15058 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15059 *STRICT_OVERFLOW_P. */
15060
15061 bool
15062 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15063 {
15064 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15065 return true;
15066
15067 switch (TREE_CODE (t))
15068 {
15069 case INTEGER_CST:
15070 return tree_int_cst_sgn (t) >= 0;
15071
15072 case REAL_CST:
15073 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15074
15075 case FIXED_CST:
15076 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15077
15078 case COND_EXPR:
15079 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15080 strict_overflow_p)
15081 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15082 strict_overflow_p));
15083 default:
15084 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15085 TREE_TYPE (t));
15086 }
15087 /* We don't know sign of `t', so be conservative and return false. */
15088 return false;
15089 }
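
/* Illustrative example (editorial sketch): for the COND_EXPR case above,
   an expression such as (x ? 1 : 2) is known non-negative because both
   arms are; the truth value of the condition is irrelevant to the sign
   of the result.  */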
15090
15091 /* Return true if T is known to be non-negative. If the return
15092 value is based on the assumption that signed overflow is undefined,
15093 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15094 *STRICT_OVERFLOW_P. */
15095
15096 bool
15097 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15098 tree arg0, tree arg1, bool *strict_overflow_p)
15099 {
15100 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15101 switch (DECL_FUNCTION_CODE (fndecl))
15102 {
15103 CASE_FLT_FN (BUILT_IN_ACOS):
15104 CASE_FLT_FN (BUILT_IN_ACOSH):
15105 CASE_FLT_FN (BUILT_IN_CABS):
15106 CASE_FLT_FN (BUILT_IN_COSH):
15107 CASE_FLT_FN (BUILT_IN_ERFC):
15108 CASE_FLT_FN (BUILT_IN_EXP):
15109 CASE_FLT_FN (BUILT_IN_EXP10):
15110 CASE_FLT_FN (BUILT_IN_EXP2):
15111 CASE_FLT_FN (BUILT_IN_FABS):
15112 CASE_FLT_FN (BUILT_IN_FDIM):
15113 CASE_FLT_FN (BUILT_IN_HYPOT):
15114 CASE_FLT_FN (BUILT_IN_POW10):
15115 CASE_INT_FN (BUILT_IN_FFS):
15116 CASE_INT_FN (BUILT_IN_PARITY):
15117 CASE_INT_FN (BUILT_IN_POPCOUNT):
15118 CASE_INT_FN (BUILT_IN_CLZ):
15119 CASE_INT_FN (BUILT_IN_CLRSB):
15120 case BUILT_IN_BSWAP32:
15121 case BUILT_IN_BSWAP64:
15122 /* Always true. */
15123 return true;
15124
15125 CASE_FLT_FN (BUILT_IN_SQRT):
15126 /* sqrt(-0.0) is -0.0. */
15127 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15128 return true;
15129 return tree_expr_nonnegative_warnv_p (arg0,
15130 strict_overflow_p);
15131
15132 CASE_FLT_FN (BUILT_IN_ASINH):
15133 CASE_FLT_FN (BUILT_IN_ATAN):
15134 CASE_FLT_FN (BUILT_IN_ATANH):
15135 CASE_FLT_FN (BUILT_IN_CBRT):
15136 CASE_FLT_FN (BUILT_IN_CEIL):
15137 CASE_FLT_FN (BUILT_IN_ERF):
15138 CASE_FLT_FN (BUILT_IN_EXPM1):
15139 CASE_FLT_FN (BUILT_IN_FLOOR):
15140 CASE_FLT_FN (BUILT_IN_FMOD):
15141 CASE_FLT_FN (BUILT_IN_FREXP):
15142 CASE_FLT_FN (BUILT_IN_ICEIL):
15143 CASE_FLT_FN (BUILT_IN_IFLOOR):
15144 CASE_FLT_FN (BUILT_IN_IRINT):
15145 CASE_FLT_FN (BUILT_IN_IROUND):
15146 CASE_FLT_FN (BUILT_IN_LCEIL):
15147 CASE_FLT_FN (BUILT_IN_LDEXP):
15148 CASE_FLT_FN (BUILT_IN_LFLOOR):
15149 CASE_FLT_FN (BUILT_IN_LLCEIL):
15150 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15151 CASE_FLT_FN (BUILT_IN_LLRINT):
15152 CASE_FLT_FN (BUILT_IN_LLROUND):
15153 CASE_FLT_FN (BUILT_IN_LRINT):
15154 CASE_FLT_FN (BUILT_IN_LROUND):
15155 CASE_FLT_FN (BUILT_IN_MODF):
15156 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15157 CASE_FLT_FN (BUILT_IN_RINT):
15158 CASE_FLT_FN (BUILT_IN_ROUND):
15159 CASE_FLT_FN (BUILT_IN_SCALB):
15160 CASE_FLT_FN (BUILT_IN_SCALBLN):
15161 CASE_FLT_FN (BUILT_IN_SCALBN):
15162 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15163 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15164 CASE_FLT_FN (BUILT_IN_SINH):
15165 CASE_FLT_FN (BUILT_IN_TANH):
15166 CASE_FLT_FN (BUILT_IN_TRUNC):
15167 /* True if the 1st argument is nonnegative. */
15168 return tree_expr_nonnegative_warnv_p (arg0,
15169 strict_overflow_p);
15170
15171 CASE_FLT_FN (BUILT_IN_FMAX):
15172 /* True if the 1st OR 2nd arguments are nonnegative. */
15173 return (tree_expr_nonnegative_warnv_p (arg0,
15174 strict_overflow_p)
15175 || (tree_expr_nonnegative_warnv_p (arg1,
15176 strict_overflow_p)));
15177
15178 CASE_FLT_FN (BUILT_IN_FMIN):
15179 /* True if the 1st AND 2nd arguments are nonnegative. */
15180 return (tree_expr_nonnegative_warnv_p (arg0,
15181 strict_overflow_p)
15182 && (tree_expr_nonnegative_warnv_p (arg1,
15183 strict_overflow_p)));
15184
15185 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15186 /* True if the 2nd argument is nonnegative. */
15187 return tree_expr_nonnegative_warnv_p (arg1,
15188 strict_overflow_p);
15189
15190 CASE_FLT_FN (BUILT_IN_POWI):
15191 /* True if the 1st argument is nonnegative or the second
15192 argument is an even integer. */
15193 if (TREE_CODE (arg1) == INTEGER_CST
15194 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15195 return true;
15196 return tree_expr_nonnegative_warnv_p (arg0,
15197 strict_overflow_p);
15198
15199 CASE_FLT_FN (BUILT_IN_POW):
15200 /* True if the 1st argument is nonnegative or the second
15201 argument is an even integer valued real. */
15202 if (TREE_CODE (arg1) == REAL_CST)
15203 {
15204 REAL_VALUE_TYPE c;
15205 HOST_WIDE_INT n;
15206
15207 c = TREE_REAL_CST (arg1);
15208 n = real_to_integer (&c);
15209 if ((n & 1) == 0)
15210 {
15211 REAL_VALUE_TYPE cint;
15212 real_from_integer (&cint, VOIDmode, n, SIGNED);
15213 if (real_identical (&c, &cint))
15214 return true;
15215 }
15216 }
15217 return tree_expr_nonnegative_warnv_p (arg0,
15218 strict_overflow_p);
15219
15220 default:
15221 break;
15222 }
15223 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15224 type);
15225 }
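
/* Illustrative example (editorial sketch): for the BUILT_IN_POW case
   above, pow (x, 2.0) is known non-negative for any x: real_to_integer
   on the exponent yields n = 2, (n & 1) == 0, and converting n back with
   real_from_integer reproduces 2.0 exactly, so the exponent really is an
   even integer.  An exponent of 2.5 would fail the real_identical check
   (it truncates to 2, which does not convert back to 2.5), and the
   result would then depend on the sign of x.  */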
15226
15227 /* Return true if T is known to be non-negative. If the return
15228 value is based on the assumption that signed overflow is undefined,
15229 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15230 *STRICT_OVERFLOW_P. */
15231
15232 static bool
15233 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15234 {
15235 enum tree_code code = TREE_CODE (t);
15236 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15237 return true;
15238
15239 switch (code)
15240 {
15241 case TARGET_EXPR:
15242 {
15243 tree temp = TARGET_EXPR_SLOT (t);
15244 t = TARGET_EXPR_INITIAL (t);
15245
15246 /* If the initializer is non-void, then it's a normal expression
15247 that will be assigned to the slot. */
15248 if (!VOID_TYPE_P (t))
15249 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15250
15251 /* Otherwise, the initializer sets the slot in some way. One common
15252 way is an assignment statement at the end of the initializer. */
15253 while (1)
15254 {
15255 if (TREE_CODE (t) == BIND_EXPR)
15256 t = expr_last (BIND_EXPR_BODY (t));
15257 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15258 || TREE_CODE (t) == TRY_CATCH_EXPR)
15259 t = expr_last (TREE_OPERAND (t, 0));
15260 else if (TREE_CODE (t) == STATEMENT_LIST)
15261 t = expr_last (t);
15262 else
15263 break;
15264 }
15265 if (TREE_CODE (t) == MODIFY_EXPR
15266 && TREE_OPERAND (t, 0) == temp)
15267 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15268 strict_overflow_p);
15269
15270 return false;
15271 }
15272
15273 case CALL_EXPR:
15274 {
15275 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15276 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15277
15278 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15279 get_callee_fndecl (t),
15280 arg0,
15281 arg1,
15282 strict_overflow_p);
15283 }
15284 case COMPOUND_EXPR:
15285 case MODIFY_EXPR:
15286 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15287 strict_overflow_p);
15288 case BIND_EXPR:
15289 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15290 strict_overflow_p);
15291 case SAVE_EXPR:
15292 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15293 strict_overflow_p);
15294
15295 default:
15296 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15297 TREE_TYPE (t));
15298 }
15299
15300 /* We don't know sign of `t', so be conservative and return false. */
15301 return false;
15302 }
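
/* Illustrative example (editorial sketch): for the COMPOUND_EXPR case
   above, the sign of (f (), 5) is determined solely by the second
   operand, so the whole expression is known non-negative even though
   nothing is known about f.  */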
15303
15304 /* Return true if T is known to be non-negative. If the return
15305 value is based on the assumption that signed overflow is undefined,
15306 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15307 *STRICT_OVERFLOW_P. */
15308
15309 bool
15310 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15311 {
15312 enum tree_code code;
15313 if (t == error_mark_node)
15314 return false;
15315
15316 code = TREE_CODE (t);
15317 switch (TREE_CODE_CLASS (code))
15318 {
15319 case tcc_binary:
15320 case tcc_comparison:
15321 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15322 TREE_TYPE (t),
15323 TREE_OPERAND (t, 0),
15324 TREE_OPERAND (t, 1),
15325 strict_overflow_p);
15326
15327 case tcc_unary:
15328 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15329 TREE_TYPE (t),
15330 TREE_OPERAND (t, 0),
15331 strict_overflow_p);
15332
15333 case tcc_constant:
15334 case tcc_declaration:
15335 case tcc_reference:
15336 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15337
15338 default:
15339 break;
15340 }
15341
15342 switch (code)
15343 {
15344 case TRUTH_AND_EXPR:
15345 case TRUTH_OR_EXPR:
15346 case TRUTH_XOR_EXPR:
15347 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15348 TREE_TYPE (t),
15349 TREE_OPERAND (t, 0),
15350 TREE_OPERAND (t, 1),
15351 strict_overflow_p);
15352 case TRUTH_NOT_EXPR:
15353 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15354 TREE_TYPE (t),
15355 TREE_OPERAND (t, 0),
15356 strict_overflow_p);
15357
15358 case COND_EXPR:
15359 case CONSTRUCTOR:
15360 case OBJ_TYPE_REF:
15361 case ASSERT_EXPR:
15362 case ADDR_EXPR:
15363 case WITH_SIZE_EXPR:
15364 case SSA_NAME:
15365 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15366
15367 default:
15368 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15369 }
15370 }
15371
15372 /* Return true if `t' is known to be non-negative. Handle warnings
15373 about undefined signed overflow. */
15374
15375 bool
15376 tree_expr_nonnegative_p (tree t)
15377 {
15378 bool ret, strict_overflow_p;
15379
15380 strict_overflow_p = false;
15381 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15382 if (strict_overflow_p)
15383 fold_overflow_warning (("assuming signed overflow does not occur when "
15384 "determining that expression is always "
15385 "non-negative"),
15386 WARN_STRICT_OVERFLOW_MISC);
15387 return ret;
15388 }
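
/* Usage sketch (editorial illustration; the helper below is hypothetical,
   not part of GCC):

     static tree
     maybe_fold_abs (location_t loc, tree arg)
     {
       if (tree_expr_nonnegative_p (arg))
         return arg;
       return fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg), arg);
     }

   abs (arg) is just arg whenever arg is provably non-negative; any
   strict-overflow assumption used in that proof has already been
   reported through fold_overflow_warning.  */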
15389
15390
15391 /* Return true when the unary expression (CODE OP0) is known to be nonzero.
15392 For floating point we further ensure that the value is not denormal.
15393 Similar logic is present in nonzero_address_p in rtlanal.c.
15394
15395 If the return value is based on the assumption that signed overflow
15396 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15397 change *STRICT_OVERFLOW_P. */
15398
15399 bool
15400 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15401 bool *strict_overflow_p)
15402 {
15403 switch (code)
15404 {
15405 case ABS_EXPR:
15406 return tree_expr_nonzero_warnv_p (op0,
15407 strict_overflow_p);
15408
15409 case NOP_EXPR:
15410 {
15411 tree inner_type = TREE_TYPE (op0);
15412 tree outer_type = type;
15413
15414 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15415 && tree_expr_nonzero_warnv_p (op0,
15416 strict_overflow_p));
15417 }
15418 break;
15419
15420 case NON_LVALUE_EXPR:
15421 return tree_expr_nonzero_warnv_p (op0,
15422 strict_overflow_p);
15423
15424 default:
15425 break;
15426 }
15427
15428 return false;
15429 }
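
/* Illustrative example (editorial sketch): for the NOP_EXPR case above, a
   widening conversion such as (long) x preserves nonzero-ness, but a
   narrowing one such as (signed char) x does not: x = 256 is nonzero
   while (signed char) 256 truncates to 0.  Hence the precision check.  */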
15430
15431 /* Return true when the binary expression (CODE OP0 OP1) is known to be nonzero.
15432 For floating point we further ensure that the value is not denormal.
15433 Similar logic is present in nonzero_address_p in rtlanal.c.
15434
15435 If the return value is based on the assumption that signed overflow
15436 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15437 change *STRICT_OVERFLOW_P. */
15438
15439 bool
15440 tree_binary_nonzero_warnv_p (enum tree_code code,
15441 tree type,
15442 tree op0,
15443 tree op1, bool *strict_overflow_p)
15444 {
15445 bool sub_strict_overflow_p;
15446 switch (code)
15447 {
15448 case POINTER_PLUS_EXPR:
15449 case PLUS_EXPR:
15450 if (TYPE_OVERFLOW_UNDEFINED (type))
15451 {
15452 	  /* In the presence of negative values it is hard
15453 	     to say anything definite. */
15454 sub_strict_overflow_p = false;
15455 if (!tree_expr_nonnegative_warnv_p (op0,
15456 &sub_strict_overflow_p)
15457 || !tree_expr_nonnegative_warnv_p (op1,
15458 &sub_strict_overflow_p))
15459 return false;
15460 	  /* One of the operands must be positive and the other non-negative. */
15461 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15462 overflows, on a twos-complement machine the sum of two
15463 nonnegative numbers can never be zero. */
15464 return (tree_expr_nonzero_warnv_p (op0,
15465 strict_overflow_p)
15466 || tree_expr_nonzero_warnv_p (op1,
15467 strict_overflow_p));
15468 }
15469 break;
15470
15471 case MULT_EXPR:
15472 if (TYPE_OVERFLOW_UNDEFINED (type))
15473 {
15474 if (tree_expr_nonzero_warnv_p (op0,
15475 strict_overflow_p)
15476 && tree_expr_nonzero_warnv_p (op1,
15477 strict_overflow_p))
15478 {
15479 *strict_overflow_p = true;
15480 return true;
15481 }
15482 }
15483 break;
15484
15485 case MIN_EXPR:
15486 sub_strict_overflow_p = false;
15487 if (tree_expr_nonzero_warnv_p (op0,
15488 &sub_strict_overflow_p)
15489 && tree_expr_nonzero_warnv_p (op1,
15490 &sub_strict_overflow_p))
15491 {
15492 	  if (sub_strict_overflow_p)
15493 	    *strict_overflow_p = true;
	  /* The MIN of two nonzero operands is itself one of them,
	     hence nonzero. */
	  return true;
15494 	}
15495 break;
15496
15497 case MAX_EXPR:
15498 sub_strict_overflow_p = false;
15499 if (tree_expr_nonzero_warnv_p (op0,
15500 &sub_strict_overflow_p))
15501 {
15502 if (sub_strict_overflow_p)
15503 *strict_overflow_p = true;
15504
15505 /* When both operands are nonzero, then MAX must be too. */
15506 if (tree_expr_nonzero_warnv_p (op1,
15507 strict_overflow_p))
15508 return true;
15509
15510 /* MAX where operand 0 is positive is positive. */
15511 return tree_expr_nonnegative_warnv_p (op0,
15512 strict_overflow_p);
15513 }
15514 /* MAX where operand 1 is positive is positive. */
15515 else if (tree_expr_nonzero_warnv_p (op1,
15516 &sub_strict_overflow_p)
15517 && tree_expr_nonnegative_warnv_p (op1,
15518 &sub_strict_overflow_p))
15519 {
15520 if (sub_strict_overflow_p)
15521 *strict_overflow_p = true;
15522 return true;
15523 }
15524 break;
15525
15526 case BIT_IOR_EXPR:
15527 return (tree_expr_nonzero_warnv_p (op1,
15528 strict_overflow_p)
15529 || tree_expr_nonzero_warnv_p (op0,
15530 strict_overflow_p));
15531
15532 default:
15533 break;
15534 }
15535
15536 return false;
15537 }
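
/* Illustrative example (editorial sketch): for the PLUS_EXPR rule above,
   if signed ints x and y are both known non-negative and x is known
   nonzero, then x + y is nonzero: the true sum is strictly positive, and
   even a wrapped two's-complement sum of two non-negative values cannot
   be zero, which is why *STRICT_OVERFLOW_P is left untouched.  */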
15538
15539 /* Return true when T (an address, constant or conditional) is known to be nonzero.
15540 For floating point we further ensure that T is not denormal.
15541 Similar logic is present in nonzero_address_p in rtlanal.c.
15542
15543 If the return value is based on the assumption that signed overflow
15544 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15545 change *STRICT_OVERFLOW_P. */
15546
15547 bool
15548 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15549 {
15550 bool sub_strict_overflow_p;
15551 switch (TREE_CODE (t))
15552 {
15553 case INTEGER_CST:
15554 return !integer_zerop (t);
15555
15556 case ADDR_EXPR:
15557 {
15558 tree base = TREE_OPERAND (t, 0);
15559
15560 if (!DECL_P (base))
15561 base = get_base_address (base);
15562
15563 if (!base)
15564 return false;
15565
15566 	/* For objects in the symbol table, check whether we know they are non-zero.
15567 	   Don't do anything for variables and functions before the symtab is built;
15568 	   it is quite possible that they will be declared weak later. */
15569 if (DECL_P (base) && decl_in_symtab_p (base))
15570 {
15571 struct symtab_node *symbol;
15572
15573 symbol = symtab_node::get_create (base);
15574 if (symbol)
15575 return symbol->nonzero_address ();
15576 else
15577 return false;
15578 }
15579
15580 /* Function local objects are never NULL. */
15581 if (DECL_P (base)
15582 && (DECL_CONTEXT (base)
15583 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15584 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15585 return true;
15586
15587 /* Constants are never weak. */
15588 if (CONSTANT_CLASS_P (base))
15589 return true;
15590
15591 return false;
15592 }
15593
15594 case COND_EXPR:
15595 sub_strict_overflow_p = false;
15596 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15597 &sub_strict_overflow_p)
15598 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15599 &sub_strict_overflow_p))
15600 {
15601 if (sub_strict_overflow_p)
15602 *strict_overflow_p = true;
15603 return true;
15604 }
15605 break;
15606
15607 default:
15608 break;
15609 }
15610 return false;
15611 }
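
/* Illustrative example (editorial sketch): for the ADDR_EXPR case above,
   in

     int f (void) { int local; return &local != 0; }

   the operand of the ADDR_EXPR is a function-local automatic variable,
   so the address is known nonzero and the comparison folds to true.  By
   contrast, the address of an external symbol is only trusted once the
   symbol table can answer nonzero_address (), since the symbol might
   still be declared weak.  */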
15612
15613 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15614 attempt to fold the expression to a constant without modifying TYPE,
15615 OP0 or OP1.
15616
15617 If the expression could be simplified to a constant, then return
15618 the constant. If the expression would not be simplified to a
15619 constant, then return NULL_TREE. */
15620
15621 tree
15622 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15623 {
15624 tree tem = fold_binary (code, type, op0, op1);
15625 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15626 }
15627
15628 /* Given the components of a unary expression CODE, TYPE and OP0,
15629 attempt to fold the expression to a constant without modifying
15630 TYPE or OP0.
15631
15632 If the expression could be simplified to a constant, then return
15633 the constant. If the expression would not be simplified to a
15634 constant, then return NULL_TREE. */
15635
15636 tree
15637 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15638 {
15639 tree tem = fold_unary (code, type, op0);
15640 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15641 }
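
/* Usage sketch (editorial illustration): these helpers fold only when the
   result is a compile-time constant, e.g.

     tree a = build_int_cst (integer_type_node, 2);
     tree b = build_int_cst (integer_type_node, 3);
     tree s = fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);

   leaves s as the INTEGER_CST 5, whereas with a non-constant operand
   fold_binary would produce a non-constant tree (or nothing) and
   NULL_TREE would be returned here.  */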
15642
15643 /* If EXP represents referencing an element in a constant string
15644 (either via pointer arithmetic or array indexing), return the
15645 tree representing the value accessed, otherwise return NULL. */
15646
15647 tree
15648 fold_read_from_constant_string (tree exp)
15649 {
15650 if ((TREE_CODE (exp) == INDIRECT_REF
15651 || TREE_CODE (exp) == ARRAY_REF)
15652 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15653 {
15654 tree exp1 = TREE_OPERAND (exp, 0);
15655 tree index;
15656 tree string;
15657 location_t loc = EXPR_LOCATION (exp);
15658
15659 if (TREE_CODE (exp) == INDIRECT_REF)
15660 string = string_constant (exp1, &index);
15661 else
15662 {
15663 tree low_bound = array_ref_low_bound (exp);
15664 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15665
15666 /* Optimize the special-case of a zero lower bound.
15667
15668 We convert the low_bound to sizetype to avoid some problems
15669 with constant folding. (E.g. suppose the lower bound is 1,
15670 	     and its mode is QI. Without the conversion, (ARRAY
15671 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15672 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15673 if (! integer_zerop (low_bound))
15674 index = size_diffop_loc (loc, index,
15675 fold_convert_loc (loc, sizetype, low_bound));
15676
15677 string = exp1;
15678 }
15679
15680 if (string
15681 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15682 && TREE_CODE (string) == STRING_CST
15683 && TREE_CODE (index) == INTEGER_CST
15684 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15685 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15686 == MODE_INT)
15687 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15688 return build_int_cst_type (TREE_TYPE (exp),
15689 (TREE_STRING_POINTER (string)
15690 [TREE_INT_CST_LOW (index)]));
15691 }
15692 return NULL;
15693 }
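
/* Illustrative example (editorial sketch): the ARRAY_REF "abc"[1]
   satisfies every check above (the string is a STRING_CST, the index is
   a constant below TREE_STRING_LENGTH, and the element mode is a
   single-byte integer mode), so it folds to the character constant 'b'.
   An out-of-range index such as "abc"[7] fails the compare_tree_int
   check and NULL is returned instead.  */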
15694
15695 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15696 an integer constant, real, or fixed-point constant.
15697
15698 TYPE is the type of the result. */
15699
15700 static tree
15701 fold_negate_const (tree arg0, tree type)
15702 {
15703 tree t = NULL_TREE;
15704
15705 switch (TREE_CODE (arg0))
15706 {
15707 case INTEGER_CST:
15708 {
15709 bool overflow;
15710 wide_int val = wi::neg (arg0, &overflow);
15711 t = force_fit_type (type, val, 1,
15712 (overflow | TREE_OVERFLOW (arg0))
15713 && !TYPE_UNSIGNED (type));
15714 break;
15715 }
15716
15717 case REAL_CST:
15718 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15719 break;
15720
15721 case FIXED_CST:
15722 {
15723 FIXED_VALUE_TYPE f;
15724 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15725 &(TREE_FIXED_CST (arg0)), NULL,
15726 TYPE_SATURATING (type));
15727 t = build_fixed (type, f);
15728 /* Propagate overflow flags. */
15729 if (overflow_p | TREE_OVERFLOW (arg0))
15730 TREE_OVERFLOW (t) = 1;
15731 break;
15732 }
15733
15734 default:
15735 gcc_unreachable ();
15736 }
15737
15738 return t;
15739 }
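
/* Illustrative example (editorial sketch): negating the 32-bit
   INTEGER_CST -2147483648 (INT_MIN) overflows, because +2147483648 is
   not representable in the signed type.  wi::neg reports the overflow,
   and force_fit_type then returns the wrapped value with TREE_OVERFLOW
   set on the result.  */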
15740
15741 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15742 an integer constant or real constant.
15743
15744 TYPE is the type of the result. */
15745
15746 tree
15747 fold_abs_const (tree arg0, tree type)
15748 {
15749 tree t = NULL_TREE;
15750
15751 switch (TREE_CODE (arg0))
15752 {
15753 case INTEGER_CST:
15754 {
15755 /* If the value is unsigned or non-negative, then the absolute value
15756 is the same as the ordinary value. */
15757 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15758 t = arg0;
15759
15760 /* If the value is negative, then the absolute value is
15761 its negation. */
15762 else
15763 {
15764 bool overflow;
15765 wide_int val = wi::neg (arg0, &overflow);
15766 t = force_fit_type (type, val, -1,
15767 overflow | TREE_OVERFLOW (arg0));
15768 }
15769 }
15770 break;
15771
15772 case REAL_CST:
15773 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15774 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15775 else
15776 t = arg0;
15777 break;
15778
15779 default:
15780 gcc_unreachable ();
15781 }
15782
15783 return t;
15784 }
15785
15786 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15787 constant. TYPE is the type of the result. */
15788
15789 static tree
15790 fold_not_const (const_tree arg0, tree type)
15791 {
15792 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15793
15794 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15795 }
15796
15797 /* Given CODE, a relational operator, the target type, TYPE and two
15798 constant operands OP0 and OP1, return the result of the
15799 relational operation. If the result is not a compile time
15800 constant, then return NULL_TREE. */
15801
15802 static tree
15803 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15804 {
15805 int result, invert;
15806
15807 /* From here on, the only cases we handle are when the result is
15808 known to be a constant. */
15809
15810 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15811 {
15812 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15813 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15814
15815 /* Handle the cases where either operand is a NaN. */
15816 if (real_isnan (c0) || real_isnan (c1))
15817 {
15818 switch (code)
15819 {
15820 case EQ_EXPR:
15821 case ORDERED_EXPR:
15822 result = 0;
15823 break;
15824
15825 case NE_EXPR:
15826 case UNORDERED_EXPR:
15827 case UNLT_EXPR:
15828 case UNLE_EXPR:
15829 case UNGT_EXPR:
15830 case UNGE_EXPR:
15831 case UNEQ_EXPR:
15832 result = 1;
15833 break;
15834
15835 case LT_EXPR:
15836 case LE_EXPR:
15837 case GT_EXPR:
15838 case GE_EXPR:
15839 case LTGT_EXPR:
15840 if (flag_trapping_math)
15841 return NULL_TREE;
15842 result = 0;
15843 break;
15844
15845 default:
15846 gcc_unreachable ();
15847 }
15848
15849 return constant_boolean_node (result, type);
15850 }
15851
15852 return constant_boolean_node (real_compare (code, c0, c1), type);
15853 }
15854
15855 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15856 {
15857 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15858 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15859 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15860 }
15861
15862 /* Handle equality/inequality of complex constants. */
15863 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15864 {
15865 tree rcond = fold_relational_const (code, type,
15866 TREE_REALPART (op0),
15867 TREE_REALPART (op1));
15868 tree icond = fold_relational_const (code, type,
15869 TREE_IMAGPART (op0),
15870 TREE_IMAGPART (op1));
15871 if (code == EQ_EXPR)
15872 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15873 else if (code == NE_EXPR)
15874 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15875 else
15876 return NULL_TREE;
15877 }
15878
15879 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15880 {
15881 unsigned count = VECTOR_CST_NELTS (op0);
15882 tree *elts = XALLOCAVEC (tree, count);
15883 gcc_assert (VECTOR_CST_NELTS (op1) == count
15884 && TYPE_VECTOR_SUBPARTS (type) == count);
15885
15886 for (unsigned i = 0; i < count; i++)
15887 {
15888 tree elem_type = TREE_TYPE (type);
15889 tree elem0 = VECTOR_CST_ELT (op0, i);
15890 tree elem1 = VECTOR_CST_ELT (op1, i);
15891
15892 tree tem = fold_relational_const (code, elem_type,
15893 elem0, elem1);
15894
15895 if (tem == NULL_TREE)
15896 return NULL_TREE;
15897
15898 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15899 }
15900
15901 return build_vector (type, elts);
15902 }
15903
15904 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15905
15906 To compute GT, swap the arguments and do LT.
15907 To compute GE, do LT and invert the result.
15908 To compute LE, swap the arguments, do LT and invert the result.
15909 To compute NE, do EQ and invert the result.
15910
15911 Therefore, the code below must handle only EQ and LT. */
15912
15913 if (code == LE_EXPR || code == GT_EXPR)
15914 {
15915 tree tem = op0;
15916 op0 = op1;
15917 op1 = tem;
15918 code = swap_tree_comparison (code);
15919 }
15920
15921 /* Note that it is safe to invert for real values here because we
15922 	     have already handled the one case where it matters. */
15923
15924 invert = 0;
15925 if (code == NE_EXPR || code == GE_EXPR)
15926 {
15927 invert = 1;
15928 code = invert_tree_comparison (code, false);
15929 }
15930
15931 /* Compute a result for LT or EQ if args permit;
15932 	     otherwise return NULL_TREE. */
15933 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15934 {
15935 if (code == EQ_EXPR)
15936 result = tree_int_cst_equal (op0, op1);
15937 else
15938 result = tree_int_cst_lt (op0, op1);
15939 }
15940 else
15941 return NULL_TREE;
15942
15943 if (invert)
15944 result ^= 1;
15945 return constant_boolean_node (result, type);
15946 }
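
/* Illustrative example (editorial sketch): with REAL_CST operands where
   one is a NaN, the NaN handling above gives

     NaN == 1.0  ->  false
     NaN != 1.0  ->  true
     NaN <  1.0  ->  false, but only if !flag_trapping_math; otherwise
                     NULL_TREE is returned so the potentially trapping
                     comparison survives to run time.  */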
15947
15948 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15949 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15950 itself. */
15951
15952 tree
15953 fold_build_cleanup_point_expr (tree type, tree expr)
15954 {
15955 /* If the expression does not have side effects then we don't have to wrap
15956 it with a cleanup point expression. */
15957 if (!TREE_SIDE_EFFECTS (expr))
15958 return expr;
15959
15960 	  /* If the expression is a RETURN_EXPR, check whether the expression inside
15961 	     the return, or the right-hand side of the MODIFY_EXPR inside the return,
15962 	     is free of side effects. If either has none, we don't need to wrap the
15963 	     expression in a cleanup point expression. Note we don't check the
15964 	     left-hand side of the MODIFY_EXPR because it should always be the return decl. */
15965 if (TREE_CODE (expr) == RETURN_EXPR)
15966 {
15967 tree op = TREE_OPERAND (expr, 0);
15968 if (!op || !TREE_SIDE_EFFECTS (op))
15969 return expr;
15970 op = TREE_OPERAND (op, 1);
15971 if (!TREE_SIDE_EFFECTS (op))
15972 return expr;
15973 }
15974
15975 return build1 (CLEANUP_POINT_EXPR, type, expr);
15976 }
15977
15978 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15979 of an indirection through OP0, or NULL_TREE if no simplification is
15980 possible. */
15981
15982 tree
15983 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15984 {
15985 tree sub = op0;
15986 tree subtype;
15987
15988 STRIP_NOPS (sub);
15989 subtype = TREE_TYPE (sub);
15990 if (!POINTER_TYPE_P (subtype))
15991 return NULL_TREE;
15992
15993 if (TREE_CODE (sub) == ADDR_EXPR)
15994 {
15995 tree op = TREE_OPERAND (sub, 0);
15996 tree optype = TREE_TYPE (op);
15997 	      /* *&CONST_DECL -> the value of the const decl. */
15998 if (TREE_CODE (op) == CONST_DECL)
15999 return DECL_INITIAL (op);
16000 /* *&p => p; make sure to handle *&"str"[cst] here. */
16001 if (type == optype)
16002 {
16003 tree fop = fold_read_from_constant_string (op);
16004 if (fop)
16005 return fop;
16006 else
16007 return op;
16008 }
16009 /* *(foo *)&fooarray => fooarray[0] */
16010 else if (TREE_CODE (optype) == ARRAY_TYPE
16011 && type == TREE_TYPE (optype)
16012 && (!in_gimple_form
16013 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16014 {
16015 tree type_domain = TYPE_DOMAIN (optype);
16016 tree min_val = size_zero_node;
16017 if (type_domain && TYPE_MIN_VALUE (type_domain))
16018 min_val = TYPE_MIN_VALUE (type_domain);
16019 if (in_gimple_form
16020 && TREE_CODE (min_val) != INTEGER_CST)
16021 return NULL_TREE;
16022 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16023 NULL_TREE, NULL_TREE);
16024 }
16025 /* *(foo *)&complexfoo => __real__ complexfoo */
16026 else if (TREE_CODE (optype) == COMPLEX_TYPE
16027 && type == TREE_TYPE (optype))
16028 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16029 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16030 else if (TREE_CODE (optype) == VECTOR_TYPE
16031 && type == TREE_TYPE (optype))
16032 {
16033 tree part_width = TYPE_SIZE (type);
16034 tree index = bitsize_int (0);
16035 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16036 }
16037 }
16038
16039 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16040 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16041 {
16042 tree op00 = TREE_OPERAND (sub, 0);
16043 tree op01 = TREE_OPERAND (sub, 1);
16044
16045 STRIP_NOPS (op00);
16046 if (TREE_CODE (op00) == ADDR_EXPR)
16047 {
16048 tree op00type;
16049 op00 = TREE_OPERAND (op00, 0);
16050 op00type = TREE_TYPE (op00);
16051
16052 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16053 if (TREE_CODE (op00type) == VECTOR_TYPE
16054 && type == TREE_TYPE (op00type))
16055 {
16056 HOST_WIDE_INT offset = tree_to_shwi (op01);
16057 tree part_width = TYPE_SIZE (type);
16058 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
16059 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16060 tree index = bitsize_int (indexi);
16061
16062 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16063 return fold_build3_loc (loc,
16064 BIT_FIELD_REF, type, op00,
16065 part_width, index);
16066
16067 }
16068 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16069 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16070 && type == TREE_TYPE (op00type))
16071 {
16072 tree size = TYPE_SIZE_UNIT (type);
16073 if (tree_int_cst_equal (size, op01))
16074 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16075 }
16076 /* ((foo *)&fooarray)[1] => fooarray[1] */
16077 else if (TREE_CODE (op00type) == ARRAY_TYPE
16078 && type == TREE_TYPE (op00type))
16079 {
16080 tree type_domain = TYPE_DOMAIN (op00type);
16081 tree min_val = size_zero_node;
16082 if (type_domain && TYPE_MIN_VALUE (type_domain))
16083 min_val = TYPE_MIN_VALUE (type_domain);
16084 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16085 TYPE_SIZE_UNIT (type));
16086 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16087 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16088 NULL_TREE, NULL_TREE);
16089 }
16090 }
16091 }
16092
16093 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16094 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16095 && type == TREE_TYPE (TREE_TYPE (subtype))
16096 && (!in_gimple_form
16097 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16098 {
16099 tree type_domain;
16100 tree min_val = size_zero_node;
16101 sub = build_fold_indirect_ref_loc (loc, sub);
16102 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16103 if (type_domain && TYPE_MIN_VALUE (type_domain))
16104 min_val = TYPE_MIN_VALUE (type_domain);
16105 if (in_gimple_form
16106 && TREE_CODE (min_val) != INTEGER_CST)
16107 return NULL_TREE;
16108 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16109 NULL_TREE);
16110 }
16111
16112 return NULL_TREE;
16113 }
16114
16115 /* Builds an expression for an indirection through T, simplifying some
16116 cases. */
16117
16118 tree
16119 build_fold_indirect_ref_loc (location_t loc, tree t)
16120 {
16121 tree type = TREE_TYPE (TREE_TYPE (t));
16122 tree sub = fold_indirect_ref_1 (loc, type, t);
16123
16124 if (sub)
16125 return sub;
16126
16127 return build1_loc (loc, INDIRECT_REF, type, t);
16128 }
16129
16130 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16131
16132 tree
16133 fold_indirect_ref_loc (location_t loc, tree t)
16134 {
16135 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16136
16137 if (sub)
16138 return sub;
16139 else
16140 return t;
16141 }
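
/* Usage sketch (editorial illustration; hypothetical caller): folding an
   indirection such as *&x:

     tree addr = build_fold_addr_expr (x);
     tree deref = build_fold_indirect_ref_loc (loc, addr);

   Here fold_indirect_ref_1 sees an ADDR_EXPR whose operand type matches
   the requested type, so deref is simply x and no INDIRECT_REF node is
   built.  */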
16142
16143 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16144 whose result is ignored. The type of the returned tree need not be
16145 the same as the original expression. */
16146
16147 tree
16148 fold_ignored_result (tree t)
16149 {
16150 if (!TREE_SIDE_EFFECTS (t))
16151 return integer_zero_node;
16152
16153 for (;;)
16154 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16155 {
16156 case tcc_unary:
16157 t = TREE_OPERAND (t, 0);
16158 break;
16159
16160 case tcc_binary:
16161 case tcc_comparison:
16162 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16163 t = TREE_OPERAND (t, 0);
16164 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16165 t = TREE_OPERAND (t, 1);
16166 else
16167 return t;
16168 break;
16169
16170 case tcc_expression:
16171 switch (TREE_CODE (t))
16172 {
16173 case COMPOUND_EXPR:
16174 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16175 return t;
16176 t = TREE_OPERAND (t, 0);
16177 break;
16178
16179 case COND_EXPR:
16180 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16181 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16182 return t;
16183 t = TREE_OPERAND (t, 0);
16184 break;
16185
16186 default:
16187 return t;
16188 }
16189 break;
16190
16191 default:
16192 return t;
16193 }
16194 }
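
/* Illustrative example (editorial sketch): if the value of

     (x = f ()) + 10

   is ignored, the PLUS_EXPR's constant operand has no side effects, so
   the walk above strips the addition and returns the MODIFY_EXPR
   x = f () alone, preserving the call's side effects.  */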
16195
16196 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16197
16198 tree
16199 round_up_loc (location_t loc, tree value, unsigned int divisor)
16200 {
16201 tree div = NULL_TREE;
16202
16203 if (divisor == 1)
16204 return value;
16205
16206 	  /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16207 	     have to do anything. Only do this check when VALUE is not a
16208 	     constant, because for a constant the check is more expensive than
16209 	     simply doing the rounding. */
16210 if (TREE_CODE (value) != INTEGER_CST)
16211 {
16212 div = build_int_cst (TREE_TYPE (value), divisor);
16213
16214 if (multiple_of_p (TREE_TYPE (value), value, div))
16215 return value;
16216 }
16217
16218 /* If divisor is a power of two, simplify this to bit manipulation. */
16219 if (divisor == (divisor & -divisor))
16220 {
16221 if (TREE_CODE (value) == INTEGER_CST)
16222 {
16223 wide_int val = value;
16224 bool overflow_p;
16225
16226 if ((val & (divisor - 1)) == 0)
16227 return value;
16228
16229 overflow_p = TREE_OVERFLOW (value);
16230 val &= ~(divisor - 1);
16231 val += divisor;
16232 if (val == 0)
16233 overflow_p = true;
16234
16235 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16236 }
16237 else
16238 {
16239 tree t;
16240
16241 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16242 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16243 t = build_int_cst (TREE_TYPE (value), -divisor);
16244 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16245 }
16246 }
16247 else
16248 {
16249 if (!div)
16250 div = build_int_cst (TREE_TYPE (value), divisor);
16251 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16252 value = size_binop_loc (loc, MULT_EXPR, value, div);
16253 }
16254
16255 return value;
16256 }
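
/* Worked example (editorial sketch): for a power-of-two divisor the
   non-constant path above reduces rounding to bit manipulation; with
   DIVISOR == 8 and VALUE == 13 it computes

     (13 + 7) & -8, i.e. 20 & ~7, which is 16

   where -8, in two's complement, is the mask that clears the low three
   bits.  */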
16257
16258 /* Likewise, but round down. */
16259
16260 tree
16261 round_down_loc (location_t loc, tree value, int divisor)
16262 {
16263 tree div = NULL_TREE;
16264
16265 gcc_assert (divisor > 0);
16266 if (divisor == 1)
16267 return value;
16268
16269 	  /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16270 	     have to do anything. Only do this check when VALUE is not a
16271 	     constant, because for a constant the check is more expensive than
16272 	     simply doing the rounding. */
16273 if (TREE_CODE (value) != INTEGER_CST)
16274 {
16275 div = build_int_cst (TREE_TYPE (value), divisor);
16276
16277 if (multiple_of_p (TREE_TYPE (value), value, div))
16278 return value;
16279 }
16280
16281 /* If divisor is a power of two, simplify this to bit manipulation. */
16282 if (divisor == (divisor & -divisor))
16283 {
16284 tree t;
16285
16286 t = build_int_cst (TREE_TYPE (value), -divisor);
16287 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16288 }
16289 else
16290 {
16291 if (!div)
16292 div = build_int_cst (TREE_TYPE (value), divisor);
16293 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16294 value = size_binop_loc (loc, MULT_EXPR, value, div);
16295 }
16296
16297 return value;
16298 }
16299
16300 /* Returns a pointer to the base of the object addressed by EXP and
16301 extracts information about the offset of the access, storing it
16302 in *PBITPOS and *POFFSET. */
16303
16304 static tree
16305 split_address_to_core_and_offset (tree exp,
16306 HOST_WIDE_INT *pbitpos, tree *poffset)
16307 {
16308 tree core;
16309 machine_mode mode;
16310 int unsignedp, volatilep;
16311 HOST_WIDE_INT bitsize;
16312 location_t loc = EXPR_LOCATION (exp);
16313
16314 if (TREE_CODE (exp) == ADDR_EXPR)
16315 {
16316 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16317 poffset, &mode, &unsignedp, &volatilep,
16318 false);
16319 core = build_fold_addr_expr_loc (loc, core);
16320 }
16321 else
16322 {
16323 core = exp;
16324 *pbitpos = 0;
16325 *poffset = NULL_TREE;
16326 }
16327
16328 return core;
16329 }
16330
16331 /* Returns true if addresses of E1 and E2 differ by a constant, false
16332 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16333
16334 bool
16335 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16336 {
16337 tree core1, core2;
16338 HOST_WIDE_INT bitpos1, bitpos2;
16339 tree toffset1, toffset2, tdiff, type;
16340
16341 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16342 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16343
16344 if (bitpos1 % BITS_PER_UNIT != 0
16345 || bitpos2 % BITS_PER_UNIT != 0
16346 || !operand_equal_p (core1, core2, 0))
16347 return false;
16348
16349 if (toffset1 && toffset2)
16350 {
16351 type = TREE_TYPE (toffset1);
16352 if (type != TREE_TYPE (toffset2))
16353 toffset2 = fold_convert (type, toffset2);
16354
16355 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16356 if (!cst_and_fits_in_hwi (tdiff))
16357 return false;
16358
16359 *diff = int_cst_value (tdiff);
16360 }
16361 else if (toffset1 || toffset2)
16362 {
16363 /* If only one of the offsets is non-constant, the difference cannot
16364 be a constant. */
16365 return false;
16366 }
16367 else
16368 *diff = 0;
16369
16370 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16371 return true;
16372 }
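
/* Illustrative example (editorial sketch): for int a[10], applying
   ptr_difference_const to &a[3] and &a[1] finds the same core (&a) for
   both, byte-aligned bit positions of 96 and 32, and no variable
   offsets, so it returns true with *DIFF set to 8 on a target with
   32-bit int.  */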
16373
16374 /* Simplify the floating point expression EXP when the sign of the
16375 result is not significant. Return NULL_TREE if no simplification
16376 is possible. */
16377
16378 tree
16379 fold_strip_sign_ops (tree exp)
16380 {
16381 tree arg0, arg1;
16382 location_t loc = EXPR_LOCATION (exp);
16383
16384 switch (TREE_CODE (exp))
16385 {
16386 case ABS_EXPR:
16387 case NEGATE_EXPR:
16388 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16389 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16390
16391 case MULT_EXPR:
16392 case RDIV_EXPR:
16393 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16394 return NULL_TREE;
16395 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16396 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16397 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16398 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16399 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16400 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16401 break;
16402
16403 case COMPOUND_EXPR:
16404 arg0 = TREE_OPERAND (exp, 0);
16405 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16406 if (arg1)
16407 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16408 break;
16409
16410 case COND_EXPR:
16411 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16412 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16413 if (arg0 || arg1)
16414 return fold_build3_loc (loc,
16415 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16416 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16417 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16418 break;
16419
16420 case CALL_EXPR:
16421 {
16422 const enum built_in_function fcode = builtin_mathfn_code (exp);
16423 switch (fcode)
16424 {
16425 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16426 /* Strip copysign function call, return the 1st argument. */
16427 arg0 = CALL_EXPR_ARG (exp, 0);
16428 arg1 = CALL_EXPR_ARG (exp, 1);
16429 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16430
16431 default:
16432 /* Strip sign ops from the argument of "odd" math functions. */
16433 if (negate_mathfn_p (fcode))
16434 {
16435 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16436 if (arg0)
16437 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16438 }
16439 break;
16440 }
16441 }
16442 break;
16443
16444 default:
16445 break;
16446 }
16447 return NULL_TREE;
16448 }
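
/* Illustrative example (editorial sketch): when a caller knows that only
   the magnitude of the result matters, fold_strip_sign_ops applied to -x
   returns x, and copysign (x, y) is reduced to x with y retained only
   for its side effects via omit_one_operand_loc.  */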