1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24    @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "predict.h"
62 #include "vec.h"
63 #include "hashtab.h"
64 #include "hash-set.h"
65 #include "machmode.h"
66 #include "hard-reg-set.h"
67 #include "input.h"
68 #include "function.h"
69 #include "basic-block.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
72 #include "tree-eh.h"
73 #include "gimple-expr.h"
74 #include "is-a.h"
75 #include "gimple.h"
76 #include "gimplify.h"
77 #include "tree-dfa.h"
78 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
79 #include "builtins.h"
80 #include "hash-map.h"
81 #include "plugin-api.h"
82 #include "ipa-ref.h"
83 #include "cgraph.h"
84 #include "generic-match.h"
85 #include "optabs.h"
86
87 /* Nonzero if we are folding constants inside an initializer; zero
88 otherwise. */
89 int folding_initializer = 0;
90
91 /* The following constants represent a bit based encoding of GCC's
92 comparison operators. This encoding simplifies transformations
93 on relational comparison operators, such as AND and OR. */
94 enum comparison_code {
95 COMPCODE_FALSE = 0,
96 COMPCODE_LT = 1,
97 COMPCODE_EQ = 2,
98 COMPCODE_LE = 3,
99 COMPCODE_GT = 4,
100 COMPCODE_LTGT = 5,
101 COMPCODE_GE = 6,
102 COMPCODE_ORD = 7,
103 COMPCODE_UNORD = 8,
104 COMPCODE_UNLT = 9,
105 COMPCODE_UNEQ = 10,
106 COMPCODE_UNLE = 11,
107 COMPCODE_UNGT = 12,
108 COMPCODE_NE = 13,
109 COMPCODE_UNGE = 14,
110 COMPCODE_TRUE = 15
111 };
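/* For illustration: the encoding assigns one bit each to the four
   mutually exclusive outcomes LT (1), EQ (2), GT (4) and UNORD (8);
   every other code is the bitwise OR of its constituent outcomes, e.g.

     COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ                   (1|2 == 3)
     COMPCODE_NE   == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD  (1|4|8 == 13)
     COMPCODE_TRUE == COMPCODE_ORD | COMPCODE_UNORD               (7|8 == 15)

   ANDing or ORing two comparisons therefore reduces to a bitwise
   AND/OR of their compcodes.  */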
112
113 static bool negate_mathfn_p (enum built_in_function);
114 static bool negate_expr_p (tree);
115 static tree negate_expr (tree);
116 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
117 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
118 static enum comparison_code comparison_to_compcode (enum tree_code);
119 static enum tree_code compcode_to_comparison (enum comparison_code);
120 static int operand_equal_for_comparison_p (tree, tree, tree);
121 static int twoval_comparison_p (tree, tree *, tree *, int *);
122 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
123 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
124 static tree make_bit_field_ref (location_t, tree, tree,
125 HOST_WIDE_INT, HOST_WIDE_INT, int);
126 static tree optimize_bit_field_compare (location_t, enum tree_code,
127 tree, tree, tree);
128 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
129 HOST_WIDE_INT *,
130 machine_mode *, int *, int *,
131 tree *, tree *);
132 static int simple_operand_p (const_tree);
133 static bool simple_operand_p_2 (tree);
134 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
135 static tree range_predecessor (tree);
136 static tree range_successor (tree);
137 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
138 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
139 static tree unextend (tree, int, int, tree);
140 static tree optimize_minmax_comparison (location_t, enum tree_code,
141 tree, tree, tree);
142 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
143 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
144 static tree fold_binary_op_with_conditional_arg (location_t,
145 enum tree_code, tree,
146 tree, tree,
147 tree, tree, int);
148 static tree fold_mathfn_compare (location_t,
149 enum built_in_function, enum tree_code,
150 tree, tree, tree);
151 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
152 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
153 static bool reorder_operands_p (const_tree, const_tree);
154 static tree fold_negate_const (tree, tree);
155 static tree fold_not_const (const_tree, tree);
156 static tree fold_relational_const (enum tree_code, tree, tree, tree);
157 static tree fold_convert_const (enum tree_code, tree, tree);
158 static tree fold_view_convert_expr (tree, tree);
159 static bool vec_cst_ctor_to_array (tree, tree *);
160
161
162 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
163 Otherwise, return LOC. */
164
165 static location_t
166 expr_location_or (tree t, location_t loc)
167 {
168 location_t tloc = EXPR_LOCATION (t);
169 return tloc == UNKNOWN_LOCATION ? loc : tloc;
170 }
171
172 /* Similar to protected_set_expr_location, but never modify X in place;
173    if the location can and needs to be set, unshare X first.  */
174
175 static inline tree
176 protected_set_expr_location_unshare (tree x, location_t loc)
177 {
178 if (CAN_HAVE_LOCATION_P (x)
179 && EXPR_LOCATION (x) != loc
180 && !(TREE_CODE (x) == SAVE_EXPR
181 || TREE_CODE (x) == TARGET_EXPR
182 || TREE_CODE (x) == BIND_EXPR))
183 {
184 x = copy_node (x);
185 SET_EXPR_LOCATION (x, loc);
186 }
187 return x;
188 }
189 \f
190 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
191 division and returns the quotient. Otherwise returns
192 NULL_TREE. */
193
194 tree
195 div_if_zero_remainder (const_tree arg1, const_tree arg2)
196 {
197 widest_int quo;
198
199 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
200 SIGNED, &quo))
201 return wide_int_to_tree (TREE_TYPE (arg1), quo);
202
203 return NULL_TREE;
204 }
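/* For illustration (constants shown as plain integers, not trees):

     div_if_zero_remainder (8, 4)  -> 2
     div_if_zero_remainder (9, 4)  -> NULL_TREE  (remainder is 1)

   The quotient is built in the type of ARG1.  */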
205 \f
206 /* This is nonzero if we should defer warnings about undefined
207 overflow. This facility exists because these warnings are a
208 special case. The code to estimate loop iterations does not want
209 to issue any warnings, since it works with expressions which do not
210 occur in user code. Various bits of cleanup code call fold(), but
211 only use the result if it has certain characteristics (e.g., is a
212 constant); that code only wants to issue a warning if the result is
213 used. */
214
215 static int fold_deferring_overflow_warnings;
216
217 /* If a warning about undefined overflow is deferred, this is the
218 warning. Note that this may cause us to turn two warnings into
219 one, but that is fine since it is sufficient to only give one
220 warning per expression. */
221
222 static const char* fold_deferred_overflow_warning;
223
224 /* If a warning about undefined overflow is deferred, this is the
225 level at which the warning should be emitted. */
226
227 static enum warn_strict_overflow_code fold_deferred_overflow_code;
228
229 /* Start deferring overflow warnings. We could use a stack here to
230 permit nested calls, but at present it is not necessary. */
231
232 void
233 fold_defer_overflow_warnings (void)
234 {
235 ++fold_deferring_overflow_warnings;
236 }
237
238 /* Stop deferring overflow warnings. If there is a pending warning,
239 and ISSUE is true, then issue the warning if appropriate. STMT is
240 the statement with which the warning should be associated (used for
241 location information); STMT may be NULL. CODE is the level of the
242 warning--a warn_strict_overflow_code value. This function will use
243 the smaller of CODE and the deferred code when deciding whether to
244 issue the warning. CODE may be zero to mean to always use the
245 deferred code. */
246
247 void
248 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
249 {
250 const char *warnmsg;
251 location_t locus;
252
253 gcc_assert (fold_deferring_overflow_warnings > 0);
254 --fold_deferring_overflow_warnings;
255 if (fold_deferring_overflow_warnings > 0)
256 {
257 if (fold_deferred_overflow_warning != NULL
258 && code != 0
259 && code < (int) fold_deferred_overflow_code)
260 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
261 return;
262 }
263
264 warnmsg = fold_deferred_overflow_warning;
265 fold_deferred_overflow_warning = NULL;
266
267 if (!issue || warnmsg == NULL)
268 return;
269
270 if (gimple_no_warning_p (stmt))
271 return;
272
273 /* Use the smallest code level when deciding to issue the
274 warning. */
275 if (code == 0 || code > (int) fold_deferred_overflow_code)
276 code = fold_deferred_overflow_code;
277
278 if (!issue_strict_overflow_warning (code))
279 return;
280
281 if (stmt == NULL)
282 locus = input_location;
283 else
284 locus = gimple_location (stmt);
285 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
286 }
287
288 /* Stop deferring overflow warnings, ignoring any deferred
289 warnings. */
290
291 void
292 fold_undefer_and_ignore_overflow_warnings (void)
293 {
294 fold_undefer_overflow_warnings (false, NULL, 0);
295 }
296
297 /* Whether we are deferring overflow warnings. */
298
299 bool
300 fold_deferring_overflow_warnings_p (void)
301 {
302 return fold_deferring_overflow_warnings > 0;
303 }
304
305 /* This is called when we fold something based on the fact that signed
306 overflow is undefined. */
307
308 static void
309 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
310 {
311 if (fold_deferring_overflow_warnings > 0)
312 {
313 if (fold_deferred_overflow_warning == NULL
314 || wc < fold_deferred_overflow_code)
315 {
316 fold_deferred_overflow_warning = gmsgid;
317 fold_deferred_overflow_code = wc;
318 }
319 }
320 else if (issue_strict_overflow_warning (wc))
321 warning (OPT_Wstrict_overflow, gmsgid);
322 }
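/* An illustrative caller pattern (a sketch; not a caller that appears
   in this file): code that folds speculatively brackets the work with
   the defer/undefer pair so a -Wstrict-overflow warning is emitted only
   if the folded result is actually used:

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     fold_undefer_overflow_warnings (result_was_used, stmt,
                                     WARN_STRICT_OVERFLOW_MISC);  */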
323 \f
324 /* Return true if the built-in mathematical function specified by CODE
325 is odd, i.e. -f(x) == f(-x). */
326
327 static bool
328 negate_mathfn_p (enum built_in_function code)
329 {
330 switch (code)
331 {
332 CASE_FLT_FN (BUILT_IN_ASIN):
333 CASE_FLT_FN (BUILT_IN_ASINH):
334 CASE_FLT_FN (BUILT_IN_ATAN):
335 CASE_FLT_FN (BUILT_IN_ATANH):
336 CASE_FLT_FN (BUILT_IN_CASIN):
337 CASE_FLT_FN (BUILT_IN_CASINH):
338 CASE_FLT_FN (BUILT_IN_CATAN):
339 CASE_FLT_FN (BUILT_IN_CATANH):
340 CASE_FLT_FN (BUILT_IN_CBRT):
341 CASE_FLT_FN (BUILT_IN_CPROJ):
342 CASE_FLT_FN (BUILT_IN_CSIN):
343 CASE_FLT_FN (BUILT_IN_CSINH):
344 CASE_FLT_FN (BUILT_IN_CTAN):
345 CASE_FLT_FN (BUILT_IN_CTANH):
346 CASE_FLT_FN (BUILT_IN_ERF):
347 CASE_FLT_FN (BUILT_IN_LLROUND):
348 CASE_FLT_FN (BUILT_IN_LROUND):
349 CASE_FLT_FN (BUILT_IN_ROUND):
350 CASE_FLT_FN (BUILT_IN_SIN):
351 CASE_FLT_FN (BUILT_IN_SINH):
352 CASE_FLT_FN (BUILT_IN_TAN):
353 CASE_FLT_FN (BUILT_IN_TANH):
354 CASE_FLT_FN (BUILT_IN_TRUNC):
355 return true;
356
357 CASE_FLT_FN (BUILT_IN_LLRINT):
358 CASE_FLT_FN (BUILT_IN_LRINT):
359 CASE_FLT_FN (BUILT_IN_NEARBYINT):
360 CASE_FLT_FN (BUILT_IN_RINT):
361 return !flag_rounding_math;
362
363 default:
364 break;
365 }
366 return false;
367 }
368
369 /* Check whether we may negate an integer constant T without causing
370 overflow. */
371
372 bool
373 may_negate_without_overflow_p (const_tree t)
374 {
375 tree type;
376
377 gcc_assert (TREE_CODE (t) == INTEGER_CST);
378
379 type = TREE_TYPE (t);
380 if (TYPE_UNSIGNED (type))
381 return false;
382
383 return !wi::only_sign_bit_p (t);
384 }
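/* For illustration, with a 32-bit signed type:

     may_negate_without_overflow_p (INT_MIN)  -> false  (-INT_MIN overflows)
     may_negate_without_overflow_p (-5)       -> true

   wi::only_sign_bit_p is exactly the "INT_MIN of its precision" test.  */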
385
386 /* Determine whether an expression T can be cheaply negated using
387 the function negate_expr without introducing undefined overflow. */
388
389 static bool
390 negate_expr_p (tree t)
391 {
392 tree type;
393
394 if (t == 0)
395 return false;
396
397 type = TREE_TYPE (t);
398
399 STRIP_SIGN_NOPS (t);
400 switch (TREE_CODE (t))
401 {
402 case INTEGER_CST:
403 if (TYPE_OVERFLOW_WRAPS (type))
404 return true;
405
406 /* Check that -CST will not overflow type. */
407 return may_negate_without_overflow_p (t);
408 case BIT_NOT_EXPR:
409 return (INTEGRAL_TYPE_P (type)
410 && TYPE_OVERFLOW_WRAPS (type));
411
412 case FIXED_CST:
413 return true;
414
415 case NEGATE_EXPR:
416 return !TYPE_OVERFLOW_SANITIZED (type);
417
418 case REAL_CST:
419 /* We want to canonicalize to positive real constants. Pretend
420 that only negative ones can be easily negated. */
421 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
422
423 case COMPLEX_CST:
424 return negate_expr_p (TREE_REALPART (t))
425 && negate_expr_p (TREE_IMAGPART (t));
426
427 case VECTOR_CST:
428 {
429 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
430 return true;
431
432 int count = TYPE_VECTOR_SUBPARTS (type), i;
433
434 for (i = 0; i < count; i++)
435 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
436 return false;
437
438 return true;
439 }
440
441 case COMPLEX_EXPR:
442 return negate_expr_p (TREE_OPERAND (t, 0))
443 && negate_expr_p (TREE_OPERAND (t, 1));
444
445 case CONJ_EXPR:
446 return negate_expr_p (TREE_OPERAND (t, 0));
447
448 case PLUS_EXPR:
449 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
450 || HONOR_SIGNED_ZEROS (element_mode (type)))
451 return false;
452 /* -(A + B) -> (-B) - A. */
453 if (negate_expr_p (TREE_OPERAND (t, 1))
454 && reorder_operands_p (TREE_OPERAND (t, 0),
455 TREE_OPERAND (t, 1)))
456 return true;
457 /* -(A + B) -> (-A) - B. */
458 return negate_expr_p (TREE_OPERAND (t, 0));
459
460 case MINUS_EXPR:
461 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
462 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
463 && !HONOR_SIGNED_ZEROS (element_mode (type))
464 && reorder_operands_p (TREE_OPERAND (t, 0),
465 TREE_OPERAND (t, 1));
466
467 case MULT_EXPR:
468 if (TYPE_UNSIGNED (TREE_TYPE (t)))
469 break;
470
471 /* Fall through. */
472
473 case RDIV_EXPR:
474 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
475 return negate_expr_p (TREE_OPERAND (t, 1))
476 || negate_expr_p (TREE_OPERAND (t, 0));
477 break;
478
479 case TRUNC_DIV_EXPR:
480 case ROUND_DIV_EXPR:
481 case EXACT_DIV_EXPR:
482 /* In general we can't negate A / B, because if A is INT_MIN and
483 B is 1, we may turn this into INT_MIN / -1 which is undefined
484 and actually traps on some architectures. But if overflow is
485 undefined, we can negate, because - (INT_MIN / 1) is an
486 overflow. */
487 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
488 {
489 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
490 break;
491 /* If overflow is undefined then we have to be careful because
492 we ask whether it's ok to associate the negate with the
493 division which is not ok for example for
494 -((a - b) / c) where (-(a - b)) / c may invoke undefined
495 overflow because of negating INT_MIN. So do not use
496 negate_expr_p here but open-code the two important cases. */
497 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
498 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
499 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
500 return true;
501 }
502 else if (negate_expr_p (TREE_OPERAND (t, 0)))
503 return true;
504 return negate_expr_p (TREE_OPERAND (t, 1));
505
506 case NOP_EXPR:
507 /* Negate -((double)float) as (double)(-float). */
508 if (TREE_CODE (type) == REAL_TYPE)
509 {
510 tree tem = strip_float_extensions (t);
511 if (tem != t)
512 return negate_expr_p (tem);
513 }
514 break;
515
516 case CALL_EXPR:
517 /* Negate -f(x) as f(-x). */
518 if (negate_mathfn_p (builtin_mathfn_code (t)))
519 return negate_expr_p (CALL_EXPR_ARG (t, 0));
520 break;
521
522 case RSHIFT_EXPR:
523 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
524 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
525 {
526 tree op1 = TREE_OPERAND (t, 1);
527 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
528 return true;
529 }
530 break;
531
532 default:
533 break;
534 }
535 return false;
536 }
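/* For illustration:

     negate_expr_p (5)        -> true   (INTEGER_CST, -5 fits the type)
     negate_expr_p (INT_MIN)  -> false  unless the type wraps
                                        (TYPE_OVERFLOW_WRAPS)
     negate_expr_p (a + b)    -> true only if one operand is itself
                                 cheaply negatable and the type honors
                                 neither signed zeros nor sign-dependent
                                 rounding.  */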
537
538 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
539    simplification is possible.
540 If negate_expr_p would return true for T, NULL_TREE will never be
541 returned. */
542
543 static tree
544 fold_negate_expr (location_t loc, tree t)
545 {
546 tree type = TREE_TYPE (t);
547 tree tem;
548
549 switch (TREE_CODE (t))
550 {
551 /* Convert - (~A) to A + 1. */
552 case BIT_NOT_EXPR:
553 if (INTEGRAL_TYPE_P (type))
554 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
555 build_one_cst (type));
556 break;
557
558 case INTEGER_CST:
559 tem = fold_negate_const (t, type);
560 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
561 || (!TYPE_OVERFLOW_TRAPS (type)
562 && TYPE_OVERFLOW_WRAPS (type))
563 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
564 return tem;
565 break;
566
567 case REAL_CST:
568 tem = fold_negate_const (t, type);
569 return tem;
570
571 case FIXED_CST:
572 tem = fold_negate_const (t, type);
573 return tem;
574
575 case COMPLEX_CST:
576 {
577 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
578 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
579 if (rpart && ipart)
580 return build_complex (type, rpart, ipart);
581 }
582 break;
583
584 case VECTOR_CST:
585 {
586 int count = TYPE_VECTOR_SUBPARTS (type), i;
587 tree *elts = XALLOCAVEC (tree, count);
588
589 for (i = 0; i < count; i++)
590 {
591 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
592 if (elts[i] == NULL_TREE)
593 return NULL_TREE;
594 }
595
596 return build_vector (type, elts);
597 }
598
599 case COMPLEX_EXPR:
600 if (negate_expr_p (t))
601 return fold_build2_loc (loc, COMPLEX_EXPR, type,
602 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
603 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
604 break;
605
606 case CONJ_EXPR:
607 if (negate_expr_p (t))
608 return fold_build1_loc (loc, CONJ_EXPR, type,
609 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
610 break;
611
612 case NEGATE_EXPR:
613 if (!TYPE_OVERFLOW_SANITIZED (type))
614 return TREE_OPERAND (t, 0);
615 break;
616
617 case PLUS_EXPR:
618 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
619 && !HONOR_SIGNED_ZEROS (element_mode (type)))
620 {
621 /* -(A + B) -> (-B) - A. */
622 if (negate_expr_p (TREE_OPERAND (t, 1))
623 && reorder_operands_p (TREE_OPERAND (t, 0),
624 TREE_OPERAND (t, 1)))
625 {
626 tem = negate_expr (TREE_OPERAND (t, 1));
627 return fold_build2_loc (loc, MINUS_EXPR, type,
628 tem, TREE_OPERAND (t, 0));
629 }
630
631 /* -(A + B) -> (-A) - B. */
632 if (negate_expr_p (TREE_OPERAND (t, 0)))
633 {
634 tem = negate_expr (TREE_OPERAND (t, 0));
635 return fold_build2_loc (loc, MINUS_EXPR, type,
636 tem, TREE_OPERAND (t, 1));
637 }
638 }
639 break;
640
641 case MINUS_EXPR:
642 /* - (A - B) -> B - A */
643 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
644 && !HONOR_SIGNED_ZEROS (element_mode (type))
645 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
646 return fold_build2_loc (loc, MINUS_EXPR, type,
647 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
648 break;
649
650 case MULT_EXPR:
651 if (TYPE_UNSIGNED (type))
652 break;
653
654 /* Fall through. */
655
656 case RDIV_EXPR:
657 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
658 {
659 tem = TREE_OPERAND (t, 1);
660 if (negate_expr_p (tem))
661 return fold_build2_loc (loc, TREE_CODE (t), type,
662 TREE_OPERAND (t, 0), negate_expr (tem));
663 tem = TREE_OPERAND (t, 0);
664 if (negate_expr_p (tem))
665 return fold_build2_loc (loc, TREE_CODE (t), type,
666 negate_expr (tem), TREE_OPERAND (t, 1));
667 }
668 break;
669
670 case TRUNC_DIV_EXPR:
671 case ROUND_DIV_EXPR:
672 case EXACT_DIV_EXPR:
673 /* In general we can't negate A / B, because if A is INT_MIN and
674 B is 1, we may turn this into INT_MIN / -1 which is undefined
675 and actually traps on some architectures. But if overflow is
676 undefined, we can negate, because - (INT_MIN / 1) is an
677 overflow. */
678 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
679 {
680 const char * const warnmsg = G_("assuming signed overflow does not "
681 "occur when negating a division");
682 tem = TREE_OPERAND (t, 1);
683 if (negate_expr_p (tem))
684 {
685 if (INTEGRAL_TYPE_P (type)
686 && (TREE_CODE (tem) != INTEGER_CST
687 || integer_onep (tem)))
688 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
689 return fold_build2_loc (loc, TREE_CODE (t), type,
690 TREE_OPERAND (t, 0), negate_expr (tem));
691 }
692 /* If overflow is undefined then we have to be careful because
693 we ask whether it's ok to associate the negate with the
694 division which is not ok for example for
695 -((a - b) / c) where (-(a - b)) / c may invoke undefined
696 overflow because of negating INT_MIN. So do not use
697 negate_expr_p here but open-code the two important cases. */
698 tem = TREE_OPERAND (t, 0);
699 if ((INTEGRAL_TYPE_P (type)
700 && (TREE_CODE (tem) == NEGATE_EXPR
701 || (TREE_CODE (tem) == INTEGER_CST
702 && may_negate_without_overflow_p (tem))))
703 || !INTEGRAL_TYPE_P (type))
704 return fold_build2_loc (loc, TREE_CODE (t), type,
705 negate_expr (tem), TREE_OPERAND (t, 1));
706 }
707 break;
708
709 case NOP_EXPR:
710 /* Convert -((double)float) into (double)(-float). */
711 if (TREE_CODE (type) == REAL_TYPE)
712 {
713 tem = strip_float_extensions (t);
714 if (tem != t && negate_expr_p (tem))
715 return fold_convert_loc (loc, type, negate_expr (tem));
716 }
717 break;
718
719 case CALL_EXPR:
720 /* Negate -f(x) as f(-x). */
721 if (negate_mathfn_p (builtin_mathfn_code (t))
722 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
723 {
724 tree fndecl, arg;
725
726 fndecl = get_callee_fndecl (t);
727 arg = negate_expr (CALL_EXPR_ARG (t, 0));
728 return build_call_expr_loc (loc, fndecl, 1, arg);
729 }
730 break;
731
732 case RSHIFT_EXPR:
733 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
734 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
735 {
736 tree op1 = TREE_OPERAND (t, 1);
737 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
738 {
739 tree ntype = TYPE_UNSIGNED (type)
740 ? signed_type_for (type)
741 : unsigned_type_for (type);
742 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
743 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
744 return fold_convert_loc (loc, type, temp);
745 }
746 }
747 break;
748
749 default:
750 break;
751 }
752
753 return NULL_TREE;
754 }
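/* For illustration:

     fold_negate_expr (loc, ~a)     -> a + 1      (integral types)
     fold_negate_expr (loc, a - b)  -> b - a      (when signed zeros and
                                                   sign-dependent rounding
                                                   are not honored)
     fold_negate_expr (loc, x)      -> NULL_TREE  for a bare variable  */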
755
756 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
757    negated in a simpler way.  Also allow T to be NULL_TREE, in which case
758 return NULL_TREE. */
759
760 static tree
761 negate_expr (tree t)
762 {
763 tree type, tem;
764 location_t loc;
765
766 if (t == NULL_TREE)
767 return NULL_TREE;
768
769 loc = EXPR_LOCATION (t);
770 type = TREE_TYPE (t);
771 STRIP_SIGN_NOPS (t);
772
773 tem = fold_negate_expr (loc, t);
774 if (!tem)
775 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
776 return fold_convert_loc (loc, type, tem);
777 }
778 \f
779 /* Split a tree IN into constant, literal and variable parts that could be
780 combined with CODE to make IN. "constant" means an expression with
781 TREE_CONSTANT but that isn't an actual constant. CODE must be a
782 commutative arithmetic operation. Store the constant part into *CONP,
783 the literal in *LITP and return the variable part. If a part isn't
784 present, set it to null. If the tree does not decompose in this way,
785 return the entire tree as the variable part and the other parts as null.
786
787 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
788 case, we negate an operand that was subtracted. Except if it is a
789 literal for which we use *MINUS_LITP instead.
790
791 If NEGATE_P is true, we are negating all of IN, again except a literal
792 for which we use *MINUS_LITP instead.
793
794 If IN is itself a literal or constant, return it as appropriate.
795
796 Note that we do not guarantee that any of the three values will be the
797 same type as IN, but they will have the same signedness and mode. */
798
799 static tree
800 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
801 tree *minus_litp, int negate_p)
802 {
803 tree var = 0;
804
805 *conp = 0;
806 *litp = 0;
807 *minus_litp = 0;
808
809 /* Strip any conversions that don't change the machine mode or signedness. */
810 STRIP_SIGN_NOPS (in);
811
812 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
813 || TREE_CODE (in) == FIXED_CST)
814 *litp = in;
815 else if (TREE_CODE (in) == code
816 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
817 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
818 /* We can associate addition and subtraction together (even
819 though the C standard doesn't say so) for integers because
820 the value is not affected. For reals, the value might be
821 affected, so we can't. */
822 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
823 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
824 {
825 tree op0 = TREE_OPERAND (in, 0);
826 tree op1 = TREE_OPERAND (in, 1);
827 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
828 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
829
830 /* First see if either of the operands is a literal, then a constant. */
831 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
832 || TREE_CODE (op0) == FIXED_CST)
833 *litp = op0, op0 = 0;
834 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
835 || TREE_CODE (op1) == FIXED_CST)
836 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
837
838 if (op0 != 0 && TREE_CONSTANT (op0))
839 *conp = op0, op0 = 0;
840 else if (op1 != 0 && TREE_CONSTANT (op1))
841 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
842
843 /* If we haven't dealt with either operand, this is not a case we can
844 decompose. Otherwise, VAR is either of the ones remaining, if any. */
845 if (op0 != 0 && op1 != 0)
846 var = in;
847 else if (op0 != 0)
848 var = op0;
849 else
850 var = op1, neg_var_p = neg1_p;
851
852 /* Now do any needed negations. */
853 if (neg_litp_p)
854 *minus_litp = *litp, *litp = 0;
855 if (neg_conp_p)
856 *conp = negate_expr (*conp);
857 if (neg_var_p)
858 var = negate_expr (var);
859 }
860 else if (TREE_CODE (in) == BIT_NOT_EXPR
861 && code == PLUS_EXPR)
862 {
863 /* -X - 1 is folded to ~X, undo that here. */
864 *minus_litp = build_one_cst (TREE_TYPE (in));
865 var = negate_expr (TREE_OPERAND (in, 0));
866 }
867 else if (TREE_CONSTANT (in))
868 *conp = in;
869 else
870 var = in;
871
872 if (negate_p)
873 {
874 if (*litp)
875 *minus_litp = *litp, *litp = 0;
876 else if (*minus_litp)
877 *litp = *minus_litp, *minus_litp = 0;
878 *conp = negate_expr (*conp);
879 var = negate_expr (var);
880 }
881
882 return var;
883 }
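/* A worked example (a sketch; trees shown symbolically):

     split_tree (a + 5, PLUS_EXPR, &con, &lit, &mlit, 0)
       -> returns a, *lit = 5, *con = *mlit = NULL
     split_tree (b - 4, PLUS_EXPR, &con, &lit, &mlit, 0)
       -> returns b, *mlit = 4  (the subtracted literal goes to
          *minus_litp rather than being negated)  */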
884
885 /* Re-associate trees split by the above function. T1 and T2 are
886 either expressions to associate or null. Return the new
887 expression, if any. LOC is the location of the new expression. If
888 we build an operation, do it in TYPE and with CODE. */
889
890 static tree
891 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
892 {
893 if (t1 == 0)
894 return t2;
895 else if (t2 == 0)
896 return t1;
897
898 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
899 try to fold this since we will have infinite recursion. But do
900 deal with any NEGATE_EXPRs. */
901 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
902 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
903 {
904 if (code == PLUS_EXPR)
905 {
906 if (TREE_CODE (t1) == NEGATE_EXPR)
907 return build2_loc (loc, MINUS_EXPR, type,
908 fold_convert_loc (loc, type, t2),
909 fold_convert_loc (loc, type,
910 TREE_OPERAND (t1, 0)));
911 else if (TREE_CODE (t2) == NEGATE_EXPR)
912 return build2_loc (loc, MINUS_EXPR, type,
913 fold_convert_loc (loc, type, t1),
914 fold_convert_loc (loc, type,
915 TREE_OPERAND (t2, 0)));
916 else if (integer_zerop (t2))
917 return fold_convert_loc (loc, type, t1);
918 }
919 else if (code == MINUS_EXPR)
920 {
921 if (integer_zerop (t2))
922 return fold_convert_loc (loc, type, t1);
923 }
924
925 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
926 fold_convert_loc (loc, type, t2));
927 }
928
929 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
930 fold_convert_loc (loc, type, t2));
931 }
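/* For illustration, re-associating the parts split above:

     associate_trees (loc, x + y, -z, PLUS_EXPR, type)

   takes the NEGATE_EXPR branch and builds (x + y) - z directly instead
   of recursing into fold and risking infinite recursion.  */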
932 \f
933 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
934 for use in int_const_binop, size_binop and size_diffop. */
935
936 static bool
937 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
938 {
939 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
940 return false;
941 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
942 return false;
943
944 switch (code)
945 {
946 case LSHIFT_EXPR:
947 case RSHIFT_EXPR:
948 case LROTATE_EXPR:
949 case RROTATE_EXPR:
950 return true;
951
952 default:
953 break;
954 }
955
956 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
957 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
958 && TYPE_MODE (type1) == TYPE_MODE (type2);
959 }
960
961
962 /* Combine two integer constants ARG1 and ARG2 under operation CODE
963 to produce a new constant. Return NULL_TREE if we don't know how
964 to evaluate CODE at compile-time. */
965
966 static tree
967 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
968 int overflowable)
969 {
970 wide_int res;
971 tree t;
972 tree type = TREE_TYPE (arg1);
973 signop sign = TYPE_SIGN (type);
974 bool overflow = false;
975
976 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
977 TYPE_SIGN (TREE_TYPE (parg2)));
978
979 switch (code)
980 {
981 case BIT_IOR_EXPR:
982 res = wi::bit_or (arg1, arg2);
983 break;
984
985 case BIT_XOR_EXPR:
986 res = wi::bit_xor (arg1, arg2);
987 break;
988
989 case BIT_AND_EXPR:
990 res = wi::bit_and (arg1, arg2);
991 break;
992
993 case RSHIFT_EXPR:
994 case LSHIFT_EXPR:
995 if (wi::neg_p (arg2))
996 {
997 arg2 = -arg2;
998 if (code == RSHIFT_EXPR)
999 code = LSHIFT_EXPR;
1000 else
1001 code = RSHIFT_EXPR;
1002 }
1003
1004 if (code == RSHIFT_EXPR)
1005 /* It's unclear from the C standard whether shifts can overflow.
1006 The following code ignores overflow; perhaps a C standard
1007 interpretation ruling is needed. */
1008 res = wi::rshift (arg1, arg2, sign);
1009 else
1010 res = wi::lshift (arg1, arg2);
1011 break;
1012
1013 case RROTATE_EXPR:
1014 case LROTATE_EXPR:
1015 if (wi::neg_p (arg2))
1016 {
1017 arg2 = -arg2;
1018 if (code == RROTATE_EXPR)
1019 code = LROTATE_EXPR;
1020 else
1021 code = RROTATE_EXPR;
1022 }
1023
1024 if (code == RROTATE_EXPR)
1025 res = wi::rrotate (arg1, arg2);
1026 else
1027 res = wi::lrotate (arg1, arg2);
1028 break;
1029
1030 case PLUS_EXPR:
1031 res = wi::add (arg1, arg2, sign, &overflow);
1032 break;
1033
1034 case MINUS_EXPR:
1035 res = wi::sub (arg1, arg2, sign, &overflow);
1036 break;
1037
1038 case MULT_EXPR:
1039 res = wi::mul (arg1, arg2, sign, &overflow);
1040 break;
1041
1042 case MULT_HIGHPART_EXPR:
1043 res = wi::mul_high (arg1, arg2, sign);
1044 break;
1045
1046 case TRUNC_DIV_EXPR:
1047 case EXACT_DIV_EXPR:
1048 if (arg2 == 0)
1049 return NULL_TREE;
1050 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1051 break;
1052
1053 case FLOOR_DIV_EXPR:
1054 if (arg2 == 0)
1055 return NULL_TREE;
1056 res = wi::div_floor (arg1, arg2, sign, &overflow);
1057 break;
1058
1059 case CEIL_DIV_EXPR:
1060 if (arg2 == 0)
1061 return NULL_TREE;
1062 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1063 break;
1064
1065 case ROUND_DIV_EXPR:
1066 if (arg2 == 0)
1067 return NULL_TREE;
1068 res = wi::div_round (arg1, arg2, sign, &overflow);
1069 break;
1070
1071 case TRUNC_MOD_EXPR:
1072 if (arg2 == 0)
1073 return NULL_TREE;
1074 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1075 break;
1076
1077 case FLOOR_MOD_EXPR:
1078 if (arg2 == 0)
1079 return NULL_TREE;
1080 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1081 break;
1082
1083 case CEIL_MOD_EXPR:
1084 if (arg2 == 0)
1085 return NULL_TREE;
1086 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1087 break;
1088
1089 case ROUND_MOD_EXPR:
1090 if (arg2 == 0)
1091 return NULL_TREE;
1092 res = wi::mod_round (arg1, arg2, sign, &overflow);
1093 break;
1094
1095 case MIN_EXPR:
1096 res = wi::min (arg1, arg2, sign);
1097 break;
1098
1099 case MAX_EXPR:
1100 res = wi::max (arg1, arg2, sign);
1101 break;
1102
1103 default:
1104 return NULL_TREE;
1105 }
1106
1107 t = force_fit_type (type, res, overflowable,
1108 (((sign == SIGNED || overflowable == -1)
1109 && overflow)
1110 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1111
1112 return t;
1113 }
1114
1115 tree
1116 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1117 {
1118 return int_const_binop_1 (code, arg1, arg2, 1);
1119 }
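/* For illustration (constants shown as plain integers):

     int_const_binop (PLUS_EXPR, 2, 3)       -> 5
     int_const_binop (TRUNC_DIV_EXPR, 7, 0)  -> NULL_TREE
     int_const_binop (LSHIFT_EXPR, 8, -1)    -> 4  (a negative shift
                                                    count flips the
                                                    direction)  */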
1120
1121 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1122 constant. We assume ARG1 and ARG2 have the same data type, or at least
1123 are the same kind of constant and the same machine mode. Return zero if
1124 combining the constants is not allowed in the current operating mode. */
1125
1126 static tree
1127 const_binop (enum tree_code code, tree arg1, tree arg2)
1128 {
1129 /* Sanity check for the recursive cases. */
1130 if (!arg1 || !arg2)
1131 return NULL_TREE;
1132
1133 STRIP_NOPS (arg1);
1134 STRIP_NOPS (arg2);
1135
1136 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1137 return int_const_binop (code, arg1, arg2);
1138
1139 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1140 {
1141 machine_mode mode;
1142 REAL_VALUE_TYPE d1;
1143 REAL_VALUE_TYPE d2;
1144 REAL_VALUE_TYPE value;
1145 REAL_VALUE_TYPE result;
1146 bool inexact;
1147 tree t, type;
1148
1149 /* The following codes are handled by real_arithmetic. */
1150 switch (code)
1151 {
1152 case PLUS_EXPR:
1153 case MINUS_EXPR:
1154 case MULT_EXPR:
1155 case RDIV_EXPR:
1156 case MIN_EXPR:
1157 case MAX_EXPR:
1158 break;
1159
1160 default:
1161 return NULL_TREE;
1162 }
1163
1164 d1 = TREE_REAL_CST (arg1);
1165 d2 = TREE_REAL_CST (arg2);
1166
1167 type = TREE_TYPE (arg1);
1168 mode = TYPE_MODE (type);
1169
1170 /* Don't perform operation if we honor signaling NaNs and
1171 either operand is a NaN. */
1172 if (HONOR_SNANS (mode)
1173 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1174 return NULL_TREE;
1175
1176 /* Don't perform operation if it would raise a division
1177 by zero exception. */
1178 if (code == RDIV_EXPR
1179 && REAL_VALUES_EQUAL (d2, dconst0)
1180 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1181 return NULL_TREE;
1182
1183 /* If either operand is a NaN, just return it. Otherwise, set up
1184 for floating-point trap; we return an overflow. */
1185 if (REAL_VALUE_ISNAN (d1))
1186 return arg1;
1187 else if (REAL_VALUE_ISNAN (d2))
1188 return arg2;
1189
1190 inexact = real_arithmetic (&value, code, &d1, &d2);
1191 real_convert (&result, mode, &value);
1192
1193 /* Don't constant fold this floating point operation if
1194 	 the result has overflowed and flag_trapping_math is set.  */
1195 if (flag_trapping_math
1196 && MODE_HAS_INFINITIES (mode)
1197 && REAL_VALUE_ISINF (result)
1198 && !REAL_VALUE_ISINF (d1)
1199 && !REAL_VALUE_ISINF (d2))
1200 return NULL_TREE;
1201
1202 /* Don't constant fold this floating point operation if the
1203 	 result may depend upon the run-time rounding mode and
1204 flag_rounding_math is set, or if GCC's software emulation
1205 is unable to accurately represent the result. */
1206 if ((flag_rounding_math
1207 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1208 && (inexact || !real_identical (&result, &value)))
1209 return NULL_TREE;
1210
1211 t = build_real (type, result);
1212
1213 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1214 return t;
1215 }
1216
1217 if (TREE_CODE (arg1) == FIXED_CST && TREE_CODE (arg2) == FIXED_CST)
1218 {
1219 FIXED_VALUE_TYPE f1;
1220 FIXED_VALUE_TYPE f2;
1221 FIXED_VALUE_TYPE result;
1222 tree t, type;
1223 int sat_p;
1224 bool overflow_p;
1225
1226 /* The following codes are handled by fixed_arithmetic. */
1227 switch (code)
1228 {
1229 case PLUS_EXPR:
1230 case MINUS_EXPR:
1231 case MULT_EXPR:
1232 case TRUNC_DIV_EXPR:
1233 f2 = TREE_FIXED_CST (arg2);
1234 break;
1235
1236 case LSHIFT_EXPR:
1237 case RSHIFT_EXPR:
1238 {
1239 wide_int w2 = arg2;
1240 f2.data.high = w2.elt (1);
1241 f2.data.low = w2.elt (0);
1242 f2.mode = SImode;
1243 }
1244 break;
1245
1246 default:
1247 return NULL_TREE;
1248 }
1249
1250 f1 = TREE_FIXED_CST (arg1);
1251 type = TREE_TYPE (arg1);
1252 sat_p = TYPE_SATURATING (type);
1253 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1254 t = build_fixed (type, result);
1255 /* Propagate overflow flags. */
1256 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1257 TREE_OVERFLOW (t) = 1;
1258 return t;
1259 }
1260
1261 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1262 {
1263 tree type = TREE_TYPE (arg1);
1264 tree r1 = TREE_REALPART (arg1);
1265 tree i1 = TREE_IMAGPART (arg1);
1266 tree r2 = TREE_REALPART (arg2);
1267 tree i2 = TREE_IMAGPART (arg2);
1268 tree real, imag;
1269
1270 switch (code)
1271 {
1272 case PLUS_EXPR:
1273 case MINUS_EXPR:
1274 real = const_binop (code, r1, r2);
1275 imag = const_binop (code, i1, i2);
1276 break;
1277
1278 case MULT_EXPR:
1279 if (COMPLEX_FLOAT_TYPE_P (type))
1280 return do_mpc_arg2 (arg1, arg2, type,
1281 /* do_nonfinite= */ folding_initializer,
1282 mpc_mul);
1283
1284 real = const_binop (MINUS_EXPR,
1285 const_binop (MULT_EXPR, r1, r2),
1286 const_binop (MULT_EXPR, i1, i2));
1287 imag = const_binop (PLUS_EXPR,
1288 const_binop (MULT_EXPR, r1, i2),
1289 const_binop (MULT_EXPR, i1, r2));
1290 break;
1291
1292 case RDIV_EXPR:
1293 if (COMPLEX_FLOAT_TYPE_P (type))
1294 return do_mpc_arg2 (arg1, arg2, type,
1295 /* do_nonfinite= */ folding_initializer,
1296 mpc_div);
1297 /* Fallthru ... */
1298 case TRUNC_DIV_EXPR:
1299 case CEIL_DIV_EXPR:
1300 case FLOOR_DIV_EXPR:
1301 case ROUND_DIV_EXPR:
1302 if (flag_complex_method == 0)
1303 {
1304 /* Keep this algorithm in sync with
1305 tree-complex.c:expand_complex_div_straight().
1306
1307 Expand complex division to scalars, straightforward algorithm.
1308 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1309 t = br*br + bi*bi
1310 */
1311 tree magsquared
1312 = const_binop (PLUS_EXPR,
1313 const_binop (MULT_EXPR, r2, r2),
1314 const_binop (MULT_EXPR, i2, i2));
1315 tree t1
1316 = const_binop (PLUS_EXPR,
1317 const_binop (MULT_EXPR, r1, r2),
1318 const_binop (MULT_EXPR, i1, i2));
1319 tree t2
1320 = const_binop (MINUS_EXPR,
1321 const_binop (MULT_EXPR, i1, r2),
1322 const_binop (MULT_EXPR, r1, i2));
1323
1324 real = const_binop (code, t1, magsquared);
1325 imag = const_binop (code, t2, magsquared);
1326 }
1327 else
1328 {
1329 /* Keep this algorithm in sync with
1330 tree-complex.c:expand_complex_div_wide().
1331
1332 Expand complex division to scalars, modified algorithm to minimize
1333 overflow with wide input ranges. */
1334 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1335 fold_abs_const (r2, TREE_TYPE (type)),
1336 fold_abs_const (i2, TREE_TYPE (type)));
1337
1338 if (integer_nonzerop (compare))
1339 {
1340 /* In the TRUE branch, we compute
1341 ratio = br/bi;
1342 div = (br * ratio) + bi;
1343 tr = (ar * ratio) + ai;
1344 ti = (ai * ratio) - ar;
1345 tr = tr / div;
1346 ti = ti / div; */
1347 tree ratio = const_binop (code, r2, i2);
1348 tree div = const_binop (PLUS_EXPR, i2,
1349 const_binop (MULT_EXPR, r2, ratio));
1350 real = const_binop (MULT_EXPR, r1, ratio);
1351 real = const_binop (PLUS_EXPR, real, i1);
1352 real = const_binop (code, real, div);
1353
1354 imag = const_binop (MULT_EXPR, i1, ratio);
1355 imag = const_binop (MINUS_EXPR, imag, r1);
1356 imag = const_binop (code, imag, div);
1357 }
1358 else
1359 {
1360 /* In the FALSE branch, we compute
1361 ratio = d/c;
1362 divisor = (d * ratio) + c;
1363 tr = (b * ratio) + a;
1364 ti = b - (a * ratio);
1365 tr = tr / div;
1366 ti = ti / div; */
1367 tree ratio = const_binop (code, i2, r2);
1368 tree div = const_binop (PLUS_EXPR, r2,
1369 const_binop (MULT_EXPR, i2, ratio));
1370
1371 real = const_binop (MULT_EXPR, i1, ratio);
1372 real = const_binop (PLUS_EXPR, real, r1);
1373 real = const_binop (code, real, div);
1374
1375 imag = const_binop (MULT_EXPR, r1, ratio);
1376 imag = const_binop (MINUS_EXPR, i1, imag);
1377 imag = const_binop (code, imag, div);
1378 }
1379 }
1380 break;
1381
1382 default:
1383 return NULL_TREE;
1384 }
1385
1386 if (real && imag)
1387 return build_complex (type, real, imag);
1388 }
1389
1390 if (TREE_CODE (arg1) == VECTOR_CST
1391 && TREE_CODE (arg2) == VECTOR_CST)
1392 {
1393 tree type = TREE_TYPE (arg1);
1394 int count = TYPE_VECTOR_SUBPARTS (type), i;
1395 tree *elts = XALLOCAVEC (tree, count);
1396
1397 for (i = 0; i < count; i++)
1398 {
1399 tree elem1 = VECTOR_CST_ELT (arg1, i);
1400 tree elem2 = VECTOR_CST_ELT (arg2, i);
1401
1402 elts[i] = const_binop (code, elem1, elem2);
1403
1404 /* It is possible that const_binop cannot handle the given
1405 	     code and returns NULL_TREE.  */
1406 if (elts[i] == NULL_TREE)
1407 return NULL_TREE;
1408 }
1409
1410 return build_vector (type, elts);
1411 }
1412
1413 /* Shifts allow a scalar offset for a vector. */
1414 if (TREE_CODE (arg1) == VECTOR_CST
1415 && TREE_CODE (arg2) == INTEGER_CST)
1416 {
1417 tree type = TREE_TYPE (arg1);
1418 int count = TYPE_VECTOR_SUBPARTS (type), i;
1419 tree *elts = XALLOCAVEC (tree, count);
1420
1421 for (i = 0; i < count; i++)
1422 {
1423 tree elem1 = VECTOR_CST_ELT (arg1, i);
1424
1425 elts[i] = const_binop (code, elem1, arg2);
1426
1427 /* It is possible that const_binop cannot handle the given
1428 	     code and returns NULL_TREE.  */
1429 if (elts[i] == NULL_TREE)
1430 return NULL_TREE;
1431 }
1432
1433 return build_vector (type, elts);
1434 }
1435 return NULL_TREE;
1436 }
1437
1438 /* Overload that adds a TYPE parameter to be able to dispatch
1439 to fold_relational_const. */
1440
1441 tree
1442 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1443 {
1444 if (TREE_CODE_CLASS (code) == tcc_comparison)
1445 return fold_relational_const (code, type, arg1, arg2);
1446 else
1447 return const_binop (code, arg1, arg2);
1448 }
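/* For illustration: the overload dispatches comparison codes, so

     const_binop (LT_EXPR, boolean_type_node, 1, 2)

   goes through fold_relational_const and yields boolean true, while
   arithmetic codes fall through to the two-operand const_binop.  */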
1449
1450 /* Compute CODE ARG0 with resulting type TYPE, with ARG0 being constant.
1451    Return zero if computing the constant is not possible.  */
1452
1453 tree
1454 const_unop (enum tree_code code, tree type, tree arg0)
1455 {
1456 switch (code)
1457 {
1458 CASE_CONVERT:
1459 case FLOAT_EXPR:
1460 case FIX_TRUNC_EXPR:
1461 case FIXED_CONVERT_EXPR:
1462 return fold_convert_const (code, type, arg0);
1463
1464 case ADDR_SPACE_CONVERT_EXPR:
1465 if (integer_zerop (arg0))
1466 return fold_convert_const (code, type, arg0);
1467 break;
1468
1469 case VIEW_CONVERT_EXPR:
1470 return fold_view_convert_expr (type, arg0);
1471
1472 case NEGATE_EXPR:
1473 {
1474 /* Can't call fold_negate_const directly here as that doesn't
1475 handle all cases and we might not be able to negate some
1476 constants. */
1477 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1478 if (tem && CONSTANT_CLASS_P (tem))
1479 return tem;
1480 break;
1481 }
1482
1483 case ABS_EXPR:
1484 return fold_abs_const (arg0, type);
1485
1486 case CONJ_EXPR:
1487 if (TREE_CODE (arg0) == COMPLEX_CST)
1488 {
1489 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1490 TREE_TYPE (type));
1491 return build_complex (type, TREE_REALPART (arg0), ipart);
1492 }
1493 break;
1494
1495 case BIT_NOT_EXPR:
1496 if (TREE_CODE (arg0) == INTEGER_CST)
1497 return fold_not_const (arg0, type);
1498 /* Perform BIT_NOT_EXPR on each element individually. */
1499 else if (TREE_CODE (arg0) == VECTOR_CST)
1500 {
1501 tree *elements;
1502 tree elem;
1503 unsigned count = VECTOR_CST_NELTS (arg0), i;
1504
1505 elements = XALLOCAVEC (tree, count);
1506 for (i = 0; i < count; i++)
1507 {
1508 elem = VECTOR_CST_ELT (arg0, i);
1509 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1510 if (elem == NULL_TREE)
1511 break;
1512 elements[i] = elem;
1513 }
1514 if (i == count)
1515 return build_vector (type, elements);
1516 }
1517 break;
1518
1519 case TRUTH_NOT_EXPR:
1520 if (TREE_CODE (arg0) == INTEGER_CST)
1521 return constant_boolean_node (integer_zerop (arg0), type);
1522 break;
1523
1524 case REALPART_EXPR:
1525 if (TREE_CODE (arg0) == COMPLEX_CST)
1526 return fold_convert (type, TREE_REALPART (arg0));
1527 break;
1528
1529 case IMAGPART_EXPR:
1530 if (TREE_CODE (arg0) == COMPLEX_CST)
1531 return fold_convert (type, TREE_IMAGPART (arg0));
1532 break;
1533
1534 case VEC_UNPACK_LO_EXPR:
1535 case VEC_UNPACK_HI_EXPR:
1536 case VEC_UNPACK_FLOAT_LO_EXPR:
1537 case VEC_UNPACK_FLOAT_HI_EXPR:
1538 {
1539 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1540 tree *elts;
1541 enum tree_code subcode;
1542
1543 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1544 if (TREE_CODE (arg0) != VECTOR_CST)
1545 return NULL_TREE;
1546
1547 elts = XALLOCAVEC (tree, nelts * 2);
1548 if (!vec_cst_ctor_to_array (arg0, elts))
1549 return NULL_TREE;
1550
1551 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1552 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1553 elts += nelts;
1554
1555 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1556 subcode = NOP_EXPR;
1557 else
1558 subcode = FLOAT_EXPR;
1559
1560 for (i = 0; i < nelts; i++)
1561 {
1562 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1563 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1564 return NULL_TREE;
1565 }
1566
1567 return build_vector (type, elts);
1568 }
1569
1570 case REDUC_MIN_EXPR:
1571 case REDUC_MAX_EXPR:
1572 case REDUC_PLUS_EXPR:
1573 {
1574 unsigned int nelts, i;
1575 tree *elts;
1576 enum tree_code subcode;
1577
1578 if (TREE_CODE (arg0) != VECTOR_CST)
1579 return NULL_TREE;
1580 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1581
1582 elts = XALLOCAVEC (tree, nelts);
1583 if (!vec_cst_ctor_to_array (arg0, elts))
1584 return NULL_TREE;
1585
1586 switch (code)
1587 {
1588 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1589 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1590 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1591 default: gcc_unreachable ();
1592 }
1593
1594 for (i = 1; i < nelts; i++)
1595 {
1596 elts[0] = const_binop (subcode, elts[0], elts[i]);
1597 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1598 return NULL_TREE;
1599 }
1600
1601 return elts[0];
1602 }
1603
1604 default:
1605 break;
1606 }
1607
1608 return NULL_TREE;
1609 }
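/* For illustration (constants shown as plain integers):

     const_unop (NEGATE_EXPR, int, 5)      -> -5
     const_unop (BIT_NOT_EXPR, int, 0)     -> -1
     const_unop (REALPART_EXPR, float, z)  -> NULL_TREE unless z is a
                                              COMPLEX_CST  */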
1610
1611 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1612 indicates which particular sizetype to create. */
1613
1614 tree
1615 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1616 {
1617 return build_int_cst (sizetype_tab[(int) kind], number);
1618 }
1619 \f
1620 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1621 is a tree code. The type of the result is taken from the operands.
1622 Both must be equivalent integer types, ala int_binop_types_match_p.
1623 If the operands are constant, so is the result. */
1624
1625 tree
1626 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1627 {
1628 tree type = TREE_TYPE (arg0);
1629
1630 if (arg0 == error_mark_node || arg1 == error_mark_node)
1631 return error_mark_node;
1632
1633 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1634 TREE_TYPE (arg1)));
1635
1636 /* Handle the special case of two integer constants faster. */
1637 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1638 {
1639 /* And some specific cases even faster than that. */
1640 if (code == PLUS_EXPR)
1641 {
1642 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1643 return arg1;
1644 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1645 return arg0;
1646 }
1647 else if (code == MINUS_EXPR)
1648 {
1649 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1650 return arg0;
1651 }
1652 else if (code == MULT_EXPR)
1653 {
1654 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1655 return arg1;
1656 }
1657
1658 /* Handle general case of two integer constants. For sizetype
1659 constant calculations we always want to know about overflow,
1660 even in the unsigned case. */
1661 return int_const_binop_1 (code, arg0, arg1, -1);
1662 }
1663
1664 return fold_build2_loc (loc, code, type, arg0, arg1);
1665 }
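/* For illustration (size_binop is the usual macro wrapper around
   size_binop_loc):

     size_binop (PLUS_EXPR, size_int (4), size_int (8))  -> 12

   The 0/1 fast paths above hand back the other operand untouched when
   both operands are INTEGER_CSTs.  */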
1666
1667 /* Given two values, either both of sizetype or both of bitsizetype,
1668 compute the difference between the two values. Return the value
1669    in the signed type corresponding to the type of the operands.  */
1670
1671 tree
1672 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1673 {
1674 tree type = TREE_TYPE (arg0);
1675 tree ctype;
1676
1677 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1678 TREE_TYPE (arg1)));
1679
1680 /* If the type is already signed, just do the simple thing. */
1681 if (!TYPE_UNSIGNED (type))
1682 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1683
1684 if (type == sizetype)
1685 ctype = ssizetype;
1686 else if (type == bitsizetype)
1687 ctype = sbitsizetype;
1688 else
1689 ctype = signed_type_for (type);
1690
1691 /* If either operand is not a constant, do the conversions to the signed
1692 type and subtract. The hardware will do the right thing with any
1693 overflow in the subtraction. */
1694 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1695 return size_binop_loc (loc, MINUS_EXPR,
1696 fold_convert_loc (loc, ctype, arg0),
1697 fold_convert_loc (loc, ctype, arg1));
1698
1699 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1700 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1701 overflow) and negate (which can't either). Special-case a result
1702 of zero while we're here. */
1703 if (tree_int_cst_equal (arg0, arg1))
1704 return build_int_cst (ctype, 0);
1705 else if (tree_int_cst_lt (arg1, arg0))
1706 return fold_convert_loc (loc, ctype,
1707 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1708 else
1709 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1710 fold_convert_loc (loc, ctype,
1711 size_binop_loc (loc,
1712 MINUS_EXPR,
1713 arg1, arg0)));
1714 }
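/* For illustration, with sizetype operands (result in ssizetype;
   size_diffop is the macro wrapper around size_diffop_loc):

     size_diffop (size_int (7), size_int (5))  ->  2
     size_diffop (size_int (5), size_int (7))  -> -2  (computed as
                                                       -(7 - 5), so the
                                                       unsigned subtraction
                                                       never wraps)  */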
1715 \f
1716 /* A subroutine of fold_convert_const handling conversions of an
1717 INTEGER_CST to another integer type. */
1718
1719 static tree
1720 fold_convert_const_int_from_int (tree type, const_tree arg1)
1721 {
1722 /* Given an integer constant, make new constant with new type,
1723 appropriately sign-extended or truncated. Use widest_int
1724    so that any extension is done according to ARG1's type.  */
1725 return force_fit_type (type, wi::to_widest (arg1),
1726 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1727 TREE_OVERFLOW (arg1));
1728 }
1729
1730 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1731 to an integer type. */
1732
1733 static tree
1734 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1735 {
1736 bool overflow = false;
1737 tree t;
1738
1739 /* The following code implements the floating point to integer
1740 conversion rules required by the Java Language Specification,
1741 that IEEE NaNs are mapped to zero and values that overflow
1742 the target precision saturate, i.e. values greater than
1743 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1744 are mapped to INT_MIN. These semantics are allowed by the
1745 C and C++ standards that simply state that the behavior of
1746 FP-to-integer conversion is unspecified upon overflow. */
1747
1748 wide_int val;
1749 REAL_VALUE_TYPE r;
1750 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1751
1752 switch (code)
1753 {
1754 case FIX_TRUNC_EXPR:
1755 real_trunc (&r, VOIDmode, &x);
1756 break;
1757
1758 default:
1759 gcc_unreachable ();
1760 }
1761
1762 /* If R is NaN, return zero and show we have an overflow. */
1763 if (REAL_VALUE_ISNAN (r))
1764 {
1765 overflow = true;
1766 val = wi::zero (TYPE_PRECISION (type));
1767 }
1768
1769 /* See if R is less than the lower bound or greater than the
1770 upper bound. */
1771
1772 if (! overflow)
1773 {
1774 tree lt = TYPE_MIN_VALUE (type);
1775 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1776 if (REAL_VALUES_LESS (r, l))
1777 {
1778 overflow = true;
1779 val = lt;
1780 }
1781 }
1782
1783 if (! overflow)
1784 {
1785 tree ut = TYPE_MAX_VALUE (type);
1786 if (ut)
1787 {
1788 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1789 if (REAL_VALUES_LESS (u, r))
1790 {
1791 overflow = true;
1792 val = ut;
1793 }
1794 }
1795 }
1796
1797 if (! overflow)
1798 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1799
1800 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1801 return t;
1802 }
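/* For illustration, converting REAL_CSTs to a 32-bit int type:

     (int) 3.9   -> 3        (FIX_TRUNC_EXPR truncates toward zero)
     (int) NaN   -> 0        with TREE_OVERFLOW set
     (int) 1e30  -> INT_MAX  saturated, with TREE_OVERFLOW set  */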
1803
1804 /* A subroutine of fold_convert_const handling conversions of a
1805 FIXED_CST to an integer type. */
1806
1807 static tree
1808 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1809 {
1810 tree t;
1811 double_int temp, temp_trunc;
1812 unsigned int mode;
1813
1814 /* Right shift FIXED_CST to temp by fbit. */
1815 temp = TREE_FIXED_CST (arg1).data;
1816 mode = TREE_FIXED_CST (arg1).mode;
1817 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1818 {
1819 temp = temp.rshift (GET_MODE_FBIT (mode),
1820 HOST_BITS_PER_DOUBLE_INT,
1821 SIGNED_FIXED_POINT_MODE_P (mode));
1822
1823 /* Left shift temp to temp_trunc by fbit. */
1824 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1825 HOST_BITS_PER_DOUBLE_INT,
1826 SIGNED_FIXED_POINT_MODE_P (mode));
1827 }
1828 else
1829 {
1830 temp = double_int_zero;
1831 temp_trunc = double_int_zero;
1832 }
1833
1834   /* If FIXED_CST is negative, we need to round the value toward 0.
1835      We do this by adding 1 to temp if any fractional bits are nonzero.  */
1836 if (SIGNED_FIXED_POINT_MODE_P (mode)
1837 && temp_trunc.is_negative ()
1838 && TREE_FIXED_CST (arg1).data != temp_trunc)
1839 temp += double_int_one;
1840
1841 /* Given a fixed-point constant, make new constant with new type,
1842 appropriately sign-extended or truncated. */
1843 t = force_fit_type (type, temp, -1,
1844 (temp.is_negative ()
1845 && (TYPE_UNSIGNED (type)
1846 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1847 | TREE_OVERFLOW (arg1));
1848
1849 return t;
1850 }
1851
1852 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1853 to another floating point type. */
1854
1855 static tree
1856 fold_convert_const_real_from_real (tree type, const_tree arg1)
1857 {
1858 REAL_VALUE_TYPE value;
1859 tree t;
1860
1861 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1862 t = build_real (type, value);
1863
1864 /* If converting an infinity or NAN to a representation that doesn't
1865 have one, set the overflow bit so that we can produce some kind of
1866 error message at the appropriate point if necessary. It's not the
1867 most user-friendly message, but it's better than nothing. */
1868 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1869 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1870 TREE_OVERFLOW (t) = 1;
1871 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1872 && !MODE_HAS_NANS (TYPE_MODE (type)))
1873 TREE_OVERFLOW (t) = 1;
1874 /* Regular overflow, conversion produced an infinity in a mode that
1875 can't represent them. */
1876 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1877 && REAL_VALUE_ISINF (value)
1878 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1879 TREE_OVERFLOW (t) = 1;
1880 else
1881 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1882 return t;
1883 }
1884
1885 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1886 to a floating point type. */
1887
1888 static tree
1889 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1890 {
1891 REAL_VALUE_TYPE value;
1892 tree t;
1893
1894 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1895 t = build_real (type, value);
1896
1897 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1898 return t;
1899 }
1900
1901 /* A subroutine of fold_convert_const handling conversions of a
1902    FIXED_CST to another fixed-point type. */
1903
1904 static tree
1905 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1906 {
1907 FIXED_VALUE_TYPE value;
1908 tree t;
1909 bool overflow_p;
1910
1911 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1912 TYPE_SATURATING (type));
1913 t = build_fixed (type, value);
1914
1915 /* Propagate overflow flags. */
1916 if (overflow_p | TREE_OVERFLOW (arg1))
1917 TREE_OVERFLOW (t) = 1;
1918 return t;
1919 }
1920
1921 /* A subroutine of fold_convert_const handling conversions of an
1922    INTEGER_CST to a fixed-point type. */
1923
1924 static tree
1925 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1926 {
1927 FIXED_VALUE_TYPE value;
1928 tree t;
1929 bool overflow_p;
1930 double_int di;
1931
1932 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
1933
1934 di.low = TREE_INT_CST_ELT (arg1, 0);
1935 if (TREE_INT_CST_NUNITS (arg1) == 1)
1936 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
1937 else
1938 di.high = TREE_INT_CST_ELT (arg1, 1);
1939
1940 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
1941 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1942 TYPE_SATURATING (type));
1943 t = build_fixed (type, value);
1944
1945 /* Propagate overflow flags. */
1946 if (overflow_p | TREE_OVERFLOW (arg1))
1947 TREE_OVERFLOW (t) = 1;
1948 return t;
1949 }
1950
1951 /* A subroutine of fold_convert_const handling conversions of a
1952    REAL_CST to a fixed-point type. */
1953
1954 static tree
1955 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1956 {
1957 FIXED_VALUE_TYPE value;
1958 tree t;
1959 bool overflow_p;
1960
1961 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1962 &TREE_REAL_CST (arg1),
1963 TYPE_SATURATING (type));
1964 t = build_fixed (type, value);
1965
1966 /* Propagate overflow flags. */
1967 if (overflow_p | TREE_OVERFLOW (arg1))
1968 TREE_OVERFLOW (t) = 1;
1969 return t;
1970 }
1971
1972 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1973 type TYPE. If no simplification can be done return NULL_TREE. */
1974
1975 static tree
1976 fold_convert_const (enum tree_code code, tree type, tree arg1)
1977 {
1978 if (TREE_TYPE (arg1) == type)
1979 return arg1;
1980
1981 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1982 || TREE_CODE (type) == OFFSET_TYPE)
1983 {
1984 if (TREE_CODE (arg1) == INTEGER_CST)
1985 return fold_convert_const_int_from_int (type, arg1);
1986 else if (TREE_CODE (arg1) == REAL_CST)
1987 return fold_convert_const_int_from_real (code, type, arg1);
1988 else if (TREE_CODE (arg1) == FIXED_CST)
1989 return fold_convert_const_int_from_fixed (type, arg1);
1990 }
1991 else if (TREE_CODE (type) == REAL_TYPE)
1992 {
1993 if (TREE_CODE (arg1) == INTEGER_CST)
1994 return build_real_from_int_cst (type, arg1);
1995 else if (TREE_CODE (arg1) == REAL_CST)
1996 return fold_convert_const_real_from_real (type, arg1);
1997 else if (TREE_CODE (arg1) == FIXED_CST)
1998 return fold_convert_const_real_from_fixed (type, arg1);
1999 }
2000 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2001 {
2002 if (TREE_CODE (arg1) == FIXED_CST)
2003 return fold_convert_const_fixed_from_fixed (type, arg1);
2004 else if (TREE_CODE (arg1) == INTEGER_CST)
2005 return fold_convert_const_fixed_from_int (type, arg1);
2006 else if (TREE_CODE (arg1) == REAL_CST)
2007 return fold_convert_const_fixed_from_real (type, arg1);
2008 }
2009 return NULL_TREE;
2010 }
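
/* For example, fold_convert_const (FLOAT_EXPR, double_type_node,
   build_int_cst (integer_type_node, 5)) dispatches through the
   REAL_TYPE arm to build_real_from_int_cst and yields the REAL_CST
   5.0; a conversion with no folding rule above returns NULL_TREE. */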
2011
2012 /* Construct a vector of zero elements of vector type TYPE. */
2013
2014 static tree
2015 build_zero_vector (tree type)
2016 {
2017 tree t;
2018
2019 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2020 return build_vector_from_val (type, t);
2021 }
2022
2023 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2024
2025 bool
2026 fold_convertible_p (const_tree type, const_tree arg)
2027 {
2028 tree orig = TREE_TYPE (arg);
2029
2030 if (type == orig)
2031 return true;
2032
2033 if (TREE_CODE (arg) == ERROR_MARK
2034 || TREE_CODE (type) == ERROR_MARK
2035 || TREE_CODE (orig) == ERROR_MARK)
2036 return false;
2037
2038 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2039 return true;
2040
2041 switch (TREE_CODE (type))
2042 {
2043 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2044 case POINTER_TYPE: case REFERENCE_TYPE:
2045 case OFFSET_TYPE:
2046 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2047 || TREE_CODE (orig) == OFFSET_TYPE)
2048 return true;
2049 return (TREE_CODE (orig) == VECTOR_TYPE
2050 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2051
2052 case REAL_TYPE:
2053 case FIXED_POINT_TYPE:
2054 case COMPLEX_TYPE:
2055 case VECTOR_TYPE:
2056 case VOID_TYPE:
2057 return TREE_CODE (type) == TREE_CODE (orig);
2058
2059 default:
2060 return false;
2061 }
2062 }
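
/* For example, fold_convertible_p holds for int -> long (both are
   INTEGRAL_TYPE_P), but not for int -> double: a REAL_TYPE result
   requires that ORIG already be a REAL_TYPE, since an integer-to-float
   conversion needs a FLOAT_EXPR rather than a NOP_EXPR. */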
2063
2064 /* Convert expression ARG to type TYPE. Used by the middle-end for
2065 simple conversions in preference to calling the front-end's convert. */
2066
2067 tree
2068 fold_convert_loc (location_t loc, tree type, tree arg)
2069 {
2070 tree orig = TREE_TYPE (arg);
2071 tree tem;
2072
2073 if (type == orig)
2074 return arg;
2075
2076 if (TREE_CODE (arg) == ERROR_MARK
2077 || TREE_CODE (type) == ERROR_MARK
2078 || TREE_CODE (orig) == ERROR_MARK)
2079 return error_mark_node;
2080
2081 switch (TREE_CODE (type))
2082 {
2083 case POINTER_TYPE:
2084 case REFERENCE_TYPE:
2085 /* Handle conversions between pointers to different address spaces. */
2086 if (POINTER_TYPE_P (orig)
2087 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2088 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2089 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2090 /* fall through */
2091
2092 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2093 case OFFSET_TYPE:
2094 if (TREE_CODE (arg) == INTEGER_CST)
2095 {
2096 tem = fold_convert_const (NOP_EXPR, type, arg);
2097 if (tem != NULL_TREE)
2098 return tem;
2099 }
2100 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2101 || TREE_CODE (orig) == OFFSET_TYPE)
2102 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2103 if (TREE_CODE (orig) == COMPLEX_TYPE)
2104 return fold_convert_loc (loc, type,
2105 fold_build1_loc (loc, REALPART_EXPR,
2106 TREE_TYPE (orig), arg));
2107 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2108 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2109 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2110
2111 case REAL_TYPE:
2112 if (TREE_CODE (arg) == INTEGER_CST)
2113 {
2114 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2115 if (tem != NULL_TREE)
2116 return tem;
2117 }
2118 else if (TREE_CODE (arg) == REAL_CST)
2119 {
2120 tem = fold_convert_const (NOP_EXPR, type, arg);
2121 if (tem != NULL_TREE)
2122 return tem;
2123 }
2124 else if (TREE_CODE (arg) == FIXED_CST)
2125 {
2126 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2127 if (tem != NULL_TREE)
2128 return tem;
2129 }
2130
2131 switch (TREE_CODE (orig))
2132 {
2133 case INTEGER_TYPE:
2134 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2135 case POINTER_TYPE: case REFERENCE_TYPE:
2136 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2137
2138 case REAL_TYPE:
2139 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2140
2141 case FIXED_POINT_TYPE:
2142 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2143
2144 case COMPLEX_TYPE:
2145 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2146 return fold_convert_loc (loc, type, tem);
2147
2148 default:
2149 gcc_unreachable ();
2150 }
2151
2152 case FIXED_POINT_TYPE:
2153 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2154 || TREE_CODE (arg) == REAL_CST)
2155 {
2156 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2157 if (tem != NULL_TREE)
2158 goto fold_convert_exit;
2159 }
2160
2161 switch (TREE_CODE (orig))
2162 {
2163 case FIXED_POINT_TYPE:
2164 case INTEGER_TYPE:
2165 case ENUMERAL_TYPE:
2166 case BOOLEAN_TYPE:
2167 case REAL_TYPE:
2168 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2169
2170 case COMPLEX_TYPE:
2171 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2172 return fold_convert_loc (loc, type, tem);
2173
2174 default:
2175 gcc_unreachable ();
2176 }
2177
2178 case COMPLEX_TYPE:
2179 switch (TREE_CODE (orig))
2180 {
2181 case INTEGER_TYPE:
2182 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2183 case POINTER_TYPE: case REFERENCE_TYPE:
2184 case REAL_TYPE:
2185 case FIXED_POINT_TYPE:
2186 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2187 fold_convert_loc (loc, TREE_TYPE (type), arg),
2188 fold_convert_loc (loc, TREE_TYPE (type),
2189 integer_zero_node));
2190 case COMPLEX_TYPE:
2191 {
2192 tree rpart, ipart;
2193
2194 if (TREE_CODE (arg) == COMPLEX_EXPR)
2195 {
2196 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2197 TREE_OPERAND (arg, 0));
2198 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2199 TREE_OPERAND (arg, 1));
2200 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2201 }
2202
2203 arg = save_expr (arg);
2204 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2205 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2206 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2207 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2208 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2209 }
2210
2211 default:
2212 gcc_unreachable ();
2213 }
2214
2215 case VECTOR_TYPE:
2216 if (integer_zerop (arg))
2217 return build_zero_vector (type);
2218 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2219 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2220 || TREE_CODE (orig) == VECTOR_TYPE);
2221 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2222
2223 case VOID_TYPE:
2224 tem = fold_ignored_result (arg);
2225 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2226
2227 default:
2228 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2229 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2230 gcc_unreachable ();
2231 }
2232 fold_convert_exit:
2233 protected_set_expr_location_unshare (tem, loc);
2234 return tem;
2235 }
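
/* As an illustration of the COMPLEX_TYPE handling above, converting a
   scalar integer I to a complex float type folds to
   COMPLEX_EXPR <(float) I, 0.0>, while converting a complex value to
   a scalar type converts just its REALPART_EXPR. */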
2236 \f
2237 /* Return false if expr can be assumed not to be an lvalue, true
2238 otherwise. */
2239
2240 static bool
2241 maybe_lvalue_p (const_tree x)
2242 {
2243 /* We only need to wrap lvalue tree codes. */
2244 switch (TREE_CODE (x))
2245 {
2246 case VAR_DECL:
2247 case PARM_DECL:
2248 case RESULT_DECL:
2249 case LABEL_DECL:
2250 case FUNCTION_DECL:
2251 case SSA_NAME:
2252
2253 case COMPONENT_REF:
2254 case MEM_REF:
2255 case INDIRECT_REF:
2256 case ARRAY_REF:
2257 case ARRAY_RANGE_REF:
2258 case BIT_FIELD_REF:
2259 case OBJ_TYPE_REF:
2260
2261 case REALPART_EXPR:
2262 case IMAGPART_EXPR:
2263 case PREINCREMENT_EXPR:
2264 case PREDECREMENT_EXPR:
2265 case SAVE_EXPR:
2266 case TRY_CATCH_EXPR:
2267 case WITH_CLEANUP_EXPR:
2268 case COMPOUND_EXPR:
2269 case MODIFY_EXPR:
2270 case TARGET_EXPR:
2271 case COND_EXPR:
2272 case BIND_EXPR:
2273 break;
2274
2275 default:
2276 /* Assume the worst for front-end tree codes. */
2277 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2278 break;
2279 return false;
2280 }
2281
2282 return true;
2283 }
2284
2285 /* Return an expr equal to X but certainly not valid as an lvalue. */
2286
2287 tree
2288 non_lvalue_loc (location_t loc, tree x)
2289 {
2290 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2291 us. */
2292 if (in_gimple_form)
2293 return x;
2294
2295 if (! maybe_lvalue_p (x))
2296 return x;
2297 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2298 }
2299
2300 /* When pedantic, return an expr equal to X but certainly not valid as a
2301 pedantic lvalue. Otherwise, return X. */
2302
2303 static tree
2304 pedantic_non_lvalue_loc (location_t loc, tree x)
2305 {
2306 return protected_set_expr_location_unshare (x, loc);
2307 }
2308 \f
2309 /* Given a tree comparison code, return the code that is the logical inverse.
2310 It is generally not safe to do this for floating-point comparisons, except
2311 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2312 ERROR_MARK in this case. */
2313
2314 enum tree_code
2315 invert_tree_comparison (enum tree_code code, bool honor_nans)
2316 {
2317 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2318 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2319 return ERROR_MARK;
2320
2321 switch (code)
2322 {
2323 case EQ_EXPR:
2324 return NE_EXPR;
2325 case NE_EXPR:
2326 return EQ_EXPR;
2327 case GT_EXPR:
2328 return honor_nans ? UNLE_EXPR : LE_EXPR;
2329 case GE_EXPR:
2330 return honor_nans ? UNLT_EXPR : LT_EXPR;
2331 case LT_EXPR:
2332 return honor_nans ? UNGE_EXPR : GE_EXPR;
2333 case LE_EXPR:
2334 return honor_nans ? UNGT_EXPR : GT_EXPR;
2335 case LTGT_EXPR:
2336 return UNEQ_EXPR;
2337 case UNEQ_EXPR:
2338 return LTGT_EXPR;
2339 case UNGT_EXPR:
2340 return LE_EXPR;
2341 case UNGE_EXPR:
2342 return LT_EXPR;
2343 case UNLT_EXPR:
2344 return GE_EXPR;
2345 case UNLE_EXPR:
2346 return GT_EXPR;
2347 case ORDERED_EXPR:
2348 return UNORDERED_EXPR;
2349 case UNORDERED_EXPR:
2350 return ORDERED_EXPR;
2351 default:
2352 gcc_unreachable ();
2353 }
2354 }
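
/* For example, with NaNs honored, LT_EXPR inverts to UNGE_EXPR rather
   than GE_EXPR: !(a < b) must also be true when either operand is a
   NaN, which GE_EXPR alone would not capture. Under flag_trapping_math
   the inversion is refused (ERROR_MARK) because a < b traps on
   unordered operands while the unordered variants do not. */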
2355
2356 /* Similar, but return the comparison that results if the operands are
2357 swapped. This is safe for floating-point. */
2358
2359 enum tree_code
2360 swap_tree_comparison (enum tree_code code)
2361 {
2362 switch (code)
2363 {
2364 case EQ_EXPR:
2365 case NE_EXPR:
2366 case ORDERED_EXPR:
2367 case UNORDERED_EXPR:
2368 case LTGT_EXPR:
2369 case UNEQ_EXPR:
2370 return code;
2371 case GT_EXPR:
2372 return LT_EXPR;
2373 case GE_EXPR:
2374 return LE_EXPR;
2375 case LT_EXPR:
2376 return GT_EXPR;
2377 case LE_EXPR:
2378 return GE_EXPR;
2379 case UNGT_EXPR:
2380 return UNLT_EXPR;
2381 case UNGE_EXPR:
2382 return UNLE_EXPR;
2383 case UNLT_EXPR:
2384 return UNGT_EXPR;
2385 case UNLE_EXPR:
2386 return UNGE_EXPR;
2387 default:
2388 gcc_unreachable ();
2389 }
2390 }
2391
2392
2393 /* Convert a comparison tree code from an enum tree_code representation
2394 into a compcode bit-based encoding. This function is the inverse of
2395 compcode_to_comparison. */
2396
2397 static enum comparison_code
2398 comparison_to_compcode (enum tree_code code)
2399 {
2400 switch (code)
2401 {
2402 case LT_EXPR:
2403 return COMPCODE_LT;
2404 case EQ_EXPR:
2405 return COMPCODE_EQ;
2406 case LE_EXPR:
2407 return COMPCODE_LE;
2408 case GT_EXPR:
2409 return COMPCODE_GT;
2410 case NE_EXPR:
2411 return COMPCODE_NE;
2412 case GE_EXPR:
2413 return COMPCODE_GE;
2414 case ORDERED_EXPR:
2415 return COMPCODE_ORD;
2416 case UNORDERED_EXPR:
2417 return COMPCODE_UNORD;
2418 case UNLT_EXPR:
2419 return COMPCODE_UNLT;
2420 case UNEQ_EXPR:
2421 return COMPCODE_UNEQ;
2422 case UNLE_EXPR:
2423 return COMPCODE_UNLE;
2424 case UNGT_EXPR:
2425 return COMPCODE_UNGT;
2426 case LTGT_EXPR:
2427 return COMPCODE_LTGT;
2428 case UNGE_EXPR:
2429 return COMPCODE_UNGE;
2430 default:
2431 gcc_unreachable ();
2432 }
2433 }
2434
2435 /* Convert a compcode bit-based encoding of a comparison operator back
2436 to GCC's enum tree_code representation. This function is the
2437 inverse of comparison_to_compcode. */
2438
2439 static enum tree_code
2440 compcode_to_comparison (enum comparison_code code)
2441 {
2442 switch (code)
2443 {
2444 case COMPCODE_LT:
2445 return LT_EXPR;
2446 case COMPCODE_EQ:
2447 return EQ_EXPR;
2448 case COMPCODE_LE:
2449 return LE_EXPR;
2450 case COMPCODE_GT:
2451 return GT_EXPR;
2452 case COMPCODE_NE:
2453 return NE_EXPR;
2454 case COMPCODE_GE:
2455 return GE_EXPR;
2456 case COMPCODE_ORD:
2457 return ORDERED_EXPR;
2458 case COMPCODE_UNORD:
2459 return UNORDERED_EXPR;
2460 case COMPCODE_UNLT:
2461 return UNLT_EXPR;
2462 case COMPCODE_UNEQ:
2463 return UNEQ_EXPR;
2464 case COMPCODE_UNLE:
2465 return UNLE_EXPR;
2466 case COMPCODE_UNGT:
2467 return UNGT_EXPR;
2468 case COMPCODE_LTGT:
2469 return LTGT_EXPR;
2470 case COMPCODE_UNGE:
2471 return UNGE_EXPR;
2472 default:
2473 gcc_unreachable ();
2474 }
2475 }
2476
2477 /* Return a tree for the comparison which is the combination of
2478 doing the AND or OR (depending on CODE) of the two operations LCODE
2479 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2480 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2481 if this makes the transformation invalid. */
2482
2483 tree
2484 combine_comparisons (location_t loc,
2485 enum tree_code code, enum tree_code lcode,
2486 enum tree_code rcode, tree truth_type,
2487 tree ll_arg, tree lr_arg)
2488 {
2489 bool honor_nans = HONOR_NANS (element_mode (ll_arg));
2490 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2491 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2492 int compcode;
2493
2494 switch (code)
2495 {
2496 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2497 compcode = lcompcode & rcompcode;
2498 break;
2499
2500 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2501 compcode = lcompcode | rcompcode;
2502 break;
2503
2504 default:
2505 return NULL_TREE;
2506 }
2507
2508 if (!honor_nans)
2509 {
2510 /* Eliminate unordered comparisons, as well as LTGT and ORD
2511 which are not used unless the mode has NaNs. */
2512 compcode &= ~COMPCODE_UNORD;
2513 if (compcode == COMPCODE_LTGT)
2514 compcode = COMPCODE_NE;
2515 else if (compcode == COMPCODE_ORD)
2516 compcode = COMPCODE_TRUE;
2517 }
2518 else if (flag_trapping_math)
2519 {
2520 /* Check that the original operation and the optimized ones will trap
2521 under the same condition. */
2522 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2523 && (lcompcode != COMPCODE_EQ)
2524 && (lcompcode != COMPCODE_ORD);
2525 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2526 && (rcompcode != COMPCODE_EQ)
2527 && (rcompcode != COMPCODE_ORD);
2528 bool trap = (compcode & COMPCODE_UNORD) == 0
2529 && (compcode != COMPCODE_EQ)
2530 && (compcode != COMPCODE_ORD);
2531
2532 /* In a short-circuited boolean expression the LHS might be
2533 such that the RHS, if evaluated, will never trap. For
2534 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2535 if neither x nor y is NaN. (This is a mixed blessing: for
2536 example, the expression above will never trap, hence
2537 optimizing it to x < y would be invalid). */
2538 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2539 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2540 rtrap = false;
2541
2542 /* If the comparison was short-circuited, and only the RHS
2543 trapped, we may now generate a spurious trap. */
2544 if (rtrap && !ltrap
2545 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2546 return NULL_TREE;
2547
2548 /* If we changed the conditions that cause a trap, we lose. */
2549 if ((ltrap || rtrap) != trap)
2550 return NULL_TREE;
2551 }
2552
2553 if (compcode == COMPCODE_TRUE)
2554 return constant_boolean_node (true, truth_type);
2555 else if (compcode == COMPCODE_FALSE)
2556 return constant_boolean_node (false, truth_type);
2557 else
2558 {
2559 enum tree_code tcode;
2560
2561 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2562 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2563 }
2564 }
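
/* A worked example of the bit encoding at play: for "a < b || a == b"
   the bit-wise OR of the compcodes gives COMPCODE_LT (1)
   | COMPCODE_EQ (2) == COMPCODE_LE (3), so when NaNs need not be
   honored the whole expression folds to the single comparison
   "a <= b". */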
2565 \f
2566 /* Return nonzero if two operands (typically of the same tree node)
2567 are necessarily equal. If either argument has side-effects this
2568 function returns zero. FLAGS modifies behavior as follows:
2569
2570 If OEP_ONLY_CONST is set, only return nonzero for constants.
2571 This function tests whether the operands are indistinguishable;
2572 it does not test whether they are equal using C's == operation.
2573 The distinction is important for IEEE floating point, because
2574 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2575 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2576
2577 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2578 even though it may hold multiple values during a function.
2579 This is because a GCC tree node guarantees that nothing else is
2580 executed between the evaluation of its "operands" (which may often
2581 be evaluated in arbitrary order). Hence if the operands themselves
2582 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2583 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2584 unset means assuming isochronic (or instantaneous) tree equivalence.
2585 Unless comparing arbitrary expression trees, such as from different
2586 statements, this flag can usually be left unset.
2587
2588 If OEP_PURE_SAME is set, then pure functions with identical arguments
2589 are considered the same. It is used when the caller has other ways
2590 to ensure that global memory is unchanged in between. */
2591
2592 int
2593 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2594 {
2595 /* If either is ERROR_MARK, they aren't equal. */
2596 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2597 || TREE_TYPE (arg0) == error_mark_node
2598 || TREE_TYPE (arg1) == error_mark_node)
2599 return 0;
2600
2601 /* Similarly, if either does not have a type (like a released SSA name),
2602 they aren't equal. */
2603 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2604 return 0;
2605
2606 /* Check equality of integer constants before bailing out due to
2607 precision differences. */
2608 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2609 return tree_int_cst_equal (arg0, arg1);
2610
2611 /* If the two types don't have the same signedness, then we can't
2612 consider them equal. We must check this before the STRIP_NOPS calls
2613 because they may change the signedness of the arguments. As pointers
2614 strictly don't have a signedness, require either two pointers or
2615 two non-pointers as well. */
2616 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2617 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2618 return 0;
2619
2620 /* We cannot consider pointers to different address space equal. */
2621 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2622 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2623 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2624 return 0;
2625
2626 /* If the two types don't have the same precision, then it is not safe
2627 to strip NOPs. */
2628 if (element_precision (TREE_TYPE (arg0))
2629 != element_precision (TREE_TYPE (arg1)))
2630 return 0;
2631
2632 STRIP_NOPS (arg0);
2633 STRIP_NOPS (arg1);
2634
2635 /* In case both args are comparisons but with different comparison
2636 code, try to swap the comparison operands of one arg to produce
2637 a match and compare that variant. */
2638 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2639 && COMPARISON_CLASS_P (arg0)
2640 && COMPARISON_CLASS_P (arg1))
2641 {
2642 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2643
2644 if (TREE_CODE (arg0) == swap_code)
2645 return operand_equal_p (TREE_OPERAND (arg0, 0),
2646 TREE_OPERAND (arg1, 1), flags)
2647 && operand_equal_p (TREE_OPERAND (arg0, 1),
2648 TREE_OPERAND (arg1, 0), flags);
2649 }
2650
2651 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2652 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2653 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2654 return 0;
2655
2656 /* This is needed for conversions and for COMPONENT_REF.
2657 Might as well play it safe and always test this. */
2658 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2659 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2660 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2661 return 0;
2662
2663 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2664 We don't care about side effects in that case because the SAVE_EXPR
2665 takes care of that for us. In all other cases, two expressions are
2666 equal if they have no side effects. If we have two identical
2667 expressions with side effects that should be treated the same due
2668 to the only side effects being identical SAVE_EXPR's, that will
2669 be detected in the recursive calls below.
2670 If we are taking an invariant address of two identical objects
2671 they are necessarily equal as well. */
2672 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2673 && (TREE_CODE (arg0) == SAVE_EXPR
2674 || (flags & OEP_CONSTANT_ADDRESS_OF)
2675 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2676 return 1;
2677
2678 /* Next handle constant cases, those for which we can return 1 even
2679 if ONLY_CONST is set. */
2680 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2681 switch (TREE_CODE (arg0))
2682 {
2683 case INTEGER_CST:
2684 return tree_int_cst_equal (arg0, arg1);
2685
2686 case FIXED_CST:
2687 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2688 TREE_FIXED_CST (arg1));
2689
2690 case REAL_CST:
2691 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2692 TREE_REAL_CST (arg1)))
2693 return 1;
2694
2695
2696 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2697 {
2698 /* If we do not distinguish between signed and unsigned zero,
2699 consider them equal. */
2700 if (real_zerop (arg0) && real_zerop (arg1))
2701 return 1;
2702 }
2703 return 0;
2704
2705 case VECTOR_CST:
2706 {
2707 unsigned i;
2708
2709 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2710 return 0;
2711
2712 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2713 {
2714 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2715 VECTOR_CST_ELT (arg1, i), flags))
2716 return 0;
2717 }
2718 return 1;
2719 }
2720
2721 case COMPLEX_CST:
2722 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2723 flags)
2724 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2725 flags));
2726
2727 case STRING_CST:
2728 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2729 && ! memcmp (TREE_STRING_POINTER (arg0),
2730 TREE_STRING_POINTER (arg1),
2731 TREE_STRING_LENGTH (arg0)));
2732
2733 case ADDR_EXPR:
2734 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2735 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2736 ? OEP_CONSTANT_ADDRESS_OF : 0);
2737 default:
2738 break;
2739 }
2740
2741 if (flags & OEP_ONLY_CONST)
2742 return 0;
2743
2744 /* Define macros to test an operand from arg0 and arg1 for equality and a
2745 variant that allows null and views null as being different from any
2746 non-null value. In the latter case, if either is null, then both
2747 must be; otherwise, do the normal comparison. */
2748 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2749 TREE_OPERAND (arg1, N), flags)
2750
2751 #define OP_SAME_WITH_NULL(N) \
2752 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2753 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2754
2755 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2756 {
2757 case tcc_unary:
2758 /* Two conversions are equal only if signedness and modes match. */
2759 switch (TREE_CODE (arg0))
2760 {
2761 CASE_CONVERT:
2762 case FIX_TRUNC_EXPR:
2763 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2764 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2765 return 0;
2766 break;
2767 default:
2768 break;
2769 }
2770
2771 return OP_SAME (0);
2772
2773
2774 case tcc_comparison:
2775 case tcc_binary:
2776 if (OP_SAME (0) && OP_SAME (1))
2777 return 1;
2778
2779 /* For commutative ops, allow the other order. */
2780 return (commutative_tree_code (TREE_CODE (arg0))
2781 && operand_equal_p (TREE_OPERAND (arg0, 0),
2782 TREE_OPERAND (arg1, 1), flags)
2783 && operand_equal_p (TREE_OPERAND (arg0, 1),
2784 TREE_OPERAND (arg1, 0), flags));
2785
2786 case tcc_reference:
2787 /* If either of the pointer (or reference) expressions we are
2788 dereferencing contain a side effect, these cannot be equal,
2789 but their addresses can be. */
2790 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2791 && (TREE_SIDE_EFFECTS (arg0)
2792 || TREE_SIDE_EFFECTS (arg1)))
2793 return 0;
2794
2795 switch (TREE_CODE (arg0))
2796 {
2797 case INDIRECT_REF:
2798 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2799 return OP_SAME (0);
2800
2801 case REALPART_EXPR:
2802 case IMAGPART_EXPR:
2803 return OP_SAME (0);
2804
2805 case TARGET_MEM_REF:
2806 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2807 /* Require equal extra operands and then fall through to MEM_REF
2808 handling of the two common operands. */
2809 if (!OP_SAME_WITH_NULL (2)
2810 || !OP_SAME_WITH_NULL (3)
2811 || !OP_SAME_WITH_NULL (4))
2812 return 0;
2813 /* Fallthru. */
2814 case MEM_REF:
2815 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2816 /* Require equal access sizes, and similar pointer types.
2817 We can have incomplete types for array references of
2818 variable-sized arrays from the Fortran frontend
2819 though. Also verify the types are compatible. */
2820 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2821 || (TYPE_SIZE (TREE_TYPE (arg0))
2822 && TYPE_SIZE (TREE_TYPE (arg1))
2823 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2824 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2825 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2826 && alias_ptr_types_compatible_p
2827 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2828 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2829 && OP_SAME (0) && OP_SAME (1));
2830
2831 case ARRAY_REF:
2832 case ARRAY_RANGE_REF:
2833 /* Operands 2 and 3 may be null.
2834 Compare the array index by value first if it is constant, as the
2835 indexes may have different types but the same value here. */
2836 if (!OP_SAME (0))
2837 return 0;
2838 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2839 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2840 TREE_OPERAND (arg1, 1))
2841 || OP_SAME (1))
2842 && OP_SAME_WITH_NULL (2)
2843 && OP_SAME_WITH_NULL (3));
2844
2845 case COMPONENT_REF:
2846 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2847 may be NULL when we're called to compare MEM_EXPRs. */
2848 if (!OP_SAME_WITH_NULL (0)
2849 || !OP_SAME (1))
2850 return 0;
2851 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2852 return OP_SAME_WITH_NULL (2);
2853
2854 case BIT_FIELD_REF:
2855 if (!OP_SAME (0))
2856 return 0;
2857 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2858 return OP_SAME (1) && OP_SAME (2);
2859
2860 default:
2861 return 0;
2862 }
2863
2864 case tcc_expression:
2865 switch (TREE_CODE (arg0))
2866 {
2867 case ADDR_EXPR:
2868 case TRUTH_NOT_EXPR:
2869 return OP_SAME (0);
2870
2871 case TRUTH_ANDIF_EXPR:
2872 case TRUTH_ORIF_EXPR:
2873 return OP_SAME (0) && OP_SAME (1);
2874
2875 case FMA_EXPR:
2876 case WIDEN_MULT_PLUS_EXPR:
2877 case WIDEN_MULT_MINUS_EXPR:
2878 if (!OP_SAME (2))
2879 return 0;
2880 /* The multiplication operands are commutative. */
2881 /* FALLTHRU */
2882
2883 case TRUTH_AND_EXPR:
2884 case TRUTH_OR_EXPR:
2885 case TRUTH_XOR_EXPR:
2886 if (OP_SAME (0) && OP_SAME (1))
2887 return 1;
2888
2889 /* Otherwise take into account this is a commutative operation. */
2890 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2891 TREE_OPERAND (arg1, 1), flags)
2892 && operand_equal_p (TREE_OPERAND (arg0, 1),
2893 TREE_OPERAND (arg1, 0), flags));
2894
2895 case COND_EXPR:
2896 case VEC_COND_EXPR:
2897 case DOT_PROD_EXPR:
2898 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2899
2900 default:
2901 return 0;
2902 }
2903
2904 case tcc_vl_exp:
2905 switch (TREE_CODE (arg0))
2906 {
2907 case CALL_EXPR:
2908 /* If the CALL_EXPRs call different functions, then they
2909 clearly cannot be equal. */
2910 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2911 flags))
2912 return 0;
2913
2914 {
2915 unsigned int cef = call_expr_flags (arg0);
2916 if (flags & OEP_PURE_SAME)
2917 cef &= ECF_CONST | ECF_PURE;
2918 else
2919 cef &= ECF_CONST;
2920 if (!cef)
2921 return 0;
2922 }
2923
2924 /* Now see if all the arguments are the same. */
2925 {
2926 const_call_expr_arg_iterator iter0, iter1;
2927 const_tree a0, a1;
2928 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2929 a1 = first_const_call_expr_arg (arg1, &iter1);
2930 a0 && a1;
2931 a0 = next_const_call_expr_arg (&iter0),
2932 a1 = next_const_call_expr_arg (&iter1))
2933 if (! operand_equal_p (a0, a1, flags))
2934 return 0;
2935
2936 /* If we get here and both argument lists are exhausted
2937 then the CALL_EXPRs are equal. */
2938 return ! (a0 || a1);
2939 }
2940 default:
2941 return 0;
2942 }
2943
2944 case tcc_declaration:
2945 /* Consider __builtin_sqrt equal to sqrt. */
2946 return (TREE_CODE (arg0) == FUNCTION_DECL
2947 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2948 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2949 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2950
2951 default:
2952 return 0;
2953 }
2954
2955 #undef OP_SAME
2956 #undef OP_SAME_WITH_NULL
2957 }
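
/* For example, operand_equal_p treats "a + b" and "b + a" as equal
   through the commutativity check in the tcc_binary case, but regards
   the REAL_CSTs -0.0 and 0.0 as distinct unless the mode does not
   honor signed zeros. */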
2958 \f
2959 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2960 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2961
2962 When in doubt, return 0. */
2963
2964 static int
2965 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2966 {
2967 int unsignedp1, unsignedpo;
2968 tree primarg0, primarg1, primother;
2969 unsigned int correct_width;
2970
2971 if (operand_equal_p (arg0, arg1, 0))
2972 return 1;
2973
2974 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2975 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2976 return 0;
2977
2978 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2979 and see if the inner values are the same. This removes any
2980 signedness comparison, which doesn't matter here. */
2981 primarg0 = arg0, primarg1 = arg1;
2982 STRIP_NOPS (primarg0);
2983 STRIP_NOPS (primarg1);
2984 if (operand_equal_p (primarg0, primarg1, 0))
2985 return 1;
2986
2987 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2988 actual comparison operand, ARG0.
2989
2990 First throw away any conversions to wider types
2991 already present in the operands. */
2992
2993 primarg1 = get_narrower (arg1, &unsignedp1);
2994 primother = get_narrower (other, &unsignedpo);
2995
2996 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2997 if (unsignedp1 == unsignedpo
2998 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2999 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3000 {
3001 tree type = TREE_TYPE (arg0);
3002
3003 /* Make sure shorter operand is extended the right way
3004 to match the longer operand. */
3005 primarg1 = fold_convert (signed_or_unsigned_type_for
3006 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3007
3008 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3009 return 1;
3010 }
3011
3012 return 0;
3013 }
3014 \f
3015 /* See if ARG is an expression that is either a comparison or is performing
3016 arithmetic on comparisons. The comparisons must only be comparing
3017 two different values, which will be stored in *CVAL1 and *CVAL2; if
3018 they are nonzero it means that some operands have already been found.
3019 No variables may be used anywhere else in the expression except in the
3020 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3021 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3022
3023 If this is true, return 1. Otherwise, return zero. */
3024
3025 static int
3026 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3027 {
3028 enum tree_code code = TREE_CODE (arg);
3029 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3030
3031 /* We can handle some of the tcc_expression cases here. */
3032 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3033 tclass = tcc_unary;
3034 else if (tclass == tcc_expression
3035 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3036 || code == COMPOUND_EXPR))
3037 tclass = tcc_binary;
3038
3039 else if (tclass == tcc_expression && code == SAVE_EXPR
3040 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3041 {
3042 /* If we've already found a CVAL1 or CVAL2, this expression is
3043 too complex to handle. */
3044 if (*cval1 || *cval2)
3045 return 0;
3046
3047 tclass = tcc_unary;
3048 *save_p = 1;
3049 }
3050
3051 switch (tclass)
3052 {
3053 case tcc_unary:
3054 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3055
3056 case tcc_binary:
3057 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3058 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3059 cval1, cval2, save_p));
3060
3061 case tcc_constant:
3062 return 1;
3063
3064 case tcc_expression:
3065 if (code == COND_EXPR)
3066 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3067 cval1, cval2, save_p)
3068 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3069 cval1, cval2, save_p)
3070 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3071 cval1, cval2, save_p));
3072 return 0;
3073
3074 case tcc_comparison:
3075 /* First see if we can handle the first operand, then the second. For
3076 the second operand, we know *CVAL1 can't be zero. It must be that
3077 one side of the comparison is each of the values; test for the
3078 case where this isn't true by failing if the two operands
3079 are the same. */
3080
3081 if (operand_equal_p (TREE_OPERAND (arg, 0),
3082 TREE_OPERAND (arg, 1), 0))
3083 return 0;
3084
3085 if (*cval1 == 0)
3086 *cval1 = TREE_OPERAND (arg, 0);
3087 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3088 ;
3089 else if (*cval2 == 0)
3090 *cval2 = TREE_OPERAND (arg, 0);
3091 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3092 ;
3093 else
3094 return 0;
3095
3096 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3097 ;
3098 else if (*cval2 == 0)
3099 *cval2 = TREE_OPERAND (arg, 1);
3100 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3101 ;
3102 else
3103 return 0;
3104
3105 return 1;
3106
3107 default:
3108 return 0;
3109 }
3110 }
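
/* For example, for ARG == "(a < b) | (b == a)" the walk above records
   *CVAL1 == a and *CVAL2 == b: every comparison in the expression
   mentions exactly those two values, so the function returns 1. */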
3111 \f
3112 /* ARG is a tree that is known to contain just arithmetic operations and
3113 comparisons. Evaluate the operations in the tree substituting NEW0 for
3114 any occurrence of OLD0 as an operand of a comparison and likewise for
3115 NEW1 and OLD1. */
3116
3117 static tree
3118 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3119 tree old1, tree new1)
3120 {
3121 tree type = TREE_TYPE (arg);
3122 enum tree_code code = TREE_CODE (arg);
3123 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3124
3125 /* We can handle some of the tcc_expression cases here. */
3126 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3127 tclass = tcc_unary;
3128 else if (tclass == tcc_expression
3129 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3130 tclass = tcc_binary;
3131
3132 switch (tclass)
3133 {
3134 case tcc_unary:
3135 return fold_build1_loc (loc, code, type,
3136 eval_subst (loc, TREE_OPERAND (arg, 0),
3137 old0, new0, old1, new1));
3138
3139 case tcc_binary:
3140 return fold_build2_loc (loc, code, type,
3141 eval_subst (loc, TREE_OPERAND (arg, 0),
3142 old0, new0, old1, new1),
3143 eval_subst (loc, TREE_OPERAND (arg, 1),
3144 old0, new0, old1, new1));
3145
3146 case tcc_expression:
3147 switch (code)
3148 {
3149 case SAVE_EXPR:
3150 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3151 old1, new1);
3152
3153 case COMPOUND_EXPR:
3154 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3155 old1, new1);
3156
3157 case COND_EXPR:
3158 return fold_build3_loc (loc, code, type,
3159 eval_subst (loc, TREE_OPERAND (arg, 0),
3160 old0, new0, old1, new1),
3161 eval_subst (loc, TREE_OPERAND (arg, 1),
3162 old0, new0, old1, new1),
3163 eval_subst (loc, TREE_OPERAND (arg, 2),
3164 old0, new0, old1, new1));
3165 default:
3166 break;
3167 }
3168 /* Fall through - ??? */
3169
3170 case tcc_comparison:
3171 {
3172 tree arg0 = TREE_OPERAND (arg, 0);
3173 tree arg1 = TREE_OPERAND (arg, 1);
3174
3175 /* We need to check both for exact equality and tree equality. The
3176 former will be true if the operand has a side-effect. In that
3177 case, we know the operand occurred exactly once. */
3178
3179 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3180 arg0 = new0;
3181 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3182 arg0 = new1;
3183
3184 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3185 arg1 = new0;
3186 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3187 arg1 = new1;
3188
3189 return fold_build2_loc (loc, code, type, arg0, arg1);
3190 }
3191
3192 default:
3193 return arg;
3194 }
3195 }
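
/* For example, eval_subst on ARG == "(a < b) && (a == c)" with
   OLD0/NEW0 == a/x and OLD1/NEW1 == b/y rebuilds the expression as
   "(x < y) && (x == c)": each comparison operand matching OLD0 or
   OLD1 is replaced and the result is refolded. */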
3196 \f
3197 /* Return a tree for the case when the result of an expression is RESULT
3198 converted to TYPE and OMITTED was previously an operand of the expression
3199 but is now not needed (e.g., we folded OMITTED * 0).
3200
3201 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3202 the conversion of RESULT to TYPE. */
3203
3204 tree
3205 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3206 {
3207 tree t = fold_convert_loc (loc, type, result);
3208
3209 /* If the resulting operand is an empty statement, just return the
3210 omitted statement cast to void. */
3211 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3212 return build1_loc (loc, NOP_EXPR, void_type_node,
3213 fold_ignored_result (omitted));
3214
3215 if (TREE_SIDE_EFFECTS (omitted))
3216 return build2_loc (loc, COMPOUND_EXPR, type,
3217 fold_ignored_result (omitted), t);
3218
3219 return non_lvalue_loc (loc, t);
3220 }
3221
3222 /* Return a tree for the case when the result of an expression is RESULT
3223 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3224 of the expression but are now not needed.
3225
3226 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3227 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3228 evaluated before OMITTED2. Otherwise, if neither has side effects,
3229 just do the conversion of RESULT to TYPE. */
3230
3231 tree
3232 omit_two_operands_loc (location_t loc, tree type, tree result,
3233 tree omitted1, tree omitted2)
3234 {
3235 tree t = fold_convert_loc (loc, type, result);
3236
3237 if (TREE_SIDE_EFFECTS (omitted2))
3238 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3239 if (TREE_SIDE_EFFECTS (omitted1))
3240 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3241
3242 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3243 }
3244
3245 \f
3246 /* Return a simplified tree node for the truth-negation of ARG. This
3247 never alters ARG itself. We assume that ARG is an operation that
3248 returns a truth value (0 or 1).
3249
3250 FIXME: one would think we would fold the result, but it causes
3251 problems with the dominator optimizer. */
3252
3253 static tree
3254 fold_truth_not_expr (location_t loc, tree arg)
3255 {
3256 tree type = TREE_TYPE (arg);
3257 enum tree_code code = TREE_CODE (arg);
3258 location_t loc1, loc2;
3259
3260 /* If this is a comparison, we can simply invert it, except for
3261 floating-point non-equality comparisons, in which case we just
3262 enclose a TRUTH_NOT_EXPR around what we have. */
3263
3264 if (TREE_CODE_CLASS (code) == tcc_comparison)
3265 {
3266 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3267 if (FLOAT_TYPE_P (op_type)
3268 && flag_trapping_math
3269 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3270 && code != NE_EXPR && code != EQ_EXPR)
3271 return NULL_TREE;
3272
3273 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3274 if (code == ERROR_MARK)
3275 return NULL_TREE;
3276
3277 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3278 TREE_OPERAND (arg, 1));
3279 }
3280
3281 switch (code)
3282 {
3283 case INTEGER_CST:
3284 return constant_boolean_node (integer_zerop (arg), type);
3285
3286 case TRUTH_AND_EXPR:
3287 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3288 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3289 return build2_loc (loc, TRUTH_OR_EXPR, type,
3290 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3291 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3292
3293 case TRUTH_OR_EXPR:
3294 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3295 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3296 return build2_loc (loc, TRUTH_AND_EXPR, type,
3297 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3298 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3299
3300 case TRUTH_XOR_EXPR:
3301 /* Here we can invert either operand. We invert the first operand
3302 unless the second operand is a TRUTH_NOT_EXPR in which case our
3303 result is the XOR of the first operand with the inside of the
3304 negation of the second operand. */
3305
3306 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3307 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3308 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3309 else
3310 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3311 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3312 TREE_OPERAND (arg, 1));
3313
3314 case TRUTH_ANDIF_EXPR:
3315 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3316 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3317 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3318 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3319 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3320
3321 case TRUTH_ORIF_EXPR:
3322 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3323 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3324 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3325 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3326 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3327
3328 case TRUTH_NOT_EXPR:
3329 return TREE_OPERAND (arg, 0);
3330
3331 case COND_EXPR:
3332 {
3333 tree arg1 = TREE_OPERAND (arg, 1);
3334 tree arg2 = TREE_OPERAND (arg, 2);
3335
3336 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3337 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3338
3339 /* A COND_EXPR may have a throw as one operand, which
3340 then has void type. Just leave void operands
3341 as they are. */
3342 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3343 VOID_TYPE_P (TREE_TYPE (arg1))
3344 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3345 VOID_TYPE_P (TREE_TYPE (arg2))
3346 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3347 }
3348
3349 case COMPOUND_EXPR:
3350 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3351 return build2_loc (loc, COMPOUND_EXPR, type,
3352 TREE_OPERAND (arg, 0),
3353 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3354
3355 case NON_LVALUE_EXPR:
3356 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3357 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3358
3359 CASE_CONVERT:
3360 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3361 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3362
3363 /* ... fall through ... */
3364
3365 case FLOAT_EXPR:
3366 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3367 return build1_loc (loc, TREE_CODE (arg), type,
3368 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3369
3370 case BIT_AND_EXPR:
3371 if (!integer_onep (TREE_OPERAND (arg, 1)))
3372 return NULL_TREE;
3373 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3374
3375 case SAVE_EXPR:
3376 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3377
3378 case CLEANUP_POINT_EXPR:
3379 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3380 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3381 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3382
3383 default:
3384 return NULL_TREE;
3385 }
3386 }
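
/* For example, negating "a && b" through the TRUTH_AND_EXPR case
   above yields "!a || !b" (De Morgan), and negating a COND_EXPR
   inverts both of its non-void arms while leaving the condition
   alone. */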
3387
3388 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3389 assume that ARG is an operation that returns a truth value (0 or 1
3390 for scalars, 0 or -1 for vectors). Return the folded expression if
3391 folding is successful. Otherwise, return NULL_TREE. */
3392
3393 static tree
3394 fold_invert_truthvalue (location_t loc, tree arg)
3395 {
3396 tree type = TREE_TYPE (arg);
3397 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3398 ? BIT_NOT_EXPR
3399 : TRUTH_NOT_EXPR,
3400 type, arg);
3401 }
3402
3403 /* Return a simplified tree node for the truth-negation of ARG. This
3404 never alters ARG itself. We assume that ARG is an operation that
3405 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3406
3407 tree
3408 invert_truthvalue_loc (location_t loc, tree arg)
3409 {
3410 if (TREE_CODE (arg) == ERROR_MARK)
3411 return arg;
3412
3413 tree type = TREE_TYPE (arg);
3414 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3415 ? BIT_NOT_EXPR
3416 : TRUTH_NOT_EXPR,
3417 type, arg);
3418 }
3419
3420 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3421 operands are another bit-wise operation with a common input. If so,
3422 distribute the bit operations to save an operation and possibly two if
3423 constants are involved. For example, convert
3424 (A | B) & (A | C) into A | (B & C)
3425 Further simplification will occur if B and C are constants.
3426
3427 If this optimization cannot be done, 0 will be returned. */
3428
3429 static tree
3430 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3431 tree arg0, tree arg1)
3432 {
3433 tree common;
3434 tree left, right;
3435
3436 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3437 || TREE_CODE (arg0) == code
3438 || (TREE_CODE (arg0) != BIT_AND_EXPR
3439 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3440 return 0;
3441
3442 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3443 {
3444 common = TREE_OPERAND (arg0, 0);
3445 left = TREE_OPERAND (arg0, 1);
3446 right = TREE_OPERAND (arg1, 1);
3447 }
3448 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3449 {
3450 common = TREE_OPERAND (arg0, 0);
3451 left = TREE_OPERAND (arg0, 1);
3452 right = TREE_OPERAND (arg1, 0);
3453 }
3454 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3455 {
3456 common = TREE_OPERAND (arg0, 1);
3457 left = TREE_OPERAND (arg0, 0);
3458 right = TREE_OPERAND (arg1, 1);
3459 }
3460 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3461 {
3462 common = TREE_OPERAND (arg0, 1);
3463 left = TREE_OPERAND (arg0, 0);
3464 right = TREE_OPERAND (arg1, 0);
3465 }
3466 else
3467 return 0;
3468
3469 common = fold_convert_loc (loc, type, common);
3470 left = fold_convert_loc (loc, type, left);
3471 right = fold_convert_loc (loc, type, right);
3472 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3473 fold_build2_loc (loc, code, type, left, right));
3474 }
3475
3476 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3477 with code CODE. This optimization is unsafe. */
3478 static tree
3479 distribute_real_division (location_t loc, enum tree_code code, tree type,
3480 tree arg0, tree arg1)
3481 {
3482 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3483 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3484
3485 /* (A / C) +- (B / C) -> (A +- B) / C. */
3486 if (mul0 == mul1
3487 && operand_equal_p (TREE_OPERAND (arg0, 1),
3488 TREE_OPERAND (arg1, 1), 0))
3489 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3490 fold_build2_loc (loc, code, type,
3491 TREE_OPERAND (arg0, 0),
3492 TREE_OPERAND (arg1, 0)),
3493 TREE_OPERAND (arg0, 1));
3494
3495 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3496 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3497 TREE_OPERAND (arg1, 0), 0)
3498 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3499 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3500 {
3501 REAL_VALUE_TYPE r0, r1;
3502 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3503 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3504 if (!mul0)
3505 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3506 if (!mul1)
3507 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3508 real_arithmetic (&r0, code, &r0, &r1);
3509 return fold_build2_loc (loc, MULT_EXPR, type,
3510 TREE_OPERAND (arg0, 0),
3511 build_real (type, r0));
3512 }
3513
3514 return NULL_TREE;
3515 }
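
/* A worked example of the second pattern (unsafe for exact IEEE
   semantics, as noted above): "a / 5.0 + a / 4.0" becomes
   "a * (1/5.0 + 1/4.0)", i.e. "a * 0.45" with the constant folded,
   trading two divisions for a single multiplication. */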
3516 \f
3517 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3518 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3519
3520 static tree
3521 make_bit_field_ref (location_t loc, tree inner, tree type,
3522 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3523 {
3524 tree result, bftype;
3525
3526 if (bitpos == 0)
3527 {
3528 tree size = TYPE_SIZE (TREE_TYPE (inner));
3529 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3530 || POINTER_TYPE_P (TREE_TYPE (inner)))
3531 && tree_fits_shwi_p (size)
3532 && tree_to_shwi (size) == bitsize)
3533 return fold_convert_loc (loc, type, inner);
3534 }
3535
3536 bftype = type;
3537 if (TYPE_PRECISION (bftype) != bitsize
3538 || TYPE_UNSIGNED (bftype) == !unsignedp)
3539 bftype = build_nonstandard_integer_type (bitsize, 0);
3540
3541 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3542 size_int (bitsize), bitsize_int (bitpos));
3543
3544 if (bftype != type)
3545 result = fold_convert_loc (loc, type, result);
3546
3547 return result;
3548 }
3549
3550 /* Optimize a bit-field compare.
3551
3552 There are two cases: First is a compare against a constant and the
3553 second is a comparison of two items where the fields are at the same
3554 bit position relative to the start of a chunk (byte, halfword, word)
3555 large enough to contain it. In these cases we can avoid the shift
3556 implicit in bitfield extractions.
3557
3558 For constants, we emit a compare of the shifted constant with the
3559 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3560 compared. For two fields at the same position, we do the ANDs with the
3561 similar mask and compare the result of the ANDs.
3562
3563 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3564 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3565 are the left and right operands of the comparison, respectively.
3566
3567 If the optimization described above can be done, we return the resulting
3568 tree. Otherwise we return zero. */
3569
3570 static tree
3571 optimize_bit_field_compare (location_t loc, enum tree_code code,
3572 tree compare_type, tree lhs, tree rhs)
3573 {
3574 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3575 tree type = TREE_TYPE (lhs);
3576 tree unsigned_type;
3577 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3578 machine_mode lmode, rmode, nmode;
3579 int lunsignedp, runsignedp;
3580 int lvolatilep = 0, rvolatilep = 0;
3581 tree linner, rinner = NULL_TREE;
3582 tree mask;
3583 tree offset;
3584
3585 /* Get all the information about the extractions being done. If the bit size
3586 is the same as the size of the underlying object, we aren't doing an
3587 extraction at all and so can do nothing. We also don't want to
3588 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3589 then will no longer be able to replace it. */
3590 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3591 &lunsignedp, &lvolatilep, false);
3592 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3593 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3594 return 0;
3595
3596 if (!const_p)
3597 {
3598 /* If this is not a constant, we can only do something if bit positions,
3599 sizes, and signedness are the same. */
3600 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3601 &runsignedp, &rvolatilep, false);
3602
3603 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3604 || lunsignedp != runsignedp || offset != 0
3605 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3606 return 0;
3607 }
3608
3609 /* See if we can find a mode to refer to this field. We should be able to,
3610 but fail if we can't. */
3611 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3612 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3613 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3614 TYPE_ALIGN (TREE_TYPE (rinner))),
3615 word_mode, false);
3616 if (nmode == VOIDmode)
3617 return 0;
3618
3619 /* Set signed and unsigned types of the precision of this mode for the
3620 shifts below. */
3621 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3622
3623 /* Compute the bit position and size for the new reference and our offset
3624 within it. If the new reference is the same size as the original, we
3625 won't optimize anything, so return zero. */
3626 nbitsize = GET_MODE_BITSIZE (nmode);
3627 nbitpos = lbitpos & ~ (nbitsize - 1);
3628 lbitpos -= nbitpos;
3629 if (nbitsize == lbitsize)
3630 return 0;
3631
3632 if (BYTES_BIG_ENDIAN)
3633 lbitpos = nbitsize - lbitsize - lbitpos;
3634
3635 /* Make the mask to be used against the extracted field. */
3636 mask = build_int_cst_type (unsigned_type, -1);
3637 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3638 mask = const_binop (RSHIFT_EXPR, mask,
3639 size_int (nbitsize - lbitsize - lbitpos));
3640
3641 if (! const_p)
3642 /* If not comparing with constant, just rework the comparison
3643 and return. */
3644 return fold_build2_loc (loc, code, compare_type,
3645 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3646 make_bit_field_ref (loc, linner,
3647 unsigned_type,
3648 nbitsize, nbitpos,
3649 1),
3650 mask),
3651 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3652 make_bit_field_ref (loc, rinner,
3653 unsigned_type,
3654 nbitsize, nbitpos,
3655 1),
3656 mask));
3657
3658 /* Otherwise, we are handling the constant case. See if the constant is too
3659 big for the field. Warn and return a tree for 0 (false) if so. We do
3660 this not only for its own sake, but to avoid having to test for this
3661 error case below. If we didn't, we might generate wrong code.
3662
3663 For unsigned fields, the constant shifted right by the field length should
3664 be all zero. For signed fields, the high-order bits should agree with
3665 the sign bit. */
3666
3667 if (lunsignedp)
3668 {
3669 if (wi::lrshift (rhs, lbitsize) != 0)
3670 {
3671 warning (0, "comparison is always %d due to width of bit-field",
3672 code == NE_EXPR);
3673 return constant_boolean_node (code == NE_EXPR, compare_type);
3674 }
3675 }
3676 else
3677 {
3678 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3679 if (tem != 0 && tem != -1)
3680 {
3681 warning (0, "comparison is always %d due to width of bit-field",
3682 code == NE_EXPR);
3683 return constant_boolean_node (code == NE_EXPR, compare_type);
3684 }
3685 }
3686
3687 /* Single-bit compares should always be against zero. */
3688 if (lbitsize == 1 && ! integer_zerop (rhs))
3689 {
3690 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3691 rhs = build_int_cst (type, 0);
3692 }
3693
3694 /* Make a new bitfield reference, shift the constant over the
3695 appropriate number of bits and mask it with the computed mask
3696 (in case this was a signed field). If we changed it, make a new one. */
3697 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3698
3699 rhs = const_binop (BIT_AND_EXPR,
3700 const_binop (LSHIFT_EXPR,
3701 fold_convert_loc (loc, unsigned_type, rhs),
3702 size_int (lbitpos)),
3703 mask);
3704
3705 lhs = build2_loc (loc, code, compare_type,
3706 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3707 return lhs;
3708 }
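
/* A standalone sketch (not part of GCC) of the transformation above: a
   bit-field comparison such as s.f == 3 can be carried out by loading a
   whole containing chunk, masking it, and comparing against the shifted
   constant. The struct layout assumed below (F in the low three bits of
   the first byte, so the constant needs no shift) is typical for
   little-endian targets but not guaranteed. */
#if 0
#include <assert.h>
#include <string.h>

struct S { unsigned int f : 3; unsigned int g : 5; };

static int
field_is_3 (const struct S *s)
{
  unsigned char chunk;
  memcpy (&chunk, s, 1);	/* Load the containing byte once.  */
  /* Compare the masked chunk against the constant; no bit-field
     extraction shift is needed since F starts at bit 0.  */
  return (chunk & 0x07u) == 0x03u;
}

int
main (void)
{
  struct S s = { 3, 21 };
  assert (field_is_3 (&s) == (s.f == 3));
  s.f = 5;
  assert (field_is_3 (&s) == (s.f == 3));
  return 0;
}
#endif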
3709 \f
3710 /* Subroutine for fold_truth_andor_1: decode a field reference.
3711
3712 If EXP is a comparison reference, we return the innermost reference.
3713
3714 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3715 set to the starting bit number.
3716
3717 If the innermost field can be completely contained in a mode-sized
3718 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3719
3720 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3721 otherwise it is not changed.
3722
3723 *PUNSIGNEDP is set to the signedness of the field.
3724
3725 *PMASK is set to the mask used. This is either contained in a
3726 BIT_AND_EXPR or derived from the width of the field.
3727
3728 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3729
3730 Return 0 if this is not a component reference or is one that we can't
3731 do anything with. */
3732
3733 static tree
3734 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3735 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3736 int *punsignedp, int *pvolatilep,
3737 tree *pmask, tree *pand_mask)
3738 {
3739 tree outer_type = 0;
3740 tree and_mask = 0;
3741 tree mask, inner, offset;
3742 tree unsigned_type;
3743 unsigned int precision;
3744
3745 /* All the optimizations using this function assume integer fields.
3746 There are problems with FP fields since the type_for_size call
3747 below can fail for, e.g., XFmode. */
3748 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3749 return 0;
3750
3751 /* We are interested in the bare arrangement of bits, so strip everything
3752 that doesn't affect the machine mode. However, record the type of the
3753 outermost expression if it may matter below. */
3754 if (CONVERT_EXPR_P (exp)
3755 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3756 outer_type = TREE_TYPE (exp);
3757 STRIP_NOPS (exp);
3758
3759 if (TREE_CODE (exp) == BIT_AND_EXPR)
3760 {
3761 and_mask = TREE_OPERAND (exp, 1);
3762 exp = TREE_OPERAND (exp, 0);
3763 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3764 if (TREE_CODE (and_mask) != INTEGER_CST)
3765 return 0;
3766 }
3767
3768 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3769 punsignedp, pvolatilep, false);
3770 if ((inner == exp && and_mask == 0)
3771 || *pbitsize < 0 || offset != 0
3772 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3773 return 0;
3774
3775 /* If the number of bits in the reference is the same as the bitsize of
3776 the outer type, then the outer type gives the signedness. Otherwise
3777 (in case of a small bitfield) the signedness is unchanged. */
3778 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3779 *punsignedp = TYPE_UNSIGNED (outer_type);
3780
3781 /* Compute the mask to access the bitfield. */
3782 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3783 precision = TYPE_PRECISION (unsigned_type);
3784
3785 mask = build_int_cst_type (unsigned_type, -1);
3786
3787 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3788 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3789
3790 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3791 if (and_mask != 0)
3792 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3793 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3794
3795 *pmask = mask;
3796 *pand_mask = and_mask;
3797 return inner;
3798 }
3799
3800 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3801 bit positions and the type of MASK is signed. */
3802
3803 static int
3804 all_ones_mask_p (const_tree mask, unsigned int size)
3805 {
3806 tree type = TREE_TYPE (mask);
3807 unsigned int precision = TYPE_PRECISION (type);
3808
3809 /* If this function returns true when the type of the mask is
3810 UNSIGNED, then there will be errors. In particular see
3811 gcc.c-torture/execute/990326-1.c. There does not appear to be
3812 any documentation paper trail as to why this is so. But the pre
3813 wide-int worked with that restriction and it has been preserved
3814 here. */
3815 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3816 return false;
3817
3818 return wi::mask (size, false, precision) == mask;
3819 }
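
/* A minimal sketch (not GCC code) of the predicate above, assuming the
   mask fits in 64 bits: SIZE low-order ones means the value equals
   (1 << SIZE) - 1. */
#if 0
static int
low_ones_p (unsigned long long mask, unsigned int size)
{
  unsigned long long want = size >= 64 ? ~0ULL : (1ULL << size) - 1;
  return mask == want;
}
/* low_ones_p (0xFF, 8) == 1; low_ones_p (0xFE, 8) == 0.  */
#endif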
3820
3821 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3822 represents the sign bit of EXP's type. If EXP represents a sign
3823 or zero extension, also test VAL against the unextended type.
3824 The return value is the (sub)expression whose sign bit is VAL,
3825 or NULL_TREE otherwise. */
3826
3827 tree
3828 sign_bit_p (tree exp, const_tree val)
3829 {
3830 int width;
3831 tree t;
3832
3833 /* Tree EXP must have an integral type. */
3834 t = TREE_TYPE (exp);
3835 if (! INTEGRAL_TYPE_P (t))
3836 return NULL_TREE;
3837
3838 /* Tree VAL must be an integer constant. */
3839 if (TREE_CODE (val) != INTEGER_CST
3840 || TREE_OVERFLOW (val))
3841 return NULL_TREE;
3842
3843 width = TYPE_PRECISION (t);
3844 if (wi::only_sign_bit_p (val, width))
3845 return exp;
3846
3847 /* Handle extension from a narrower type. */
3848 if (TREE_CODE (exp) == NOP_EXPR
3849 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3850 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3851
3852 return NULL_TREE;
3853 }
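
/* Standalone sketch (not GCC code): for a WIDTH-bit type the sign bit
   is the single value 1 << (WIDTH - 1), e.g. 0x80 for 8 bits and 0x8000
   for 16; that is the property wi::only_sign_bit_p checks above. */
#if 0
static int
is_sign_bit (unsigned long long val, unsigned int width)
{
  return width >= 1 && width <= 64 && val == 1ULL << (width - 1);
}
#endif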
3854
3855 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3856 to be evaluated unconditionally. */
3857
3858 static int
3859 simple_operand_p (const_tree exp)
3860 {
3861 /* Strip any conversions that don't change the machine mode. */
3862 STRIP_NOPS (exp);
3863
3864 return (CONSTANT_CLASS_P (exp)
3865 || TREE_CODE (exp) == SSA_NAME
3866 || (DECL_P (exp)
3867 && ! TREE_ADDRESSABLE (exp)
3868 && ! TREE_THIS_VOLATILE (exp)
3869 && ! DECL_NONLOCAL (exp)
3870 /* Don't regard global variables as simple. They may be
3871 allocated in ways unknown to the compiler (shared memory,
3872 #pragma weak, etc). */
3873 && ! TREE_PUBLIC (exp)
3874 && ! DECL_EXTERNAL (exp)
3875 /* Weakrefs are not safe to be read, since they can be NULL.
3876 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3877 have DECL_WEAK flag set. */
3878 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3879 /* Loading a static variable is unduly expensive, but global
3880 registers aren't expensive. */
3881 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3882 }
3883
3884 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3885 to be evaluated unconditionally.
3886 In addition to simple_operand_p, we assume that comparisons, conversions,
3887 and logic-not operations are simple, if their operands are simple, too. */
3888
3889 static bool
3890 simple_operand_p_2 (tree exp)
3891 {
3892 enum tree_code code;
3893
3894 if (TREE_SIDE_EFFECTS (exp)
3895 || tree_could_trap_p (exp))
3896 return false;
3897
3898 while (CONVERT_EXPR_P (exp))
3899 exp = TREE_OPERAND (exp, 0);
3900
3901 code = TREE_CODE (exp);
3902
3903 if (TREE_CODE_CLASS (code) == tcc_comparison)
3904 return (simple_operand_p (TREE_OPERAND (exp, 0))
3905 && simple_operand_p (TREE_OPERAND (exp, 1)));
3906
3907 if (code == TRUTH_NOT_EXPR)
3908 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3909
3910 return simple_operand_p (exp);
3911 }
3912
3913 \f
3914 /* The following functions are subroutines to fold_range_test and allow it to
3915 try to change a logical combination of comparisons into a range test.
3916
3917 For example, both
3918 X == 2 || X == 3 || X == 4 || X == 5
3919 and
3920 X >= 2 && X <= 5
3921 are converted to
3922 (unsigned) (X - 2) <= 3
3923
3924 We describe each set of comparisons as being either inside or outside
3925 a range, using a variable named like IN_P, and then describe the
3926 range with a lower and upper bound. If one of the bounds is omitted,
3927 it represents either the highest or lowest value of the type.
3928
3929 In the comments below, we represent a range by two numbers in brackets
3930 preceded by a "+" to designate being inside that range, or a "-" to
3931 designate being outside that range, so the condition can be inverted by
3932 flipping the prefix. An omitted bound is represented by a "-". For
3933 example, "- [-, 10]" means being outside the range starting at the lowest
3934 possible value and ending at 10, in other words, being greater than 10.
3935 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3936 always false.
3937
3938 We set up things so that the missing bounds are handled in a consistent
3939 manner so neither a missing bound nor "true" and "false" need to be
3940 handled using a special case. */
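
/* A standalone sketch (not part of GCC) checking the example equivalence
   quoted above: unsigned wraparound makes (unsigned) (x - 2) <= 3 reject
   values below 2 (they wrap to huge numbers) as well as values above 5. */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    {
      int by_disjunction = (x == 2 || x == 3 || x == 4 || x == 5);
      int by_range = (x >= 2 && x <= 5);
      int by_subtraction = (unsigned) (x - 2) <= 3u;
      assert (by_disjunction == by_range && by_range == by_subtraction);
    }
  return 0;
}
#endif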
3941
3942 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3943 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3944 and UPPER1_P are nonzero if the respective argument is an upper bound
3945 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3946 must be specified for a comparison. ARG1 will be converted to ARG0's
3947 type if both are specified. */
3948
3949 static tree
3950 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3951 tree arg1, int upper1_p)
3952 {
3953 tree tem;
3954 int result;
3955 int sgn0, sgn1;
3956
3957 /* If neither arg represents infinity, do the normal operation.
3958 Else, if not a comparison, return infinity. Else handle the special
3959 comparison rules. Note that most of the cases below won't occur, but
3960 are handled for consistency. */
3961
3962 if (arg0 != 0 && arg1 != 0)
3963 {
3964 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3965 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3966 STRIP_NOPS (tem);
3967 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3968 }
3969
3970 if (TREE_CODE_CLASS (code) != tcc_comparison)
3971 return 0;
3972
3973 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3974 for neither. In real maths, we cannot assume open ended ranges are
3975 the same. But, this is computer arithmetic, where numbers are finite.
3976 We can therefore replace any unbounded bound with
3977 the value Z, Z being greater than any representable number. This permits
3978 us to treat unbounded ranges as equal. */
3979 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3980 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3981 switch (code)
3982 {
3983 case EQ_EXPR:
3984 result = sgn0 == sgn1;
3985 break;
3986 case NE_EXPR:
3987 result = sgn0 != sgn1;
3988 break;
3989 case LT_EXPR:
3990 result = sgn0 < sgn1;
3991 break;
3992 case LE_EXPR:
3993 result = sgn0 <= sgn1;
3994 break;
3995 case GT_EXPR:
3996 result = sgn0 > sgn1;
3997 break;
3998 case GE_EXPR:
3999 result = sgn0 >= sgn1;
4000 break;
4001 default:
4002 gcc_unreachable ();
4003 }
4004
4005 return constant_boolean_node (result, type);
4006 }
4007 \f
4008 /* Helper routine for make_range. Perform one step for it, return
4009 new expression if the loop should continue or NULL_TREE if it should
4010 stop. */
4011
4012 tree
4013 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4014 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4015 bool *strict_overflow_p)
4016 {
4017 tree arg0_type = TREE_TYPE (arg0);
4018 tree n_low, n_high, low = *p_low, high = *p_high;
4019 int in_p = *p_in_p, n_in_p;
4020
4021 switch (code)
4022 {
4023 case TRUTH_NOT_EXPR:
4024 /* We can only do something if the range is testing for zero. */
4025 if (low == NULL_TREE || high == NULL_TREE
4026 || ! integer_zerop (low) || ! integer_zerop (high))
4027 return NULL_TREE;
4028 *p_in_p = ! in_p;
4029 return arg0;
4030
4031 case EQ_EXPR: case NE_EXPR:
4032 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4033 /* We can only do something if the range is testing for zero
4034 and if the second operand is an integer constant. Note that
4035 saying something is "in" the range we make is done by
4036 complementing IN_P since it will set in the initial case of
4037 being not equal to zero; "out" is leaving it alone. */
4038 if (low == NULL_TREE || high == NULL_TREE
4039 || ! integer_zerop (low) || ! integer_zerop (high)
4040 || TREE_CODE (arg1) != INTEGER_CST)
4041 return NULL_TREE;
4042
4043 switch (code)
4044 {
4045 case NE_EXPR: /* - [c, c] */
4046 low = high = arg1;
4047 break;
4048 case EQ_EXPR: /* + [c, c] */
4049 in_p = ! in_p, low = high = arg1;
4050 break;
4051 case GT_EXPR: /* - [-, c] */
4052 low = 0, high = arg1;
4053 break;
4054 case GE_EXPR: /* + [c, -] */
4055 in_p = ! in_p, low = arg1, high = 0;
4056 break;
4057 case LT_EXPR: /* - [c, -] */
4058 low = arg1, high = 0;
4059 break;
4060 case LE_EXPR: /* + [-, c] */
4061 in_p = ! in_p, low = 0, high = arg1;
4062 break;
4063 default:
4064 gcc_unreachable ();
4065 }
4066
4067 /* If this is an unsigned comparison, we also know that EXP is
4068 greater than or equal to zero. We base the range tests we make
4069 on that fact, so we record it here so we can parse existing
4070 range tests. We test arg0_type since often the return type
4071 of, e.g. EQ_EXPR, is boolean. */
4072 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4073 {
4074 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4075 in_p, low, high, 1,
4076 build_int_cst (arg0_type, 0),
4077 NULL_TREE))
4078 return NULL_TREE;
4079
4080 in_p = n_in_p, low = n_low, high = n_high;
4081
4082 /* If the high bound is missing, but we have a nonzero low
4083 bound, reverse the range so it goes from zero to the low bound
4084 minus 1. */
4085 if (high == 0 && low && ! integer_zerop (low))
4086 {
4087 in_p = ! in_p;
4088 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4089 build_int_cst (TREE_TYPE (low), 1), 0);
4090 low = build_int_cst (arg0_type, 0);
4091 }
4092 }
4093
4094 *p_low = low;
4095 *p_high = high;
4096 *p_in_p = in_p;
4097 return arg0;
4098
4099 case NEGATE_EXPR:
4100 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4101 low and high are non-NULL, then normalize will DTRT. */
4102 if (!TYPE_UNSIGNED (arg0_type)
4103 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4104 {
4105 if (low == NULL_TREE)
4106 low = TYPE_MIN_VALUE (arg0_type);
4107 if (high == NULL_TREE)
4108 high = TYPE_MAX_VALUE (arg0_type);
4109 }
4110
4111 /* (-x) IN [a,b] -> x in [-b, -a] */
4112 n_low = range_binop (MINUS_EXPR, exp_type,
4113 build_int_cst (exp_type, 0),
4114 0, high, 1);
4115 n_high = range_binop (MINUS_EXPR, exp_type,
4116 build_int_cst (exp_type, 0),
4117 0, low, 0);
4118 if (n_high != 0 && TREE_OVERFLOW (n_high))
4119 return NULL_TREE;
4120 goto normalize;
4121
4122 case BIT_NOT_EXPR:
4123 /* ~ X -> -X - 1 */
4124 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4125 build_int_cst (exp_type, 1));
4126
4127 case PLUS_EXPR:
4128 case MINUS_EXPR:
4129 if (TREE_CODE (arg1) != INTEGER_CST)
4130 return NULL_TREE;
4131
4132 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4133 move a constant to the other side. */
4134 if (!TYPE_UNSIGNED (arg0_type)
4135 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4136 return NULL_TREE;
4137
4138 /* If EXP is signed, any overflow in the computation is undefined,
4139 so we don't worry about it so long as our computations on
4140 the bounds don't overflow. For unsigned, overflow is defined
4141 and this is exactly the right thing. */
4142 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4143 arg0_type, low, 0, arg1, 0);
4144 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4145 arg0_type, high, 1, arg1, 0);
4146 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4147 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4148 return NULL_TREE;
4149
4150 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4151 *strict_overflow_p = true;
4152
4153 normalize:
4154 /* Check for an unsigned range which has wrapped around the maximum
4155 value thus making n_high < n_low, and normalize it. */
4156 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4157 {
4158 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4159 build_int_cst (TREE_TYPE (n_high), 1), 0);
4160 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4161 build_int_cst (TREE_TYPE (n_low), 1), 0);
4162
4163 /* If the range is of the form +/- [ x+1, x ], we won't
4164 be able to normalize it. But then, it represents the
4165 whole range or the empty set, so make it
4166 +/- [ -, - ]. */
4167 if (tree_int_cst_equal (n_low, low)
4168 && tree_int_cst_equal (n_high, high))
4169 low = high = 0;
4170 else
4171 in_p = ! in_p;
4172 }
4173 else
4174 low = n_low, high = n_high;
4175
4176 *p_low = low;
4177 *p_high = high;
4178 *p_in_p = in_p;
4179 return arg0;
4180
4181 CASE_CONVERT:
4182 case NON_LVALUE_EXPR:
4183 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4184 return NULL_TREE;
4185
4186 if (! INTEGRAL_TYPE_P (arg0_type)
4187 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4188 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4189 return NULL_TREE;
4190
4191 n_low = low, n_high = high;
4192
4193 if (n_low != 0)
4194 n_low = fold_convert_loc (loc, arg0_type, n_low);
4195
4196 if (n_high != 0)
4197 n_high = fold_convert_loc (loc, arg0_type, n_high);
4198
4199 /* If we're converting arg0 from an unsigned type to exp,
4200 a signed type, we will be doing the comparison as unsigned.
4201 The tests above have already verified that LOW and HIGH
4202 are both positive.
4203
4204 So we have to ensure that we will handle large unsigned
4205 values the same way that the current signed bounds treat
4206 negative values. */
4207
4208 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4209 {
4210 tree high_positive;
4211 tree equiv_type;
4212 /* For fixed-point modes, we need to pass the saturating flag
4213 as the 2nd parameter. */
4214 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4215 equiv_type
4216 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4217 TYPE_SATURATING (arg0_type));
4218 else
4219 equiv_type
4220 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4221
4222 /* A range without an upper bound is, naturally, unbounded.
4223 Since convert would have cropped a very large value, use
4224 the max value for the destination type. */
4225 high_positive
4226 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4227 : TYPE_MAX_VALUE (arg0_type);
4228
4229 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4230 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4231 fold_convert_loc (loc, arg0_type,
4232 high_positive),
4233 build_int_cst (arg0_type, 1));
4234
4235 /* If the low bound is specified, "and" the range with the
4236 range for which the original unsigned value will be
4237 positive. */
4238 if (low != 0)
4239 {
4240 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4241 1, fold_convert_loc (loc, arg0_type,
4242 integer_zero_node),
4243 high_positive))
4244 return NULL_TREE;
4245
4246 in_p = (n_in_p == in_p);
4247 }
4248 else
4249 {
4250 /* Otherwise, "or" the range with the range of the input
4251 that will be interpreted as negative. */
4252 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4253 1, fold_convert_loc (loc, arg0_type,
4254 integer_zero_node),
4255 high_positive))
4256 return NULL_TREE;
4257
4258 in_p = (in_p != n_in_p);
4259 }
4260 }
4261
4262 *p_low = n_low;
4263 *p_high = n_high;
4264 *p_in_p = in_p;
4265 return arg0;
4266
4267 default:
4268 return NULL_TREE;
4269 }
4270 }
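
/* Standalone sketch (not GCC code) of the PLUS_EXPR/"normalize" steps
   above: for unsigned char X, asking whether X + 10 lies in [0, 19]
   wraps around the type, and the normalized answer is "X is NOT in
   [10, 245]" -- the bounds are swapped and IN_P is flipped. */
#if 0
#include <assert.h>

int
main (void)
{
  for (int i = 0; i <= 255; i++)
    {
      unsigned char x = (unsigned char) i;
      int in_shifted_range = (unsigned char) (x + 10) <= 19;
      int outside_wrapped = ! (x >= 10 && x <= 245);
      assert (in_shifted_range == outside_wrapped);
    }
  return 0;
}
#endif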
4271
4272 /* Given EXP, a logical expression, set the range it is testing into
4273 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4274 actually being tested. *PLOW and *PHIGH will be made of the same
4275 type as the returned expression. If EXP is not a comparison, we
4276 will most likely not be returning a useful value and range. Set
4277 *STRICT_OVERFLOW_P to true if the return value is only valid
4278 because signed overflow is undefined; otherwise, do not change
4279 *STRICT_OVERFLOW_P. */
4280
4281 tree
4282 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4283 bool *strict_overflow_p)
4284 {
4285 enum tree_code code;
4286 tree arg0, arg1 = NULL_TREE;
4287 tree exp_type, nexp;
4288 int in_p;
4289 tree low, high;
4290 location_t loc = EXPR_LOCATION (exp);
4291
4292 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4293 and see if we can refine the range. Some of the cases below may not
4294 happen, but it doesn't seem worth worrying about this. We "continue"
4295 the outer loop when we've changed something; otherwise we "break"
4296 the switch, which will "break" the while. */
4297
4298 in_p = 0;
4299 low = high = build_int_cst (TREE_TYPE (exp), 0);
4300
4301 while (1)
4302 {
4303 code = TREE_CODE (exp);
4304 exp_type = TREE_TYPE (exp);
4305 arg0 = NULL_TREE;
4306
4307 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4308 {
4309 if (TREE_OPERAND_LENGTH (exp) > 0)
4310 arg0 = TREE_OPERAND (exp, 0);
4311 if (TREE_CODE_CLASS (code) == tcc_binary
4312 || TREE_CODE_CLASS (code) == tcc_comparison
4313 || (TREE_CODE_CLASS (code) == tcc_expression
4314 && TREE_OPERAND_LENGTH (exp) > 1))
4315 arg1 = TREE_OPERAND (exp, 1);
4316 }
4317 if (arg0 == NULL_TREE)
4318 break;
4319
4320 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4321 &high, &in_p, strict_overflow_p);
4322 if (nexp == NULL_TREE)
4323 break;
4324 exp = nexp;
4325 }
4326
4327 /* If EXP is a constant, we can evaluate whether this is true or false. */
4328 if (TREE_CODE (exp) == INTEGER_CST)
4329 {
4330 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4331 exp, 0, low, 0))
4332 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4333 exp, 1, high, 1)));
4334 low = high = 0;
4335 exp = 0;
4336 }
4337
4338 *pin_p = in_p, *plow = low, *phigh = high;
4339 return exp;
4340 }
4341 \f
4342 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4343 type, TYPE, return an expression to test if EXP is in (or out of, depending
4344 on IN_P) the range. Return 0 if the test couldn't be created. */
4345
4346 tree
4347 build_range_check (location_t loc, tree type, tree exp, int in_p,
4348 tree low, tree high)
4349 {
4350 tree etype = TREE_TYPE (exp), value;
4351
4352 #ifdef HAVE_canonicalize_funcptr_for_compare
4353 /* Disable this optimization for function pointer expressions
4354 on targets that require function pointer canonicalization. */
4355 if (HAVE_canonicalize_funcptr_for_compare
4356 && TREE_CODE (etype) == POINTER_TYPE
4357 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4358 return NULL_TREE;
4359 #endif
4360
4361 if (! in_p)
4362 {
4363 value = build_range_check (loc, type, exp, 1, low, high);
4364 if (value != 0)
4365 return invert_truthvalue_loc (loc, value);
4366
4367 return 0;
4368 }
4369
4370 if (low == 0 && high == 0)
4371 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4372
4373 if (low == 0)
4374 return fold_build2_loc (loc, LE_EXPR, type, exp,
4375 fold_convert_loc (loc, etype, high));
4376
4377 if (high == 0)
4378 return fold_build2_loc (loc, GE_EXPR, type, exp,
4379 fold_convert_loc (loc, etype, low));
4380
4381 if (operand_equal_p (low, high, 0))
4382 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4383 fold_convert_loc (loc, etype, low));
4384
4385 if (integer_zerop (low))
4386 {
4387 if (! TYPE_UNSIGNED (etype))
4388 {
4389 etype = unsigned_type_for (etype);
4390 high = fold_convert_loc (loc, etype, high);
4391 exp = fold_convert_loc (loc, etype, exp);
4392 }
4393 return build_range_check (loc, type, exp, 1, 0, high);
4394 }
4395
4396 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4397 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4398 {
4399 int prec = TYPE_PRECISION (etype);
4400
4401 if (wi::mask (prec - 1, false, prec) == high)
4402 {
4403 if (TYPE_UNSIGNED (etype))
4404 {
4405 tree signed_etype = signed_type_for (etype);
4406 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4407 etype
4408 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4409 else
4410 etype = signed_etype;
4411 exp = fold_convert_loc (loc, etype, exp);
4412 }
4413 return fold_build2_loc (loc, GT_EXPR, type, exp,
4414 build_int_cst (etype, 0));
4415 }
4416 }
4417
4418 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4419 This requires wrap-around arithmetic for the type of the expression.
4420 First make sure that arithmetic in this type is valid, then make sure
4421 that it wraps around. */
4422 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4423 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4424 TYPE_UNSIGNED (etype));
4425
4426 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4427 {
4428 tree utype, minv, maxv;
4429
4430 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4431 for the type in question, as we rely on this here. */
4432 utype = unsigned_type_for (etype);
4433 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4434 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4435 build_int_cst (TREE_TYPE (maxv), 1), 1);
4436 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4437
4438 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4439 minv, 1, maxv, 1)))
4440 etype = utype;
4441 else
4442 return 0;
4443 }
4444
4445 high = fold_convert_loc (loc, etype, high);
4446 low = fold_convert_loc (loc, etype, low);
4447 exp = fold_convert_loc (loc, etype, exp);
4448
4449 value = const_binop (MINUS_EXPR, high, low);
4450
4451
4452 if (POINTER_TYPE_P (etype))
4453 {
4454 if (value != 0 && !TREE_OVERFLOW (value))
4455 {
4456 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4457 return build_range_check (loc, type,
4458 fold_build_pointer_plus_loc (loc, exp, low),
4459 1, build_int_cst (etype, 0), value);
4460 }
4461 return 0;
4462 }
4463
4464 if (value != 0 && !TREE_OVERFLOW (value))
4465 return build_range_check (loc, type,
4466 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4467 1, build_int_cst (etype, 0), value);
4468
4469 return 0;
4470 }
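
/* Standalone sketch (not GCC code) of two rewrites used above, assuming
   8-bit chars and the usual two's-complement conversions: (c >= 1 &&
   c <= 127) becomes (signed char) c > 0, and a general (c >= LOW &&
   c <= HIGH) becomes a single unsigned compare against HIGH - LOW. */
#if 0
#include <assert.h>

int
main (void)
{
  for (int i = 0; i <= 255; i++)
    {
      unsigned char c = (unsigned char) i;
      assert ((c >= 1 && c <= 127) == ((signed char) c > 0));
      /* LOW = 32, HIGH = 126: one subtraction and one compare.  */
      assert ((c >= 32 && c <= 126)
	      == ((unsigned char) (c - 32) <= 126 - 32));
    }
  return 0;
}
#endif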
4471 \f
4472 /* Return the predecessor of VAL in its type, handling the infinite case. */
4473
4474 static tree
4475 range_predecessor (tree val)
4476 {
4477 tree type = TREE_TYPE (val);
4478
4479 if (INTEGRAL_TYPE_P (type)
4480 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4481 return 0;
4482 else
4483 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4484 build_int_cst (TREE_TYPE (val), 1), 0);
4485 }
4486
4487 /* Return the successor of VAL in its type, handling the infinite case. */
4488
4489 static tree
4490 range_successor (tree val)
4491 {
4492 tree type = TREE_TYPE (val);
4493
4494 if (INTEGRAL_TYPE_P (type)
4495 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4496 return 0;
4497 else
4498 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4499 build_int_cst (TREE_TYPE (val), 1), 0);
4500 }
4501
4502 /* Given two ranges, see if we can merge them into one. Return 1 if we
4503 can, 0 if we can't. Set the output range into the specified parameters. */
4504
4505 bool
4506 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4507 tree high0, int in1_p, tree low1, tree high1)
4508 {
4509 int no_overlap;
4510 int subset;
4511 int temp;
4512 tree tem;
4513 int in_p;
4514 tree low, high;
4515 int lowequal = ((low0 == 0 && low1 == 0)
4516 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4517 low0, 0, low1, 0)));
4518 int highequal = ((high0 == 0 && high1 == 0)
4519 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4520 high0, 1, high1, 1)));
4521
4522 /* Make range 0 be the range that starts first, or ends last if they
4523 start at the same value. Swap them if it isn't. */
4524 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4525 low0, 0, low1, 0))
4526 || (lowequal
4527 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4528 high1, 1, high0, 1))))
4529 {
4530 temp = in0_p, in0_p = in1_p, in1_p = temp;
4531 tem = low0, low0 = low1, low1 = tem;
4532 tem = high0, high0 = high1, high1 = tem;
4533 }
4534
4535 /* Now flag two cases, whether the ranges are disjoint or whether the
4536 second range is totally subsumed in the first. Note that the tests
4537 below are simplified by the ones above. */
4538 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4539 high0, 1, low1, 0));
4540 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4541 high1, 1, high0, 1));
4542
4543 /* We now have four cases, depending on whether we are including or
4544 excluding the two ranges. */
4545 if (in0_p && in1_p)
4546 {
4547 /* If they don't overlap, the result is false. If the second range
4548 is a subset it is the result. Otherwise, the range is from the start
4549 of the second to the end of the first. */
4550 if (no_overlap)
4551 in_p = 0, low = high = 0;
4552 else if (subset)
4553 in_p = 1, low = low1, high = high1;
4554 else
4555 in_p = 1, low = low1, high = high0;
4556 }
4557
4558 else if (in0_p && ! in1_p)
4559 {
4560 /* If they don't overlap, the result is the first range. If they are
4561 equal, the result is false. If the second range is a subset of the
4562 first, and the ranges begin at the same place, we go from just after
4563 the end of the second range to the end of the first. If the second
4564 range is not a subset of the first, or if it is a subset and both
4565 ranges end at the same place, the range starts at the start of the
4566 first range and ends just before the second range.
4567 Otherwise, we can't describe this as a single range. */
4568 if (no_overlap)
4569 in_p = 1, low = low0, high = high0;
4570 else if (lowequal && highequal)
4571 in_p = 0, low = high = 0;
4572 else if (subset && lowequal)
4573 {
4574 low = range_successor (high1);
4575 high = high0;
4576 in_p = 1;
4577 if (low == 0)
4578 {
4579 /* We are in the weird situation where high0 > high1 but
4580 high1 has no successor. Punt. */
4581 return 0;
4582 }
4583 }
4584 else if (! subset || highequal)
4585 {
4586 low = low0;
4587 high = range_predecessor (low1);
4588 in_p = 1;
4589 if (high == 0)
4590 {
4591 /* low0 < low1 but low1 has no predecessor. Punt. */
4592 return 0;
4593 }
4594 }
4595 else
4596 return 0;
4597 }
4598
4599 else if (! in0_p && in1_p)
4600 {
4601 /* If they don't overlap, the result is the second range. If the second
4602 is a subset of the first, the result is false. Otherwise,
4603 the range starts just after the first range and ends at the
4604 end of the second. */
4605 if (no_overlap)
4606 in_p = 1, low = low1, high = high1;
4607 else if (subset || highequal)
4608 in_p = 0, low = high = 0;
4609 else
4610 {
4611 low = range_successor (high0);
4612 high = high1;
4613 in_p = 1;
4614 if (low == 0)
4615 {
4616 /* high1 > high0 but high0 has no successor. Punt. */
4617 return 0;
4618 }
4619 }
4620 }
4621
4622 else
4623 {
4624 /* The case where we are excluding both ranges. Here the complex case
4625 is if they don't overlap. In that case, the only time we have a
4626 range is if they are adjacent. If the second is a subset of the
4627 first, the result is the first. Otherwise, the range to exclude
4628 starts at the beginning of the first range and ends at the end of the
4629 second. */
4630 if (no_overlap)
4631 {
4632 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4633 range_successor (high0),
4634 1, low1, 0)))
4635 in_p = 0, low = low0, high = high1;
4636 else
4637 {
4638 /* Canonicalize - [min, x] into - [-, x]. */
4639 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4640 switch (TREE_CODE (TREE_TYPE (low0)))
4641 {
4642 case ENUMERAL_TYPE:
4643 if (TYPE_PRECISION (TREE_TYPE (low0))
4644 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4645 break;
4646 /* FALLTHROUGH */
4647 case INTEGER_TYPE:
4648 if (tree_int_cst_equal (low0,
4649 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4650 low0 = 0;
4651 break;
4652 case POINTER_TYPE:
4653 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4654 && integer_zerop (low0))
4655 low0 = 0;
4656 break;
4657 default:
4658 break;
4659 }
4660
4661 /* Canonicalize - [x, max] into - [x, -]. */
4662 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4663 switch (TREE_CODE (TREE_TYPE (high1)))
4664 {
4665 case ENUMERAL_TYPE:
4666 if (TYPE_PRECISION (TREE_TYPE (high1))
4667 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4668 break;
4669 /* FALLTHROUGH */
4670 case INTEGER_TYPE:
4671 if (tree_int_cst_equal (high1,
4672 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4673 high1 = 0;
4674 break;
4675 case POINTER_TYPE:
4676 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4677 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4678 high1, 1,
4679 build_int_cst (TREE_TYPE (high1), 1),
4680 1)))
4681 high1 = 0;
4682 break;
4683 default:
4684 break;
4685 }
4686
4687 /* The ranges might be also adjacent between the maximum and
4688 minimum values of the given type. For
4689 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4690 return + [x + 1, y - 1]. */
4691 if (low0 == 0 && high1 == 0)
4692 {
4693 low = range_successor (high0);
4694 high = range_predecessor (low1);
4695 if (low == 0 || high == 0)
4696 return 0;
4697
4698 in_p = 1;
4699 }
4700 else
4701 return 0;
4702 }
4703 }
4704 else if (subset)
4705 in_p = 0, low = low0, high = high0;
4706 else
4707 in_p = 0, low = low0, high = high1;
4708 }
4709
4710 *pin_p = in_p, *plow = low, *phigh = high;
4711 return 1;
4712 }
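
/* Standalone sketch (not part of GCC) of two merge_ranges cases: the
   intersection of + [2, 5] and + [4, 9] is + [4, 5], and excluding both
   of - [-, 3] and - [8, -] (ranges adjacent through the type's extremes)
   gives + [4, 7], i.e. + [3 + 1, 8 - 1]. */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -50; x <= 50; x++)
    {
      assert (((x >= 2 && x <= 5) && (x >= 4 && x <= 9))
	      == (x >= 4 && x <= 5));
      assert ((!(x <= 3) && !(x >= 8)) == (x >= 4 && x <= 7));
    }
  return 0;
}
#endif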
4713 \f
4714
4715 /* Subroutine of fold, looking inside expressions of the form
4716 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4717 of the COND_EXPR. This function is being used also to optimize
4718 A op B ? C : A, by reversing the comparison first.
4719
4720 Return a folded expression whose code is not a COND_EXPR
4721 anymore, or NULL_TREE if no folding opportunity is found. */
4722
4723 static tree
4724 fold_cond_expr_with_comparison (location_t loc, tree type,
4725 tree arg0, tree arg1, tree arg2)
4726 {
4727 enum tree_code comp_code = TREE_CODE (arg0);
4728 tree arg00 = TREE_OPERAND (arg0, 0);
4729 tree arg01 = TREE_OPERAND (arg0, 1);
4730 tree arg1_type = TREE_TYPE (arg1);
4731 tree tem;
4732
4733 STRIP_NOPS (arg1);
4734 STRIP_NOPS (arg2);
4735
4736 /* If we have A op 0 ? A : -A, consider applying the following
4737 transformations:
4738
4739 A == 0? A : -A same as -A
4740 A != 0? A : -A same as A
4741 A >= 0? A : -A same as abs (A)
4742 A > 0? A : -A same as abs (A)
4743 A <= 0? A : -A same as -abs (A)
4744 A < 0? A : -A same as -abs (A)
4745
4746 None of these transformations work for modes with signed
4747 zeros. If A is +/-0, the first two transformations will
4748 change the sign of the result (from +0 to -0, or vice
4749 versa). The last four will fix the sign of the result,
4750 even though the original expressions could be positive or
4751 negative, depending on the sign of A.
4752
4753 Note that all these transformations are correct if A is
4754 NaN, since the two alternatives (A and -A) are also NaNs. */
4755 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4756 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4757 ? real_zerop (arg01)
4758 : integer_zerop (arg01))
4759 && ((TREE_CODE (arg2) == NEGATE_EXPR
4760 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4761 /* In the case that A is of the form X-Y, '-A' (arg2) may
4762 have already been folded to Y-X, check for that. */
4763 || (TREE_CODE (arg1) == MINUS_EXPR
4764 && TREE_CODE (arg2) == MINUS_EXPR
4765 && operand_equal_p (TREE_OPERAND (arg1, 0),
4766 TREE_OPERAND (arg2, 1), 0)
4767 && operand_equal_p (TREE_OPERAND (arg1, 1),
4768 TREE_OPERAND (arg2, 0), 0))))
4769 switch (comp_code)
4770 {
4771 case EQ_EXPR:
4772 case UNEQ_EXPR:
4773 tem = fold_convert_loc (loc, arg1_type, arg1);
4774 return pedantic_non_lvalue_loc (loc,
4775 fold_convert_loc (loc, type,
4776 negate_expr (tem)));
4777 case NE_EXPR:
4778 case LTGT_EXPR:
4779 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4780 case UNGE_EXPR:
4781 case UNGT_EXPR:
4782 if (flag_trapping_math)
4783 break;
4784 /* Fall through. */
4785 case GE_EXPR:
4786 case GT_EXPR:
4787 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4788 arg1 = fold_convert_loc (loc, signed_type_for
4789 (TREE_TYPE (arg1)), arg1);
4790 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4791 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4792 case UNLE_EXPR:
4793 case UNLT_EXPR:
4794 if (flag_trapping_math)
4795 break;
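  /* Fall through. */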
4796 case LE_EXPR:
4797 case LT_EXPR:
4798 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4799 arg1 = fold_convert_loc (loc, signed_type_for
4800 (TREE_TYPE (arg1)), arg1);
4801 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4802 return negate_expr (fold_convert_loc (loc, type, tem));
4803 default:
4804 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4805 break;
4806 }
4807
4808 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4809 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4810 both transformations are correct when A is NaN: A != 0
4811 is then true, and A == 0 is false. */
4812
4813 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4814 && integer_zerop (arg01) && integer_zerop (arg2))
4815 {
4816 if (comp_code == NE_EXPR)
4817 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4818 else if (comp_code == EQ_EXPR)
4819 return build_zero_cst (type);
4820 }
4821
4822 /* Try some transformations of A op B ? A : B.
4823
4824 A == B? A : B same as B
4825 A != B? A : B same as A
4826 A >= B? A : B same as max (A, B)
4827 A > B? A : B same as max (B, A)
4828 A <= B? A : B same as min (A, B)
4829 A < B? A : B same as min (B, A)
4830
4831 As above, these transformations don't work in the presence
4832 of signed zeros. For example, if A and B are zeros of
4833 opposite sign, the first two transformations will change
4834 the sign of the result. In the last four, the original
4835 expressions give different results for (A=+0, B=-0) and
4836 (A=-0, B=+0), but the transformed expressions do not.
4837
4838 The first two transformations are correct if either A or B
4839 is a NaN. In the first transformation, the condition will
4840 be false, and B will indeed be chosen. In the case of the
4841 second transformation, the condition A != B will be true,
4842 and A will be chosen.
4843
4844 The conversions to max() and min() are not correct if B is
4845 a number and A is not. The conditions in the original
4846 expressions will be false, so all four give B. The min()
4847 and max() versions would give a NaN instead. */
4848 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4849 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4850 /* Avoid these transformations if the COND_EXPR may be used
4851 as an lvalue in the C++ front-end. PR c++/19199. */
4852 && (in_gimple_form
4853 || VECTOR_TYPE_P (type)
4854 || (! lang_GNU_CXX ()
4855 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4856 || ! maybe_lvalue_p (arg1)
4857 || ! maybe_lvalue_p (arg2)))
4858 {
4859 tree comp_op0 = arg00;
4860 tree comp_op1 = arg01;
4861 tree comp_type = TREE_TYPE (comp_op0);
4862
4863 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4864 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4865 {
4866 comp_type = type;
4867 comp_op0 = arg1;
4868 comp_op1 = arg2;
4869 }
4870
4871 switch (comp_code)
4872 {
4873 case EQ_EXPR:
4874 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4875 case NE_EXPR:
4876 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4877 case LE_EXPR:
4878 case LT_EXPR:
4879 case UNLE_EXPR:
4880 case UNLT_EXPR:
4881 /* In C++ a ?: expression can be an lvalue, so put the
4882 operand which will be used if they are equal first
4883 so that we can convert this back to the
4884 corresponding COND_EXPR. */
4885 if (!HONOR_NANS (element_mode (arg1)))
4886 {
4887 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4888 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4889 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4890 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4891 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4892 comp_op1, comp_op0);
4893 return pedantic_non_lvalue_loc (loc,
4894 fold_convert_loc (loc, type, tem));
4895 }
4896 break;
4897 case GE_EXPR:
4898 case GT_EXPR:
4899 case UNGE_EXPR:
4900 case UNGT_EXPR:
4901 if (!HONOR_NANS (element_mode (arg1)))
4902 {
4903 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4904 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4905 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4906 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4907 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4908 comp_op1, comp_op0);
4909 return pedantic_non_lvalue_loc (loc,
4910 fold_convert_loc (loc, type, tem));
4911 }
4912 break;
4913 case UNEQ_EXPR:
4914 if (!HONOR_NANS (element_mode (arg1)))
4915 return pedantic_non_lvalue_loc (loc,
4916 fold_convert_loc (loc, type, arg2));
4917 break;
4918 case LTGT_EXPR:
4919 if (!HONOR_NANS (element_mode (arg1)))
4920 return pedantic_non_lvalue_loc (loc,
4921 fold_convert_loc (loc, type, arg1));
4922 break;
4923 default:
4924 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4925 break;
4926 }
4927 }
4928
4929 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4930 we might still be able to simplify this. For example,
4931 if C1 is one less or one more than C2, this might have started
4932 out as a MIN or MAX and been transformed by this function.
4933 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4934
4935 if (INTEGRAL_TYPE_P (type)
4936 && TREE_CODE (arg01) == INTEGER_CST
4937 && TREE_CODE (arg2) == INTEGER_CST)
4938 switch (comp_code)
4939 {
4940 case EQ_EXPR:
4941 if (TREE_CODE (arg1) == INTEGER_CST)
4942 break;
4943 /* We can replace A with C1 in this case. */
4944 arg1 = fold_convert_loc (loc, type, arg01);
4945 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4946
4947 case LT_EXPR:
4948 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4949 MIN_EXPR, to preserve the signedness of the comparison. */
4950 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4951 OEP_ONLY_CONST)
4952 && operand_equal_p (arg01,
4953 const_binop (PLUS_EXPR, arg2,
4954 build_int_cst (type, 1)),
4955 OEP_ONLY_CONST))
4956 {
4957 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4958 fold_convert_loc (loc, TREE_TYPE (arg00),
4959 arg2));
4960 return pedantic_non_lvalue_loc (loc,
4961 fold_convert_loc (loc, type, tem));
4962 }
4963 break;
4964
4965 case LE_EXPR:
4966 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4967 as above. */
4968 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4969 OEP_ONLY_CONST)
4970 && operand_equal_p (arg01,
4971 const_binop (MINUS_EXPR, arg2,
4972 build_int_cst (type, 1)),
4973 OEP_ONLY_CONST))
4974 {
4975 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4976 fold_convert_loc (loc, TREE_TYPE (arg00),
4977 arg2));
4978 return pedantic_non_lvalue_loc (loc,
4979 fold_convert_loc (loc, type, tem));
4980 }
4981 break;
4982
4983 case GT_EXPR:
4984 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4985 MAX_EXPR, to preserve the signedness of the comparison. */
4986 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4987 OEP_ONLY_CONST)
4988 && operand_equal_p (arg01,
4989 const_binop (MINUS_EXPR, arg2,
4990 build_int_cst (type, 1)),
4991 OEP_ONLY_CONST))
4992 {
4993 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4994 fold_convert_loc (loc, TREE_TYPE (arg00),
4995 arg2));
4996 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4997 }
4998 break;
4999
5000 case GE_EXPR:
5001 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5002 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5003 OEP_ONLY_CONST)
5004 && operand_equal_p (arg01,
5005 const_binop (PLUS_EXPR, arg2,
5006 build_int_cst (type, 1)),
5007 OEP_ONLY_CONST))
5008 {
5009 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5010 fold_convert_loc (loc, TREE_TYPE (arg00),
5011 arg2));
5012 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5013 }
5014 break;
5015 case NE_EXPR:
5016 break;
5017 default:
5018 gcc_unreachable ();
5019 }
5020
5021 return NULL_TREE;
5022 }
5023
5024
5025 \f
5026 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5027 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5028 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5029 false) >= 2)
5030 #endif
5031
5032 /* EXP is some logical combination of boolean tests. See if we can
5033 merge it into some range test. Return the new tree if so. */
5034
5035 static tree
5036 fold_range_test (location_t loc, enum tree_code code, tree type,
5037 tree op0, tree op1)
5038 {
5039 int or_op = (code == TRUTH_ORIF_EXPR
5040 || code == TRUTH_OR_EXPR);
5041 int in0_p, in1_p, in_p;
5042 tree low0, low1, low, high0, high1, high;
5043 bool strict_overflow_p = false;
5044 tree tem, lhs, rhs;
5045 const char * const warnmsg = G_("assuming signed overflow does not occur "
5046 "when simplifying range test");
5047
5048 if (!INTEGRAL_TYPE_P (type))
5049 return 0;
5050
5051 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5052 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5053
5054 /* If this is an OR operation, invert both sides; we will invert
5055 again at the end. */
5056 if (or_op)
5057 in0_p = ! in0_p, in1_p = ! in1_p;
5058
5059 /* If both expressions are the same, if we can merge the ranges, and we
5060 can build the range test, return it or it inverted. If one of the
5061 ranges is always true or always false, consider it to be the same
5062 expression as the other. */
5063 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5064 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5065 in1_p, low1, high1)
5066 && 0 != (tem = (build_range_check (loc, type,
5067 lhs != 0 ? lhs
5068 : rhs != 0 ? rhs : integer_zero_node,
5069 in_p, low, high))))
5070 {
5071 if (strict_overflow_p)
5072 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5073 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5074 }
5075
5076 /* On machines where the branch cost is expensive, if this is a
5077 short-circuited branch and the underlying object on both sides
5078 is the same, make a non-short-circuit operation. */
5079 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5080 && lhs != 0 && rhs != 0
5081 && (code == TRUTH_ANDIF_EXPR
5082 || code == TRUTH_ORIF_EXPR)
5083 && operand_equal_p (lhs, rhs, 0))
5084 {
5085 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5086 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5087 which cases we can't do this. */
5088 if (simple_operand_p (lhs))
5089 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5090 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5091 type, op0, op1);
5092
5093 else if (!lang_hooks.decls.global_bindings_p ()
5094 && !CONTAINS_PLACEHOLDER_P (lhs))
5095 {
5096 tree common = save_expr (lhs);
5097
5098 if (0 != (lhs = build_range_check (loc, type, common,
5099 or_op ? ! in0_p : in0_p,
5100 low0, high0))
5101 && (0 != (rhs = build_range_check (loc, type, common,
5102 or_op ? ! in1_p : in1_p,
5103 low1, high1))))
5104 {
5105 if (strict_overflow_p)
5106 fold_overflow_warning (warnmsg,
5107 WARN_STRICT_OVERFLOW_COMPARISON);
5108 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5109 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5110 type, lhs, rhs);
5111 }
5112 }
5113 }
5114
5115 return 0;
5116 }
5117 \f
5118 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5119 bit value. Arrange things so the extra bits will be set to zero if and
5120 only if C is sign-extended to its full width. If MASK is nonzero,
5121 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5122
5123 static tree
5124 unextend (tree c, int p, int unsignedp, tree mask)
5125 {
5126 tree type = TREE_TYPE (c);
5127 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5128 tree temp;
5129
5130 if (p == modesize || unsignedp)
5131 return c;
5132
5133 /* We work by getting just the sign bit into the low-order bit, then
5134 into the high-order bit, then sign-extend. We then XOR that value
5135 with C. */
5136 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5137
5138 /* We must use a signed type in order to get an arithmetic right shift.
5139 However, we must also avoid introducing accidental overflows, so that
5140 a subsequent call to integer_zerop will work. Hence we must
5141 do the type conversion here. At this point, the constant is either
5142 zero or one, and the conversion to a signed type can never overflow.
5143 We could get an overflow if this conversion is done anywhere else. */
5144 if (TYPE_UNSIGNED (type))
5145 temp = fold_convert (signed_type_for (type), temp);
5146
5147 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5148 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5149 if (mask != 0)
5150 temp = const_binop (BIT_AND_EXPR, temp,
5151 fold_convert (TREE_TYPE (c), mask));
5152 /* If necessary, convert the type back to match the type of C. */
5153 if (TYPE_UNSIGNED (type))
5154 temp = fold_convert (type, temp);
5155
5156 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5157 }
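
/* Standalone sketch (not GCC code) of the shift pair used above: pushing
   a P-bit value to the top of a signed word and arithmetic-shifting it
   back down sign-extends it. Right-shifting a negative value is assumed
   here to be arithmetic, as GCC itself documents for signed >>. */
#if 0
#include <assert.h>

static int
sign_extend (unsigned int c, int p)
{
  return (int) (c << (32 - p)) >> (32 - p);
}

int
main (void)
{
  assert (sign_extend (0xA, 4) == -6);	/* 1010 is negative in 4 bits.  */
  assert (sign_extend (0x5, 4) == 5);	/* 0101 stays positive.  */
  return 0;
}
#endif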
5158 \f
5159 /* For an expression that has the form
5160 (A && B) || ~B
5161 or
5162 (A || B) && ~B,
5163 we can drop one of the inner expressions and simplify to
5164 A || ~B
5165 or
5166 A && ~B
5167 LOC is the location of the resulting expression. OP is the inner
5168 logical operation; the left-hand side in the examples above, while CMPOP
5169 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5170 removing a condition that guards another, as in
5171 (A != NULL && A->...) || A == NULL
5172 which we must not transform. If RHS_ONLY is true, only eliminate the
5173 right-most operand of the inner logical operation. */
5174
5175 static tree
5176 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5177 bool rhs_only)
5178 {
5179 tree type = TREE_TYPE (cmpop);
5180 enum tree_code code = TREE_CODE (cmpop);
5181 enum tree_code truthop_code = TREE_CODE (op);
5182 tree lhs = TREE_OPERAND (op, 0);
5183 tree rhs = TREE_OPERAND (op, 1);
5184 tree orig_lhs = lhs, orig_rhs = rhs;
5185 enum tree_code rhs_code = TREE_CODE (rhs);
5186 enum tree_code lhs_code = TREE_CODE (lhs);
5187 enum tree_code inv_code;
5188
5189 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5190 return NULL_TREE;
5191
5192 if (TREE_CODE_CLASS (code) != tcc_comparison)
5193 return NULL_TREE;
5194
5195 if (rhs_code == truthop_code)
5196 {
5197 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5198 if (newrhs != NULL_TREE)
5199 {
5200 rhs = newrhs;
5201 rhs_code = TREE_CODE (rhs);
5202 }
5203 }
5204 if (lhs_code == truthop_code && !rhs_only)
5205 {
5206 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5207 if (newlhs != NULL_TREE)
5208 {
5209 lhs = newlhs;
5210 lhs_code = TREE_CODE (lhs);
5211 }
5212 }
5213
5214 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5215 if (inv_code == rhs_code
5216 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5217 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5218 return lhs;
5219 if (!rhs_only && inv_code == lhs_code
5220 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5221 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5222 return rhs;
5223 if (rhs != orig_rhs || lhs != orig_lhs)
5224 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5225 lhs, rhs);
5226 return NULL_TREE;
5227 }
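
/* A standalone truth-table check (not part of GCC) of the simplification
   documented above: dropping the inner operand that is the opposite of
   the outer comparison never changes the result. */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
	assert (((a && b) || !b) == (a || !b));
	assert (((a || b) && !b) == (a && !b));
      }
  return 0;
}
#endif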
5228
5229 /* Find ways of folding logical expressions of LHS and RHS:
5230 Try to merge two comparisons to the same innermost item.
5231 Look for range tests like "ch >= '0' && ch <= '9'".
5232 Look for combinations of simple terms on machines with expensive branches
5233 and evaluate the RHS unconditionally.
5234
5235 For example, if we have p->a == 2 && p->b == 4 and we can make an
5236 object large enough to span both A and B, we can do this with a comparison
5237 against the object ANDed with the a mask.
5238
5239 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5240 operations to do this with one comparison.
5241
5242 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5243 function and the one above.
5244
5245 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5246 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5247
5248 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5249 two operands.
5250
5251 We return the simplified tree or 0 if no optimization is possible. */
5252
5253 static tree
5254 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5255 tree lhs, tree rhs)
5256 {
5257 /* If this is the "or" of two comparisons, we can do something if
5258 the comparisons are NE_EXPR. If this is the "and", we can do something
5259 if the comparisons are EQ_EXPR. I.e.,
5260 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5261
5262 WANTED_CODE is this operation code. For single bit fields, we can
5263 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5264 comparison for one-bit fields. */
5265
5266 enum tree_code wanted_code;
5267 enum tree_code lcode, rcode;
5268 tree ll_arg, lr_arg, rl_arg, rr_arg;
5269 tree ll_inner, lr_inner, rl_inner, rr_inner;
5270 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5271 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5272 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5273 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5274 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5275 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5276 machine_mode lnmode, rnmode;
5277 tree ll_mask, lr_mask, rl_mask, rr_mask;
5278 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5279 tree l_const, r_const;
5280 tree lntype, rntype, result;
5281 HOST_WIDE_INT first_bit, end_bit;
5282 int volatilep;
5283
5284 /* Start by getting the comparison codes. Fail if anything is volatile.
5285 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5286 it were surrounded with a NE_EXPR. */
5287
5288 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5289 return 0;
5290
5291 lcode = TREE_CODE (lhs);
5292 rcode = TREE_CODE (rhs);
5293
5294 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5295 {
5296 lhs = build2 (NE_EXPR, truth_type, lhs,
5297 build_int_cst (TREE_TYPE (lhs), 0));
5298 lcode = NE_EXPR;
5299 }
5300
5301 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5302 {
5303 rhs = build2 (NE_EXPR, truth_type, rhs,
5304 build_int_cst (TREE_TYPE (rhs), 0));
5305 rcode = NE_EXPR;
5306 }
5307
5308 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5309 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5310 return 0;
5311
5312 ll_arg = TREE_OPERAND (lhs, 0);
5313 lr_arg = TREE_OPERAND (lhs, 1);
5314 rl_arg = TREE_OPERAND (rhs, 0);
5315 rr_arg = TREE_OPERAND (rhs, 1);
5316
5317 /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
5318 if (simple_operand_p (ll_arg)
5319 && simple_operand_p (lr_arg))
5320 {
5321 if (operand_equal_p (ll_arg, rl_arg, 0)
5322 && operand_equal_p (lr_arg, rr_arg, 0))
5323 {
5324 result = combine_comparisons (loc, code, lcode, rcode,
5325 truth_type, ll_arg, lr_arg);
5326 if (result)
5327 return result;
5328 }
5329 else if (operand_equal_p (ll_arg, rr_arg, 0)
5330 && operand_equal_p (lr_arg, rl_arg, 0))
5331 {
5332 result = combine_comparisons (loc, code, lcode,
5333 swap_tree_comparison (rcode),
5334 truth_type, ll_arg, lr_arg);
5335 if (result)
5336 return result;
5337 }
5338 }
5339
5340 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5341 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5342
5343 /* If the RHS can be evaluated unconditionally and its operands are
5344 simple, it wins to evaluate the RHS unconditionally on machines
5345 with expensive branches. In this case, this isn't a comparison
5346 that can be merged. */
5347
5348 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5349 false) >= 2
5350 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5351 && simple_operand_p (rl_arg)
5352 && simple_operand_p (rr_arg))
5353 {
5354 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5355 if (code == TRUTH_OR_EXPR
5356 && lcode == NE_EXPR && integer_zerop (lr_arg)
5357 && rcode == NE_EXPR && integer_zerop (rr_arg)
5358 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5359 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5360 return build2_loc (loc, NE_EXPR, truth_type,
5361 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5362 ll_arg, rl_arg),
5363 build_int_cst (TREE_TYPE (ll_arg), 0));
5364
5365 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5366 if (code == TRUTH_AND_EXPR
5367 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5368 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5369 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5370 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5371 return build2_loc (loc, EQ_EXPR, truth_type,
5372 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5373 ll_arg, rl_arg),
5374 build_int_cst (TREE_TYPE (ll_arg), 0));
5375 }
5376
5377 /* See if the comparisons can be merged. Then get all the parameters for
5378 each side. */
5379
5380 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5381 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5382 return 0;
5383
5384 volatilep = 0;
5385 ll_inner = decode_field_reference (loc, ll_arg,
5386 &ll_bitsize, &ll_bitpos, &ll_mode,
5387 &ll_unsignedp, &volatilep, &ll_mask,
5388 &ll_and_mask);
5389 lr_inner = decode_field_reference (loc, lr_arg,
5390 &lr_bitsize, &lr_bitpos, &lr_mode,
5391 &lr_unsignedp, &volatilep, &lr_mask,
5392 &lr_and_mask);
5393 rl_inner = decode_field_reference (loc, rl_arg,
5394 &rl_bitsize, &rl_bitpos, &rl_mode,
5395 &rl_unsignedp, &volatilep, &rl_mask,
5396 &rl_and_mask);
5397 rr_inner = decode_field_reference (loc, rr_arg,
5398 &rr_bitsize, &rr_bitpos, &rr_mode,
5399 &rr_unsignedp, &volatilep, &rr_mask,
5400 &rr_and_mask);
5401
5402 /* The inner operation on the lhs of each comparison must be the
5403 same if we are to be able to do anything. Then see if we have
5404 constants. If not, the same must be true for the rhs's. */
5406 if (volatilep || ll_inner == 0 || rl_inner == 0
5407 || ! operand_equal_p (ll_inner, rl_inner, 0))
5408 return 0;
5409
5410 if (TREE_CODE (lr_arg) == INTEGER_CST
5411 && TREE_CODE (rr_arg) == INTEGER_CST)
5412 l_const = lr_arg, r_const = rr_arg;
5413 else if (lr_inner == 0 || rr_inner == 0
5414 || ! operand_equal_p (lr_inner, rr_inner, 0))
5415 return 0;
5416 else
5417 l_const = r_const = 0;
5418
5419 /* If either comparison code is not correct for our logical operation,
5420 fail. However, we can convert a one-bit comparison against zero into
5421 the opposite comparison against that bit being set in the field. */
5422
5423 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5424 if (lcode != wanted_code)
5425 {
5426 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5427 {
5428 /* Make the left operand unsigned, since we are only interested
5429 in the value of one bit. Otherwise we are doing the wrong
5430 thing below. */
5431 ll_unsignedp = 1;
5432 l_const = ll_mask;
5433 }
5434 else
5435 return 0;
5436 }
5437
5438 /* This is analogous to the code for l_const above. */
5439 if (rcode != wanted_code)
5440 {
5441 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5442 {
5443 rl_unsignedp = 1;
5444 r_const = rl_mask;
5445 }
5446 else
5447 return 0;
5448 }
5449
5450 /* See if we can find a mode that contains both fields being compared on
5451 the left. If we can't, fail. Otherwise, update all constants and masks
5452 to be relative to a field of that size. */
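/* An illustrative sketch with a hypothetical layout: for lhs fields at
   bits 3..5 and 8..11 of the same record, first_bit is 3, end_bit is 12,
   and get_best_mode may return a 16-bit mode, so that a single load can
   cover both fields.  */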
5453 first_bit = MIN (ll_bitpos, rl_bitpos);
5454 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5455 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5456 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5457 volatilep);
5458 if (lnmode == VOIDmode)
5459 return 0;
5460
5461 lnbitsize = GET_MODE_BITSIZE (lnmode);
5462 lnbitpos = first_bit & ~ (lnbitsize - 1);
5463 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5464 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5465
5466 if (BYTES_BIG_ENDIAN)
5467 {
5468 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5469 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5470 }
5471
5472 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5473 size_int (xll_bitpos));
5474 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5475 size_int (xrl_bitpos));
5476
5477 if (l_const)
5478 {
5479 l_const = fold_convert_loc (loc, lntype, l_const);
5480 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5481 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5482 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5483 fold_build1_loc (loc, BIT_NOT_EXPR,
5484 lntype, ll_mask))))
5485 {
5486 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5487
5488 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5489 }
5490 }
5491 if (r_const)
5492 {
5493 r_const = fold_convert_loc (loc, lntype, r_const);
5494 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5495 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5496 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5497 fold_build1_loc (loc, BIT_NOT_EXPR,
5498 lntype, rl_mask))))
5499 {
5500 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5501
5502 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5503 }
5504 }
5505
5506 /* If the right sides are not constant, do the same for them. Also,
5507 disallow this optimization if a size or signedness mismatch occurs
5508 between the left and right sides. */
5509 if (l_const == 0)
5510 {
5511 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5512 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5513 /* Make sure the two fields on the right
5514 correspond to the left without being swapped. */
5515 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5516 return 0;
5517
5518 first_bit = MIN (lr_bitpos, rr_bitpos);
5519 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5520 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5521 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5522 volatilep);
5523 if (rnmode == VOIDmode)
5524 return 0;
5525
5526 rnbitsize = GET_MODE_BITSIZE (rnmode);
5527 rnbitpos = first_bit & ~ (rnbitsize - 1);
5528 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5529 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5530
5531 if (BYTES_BIG_ENDIAN)
5532 {
5533 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5534 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5535 }
5536
5537 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5538 rntype, lr_mask),
5539 size_int (xlr_bitpos));
5540 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5541 rntype, rr_mask),
5542 size_int (xrr_bitpos));
5543
5544 /* Make a mask that corresponds to both fields being compared.
5545 Do this for both items being compared. If the operands are the
5546 same size and the bits being compared are in the same position
5547 then we can do this by masking both and comparing the masked
5548 results. */
5549 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5550 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5551 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5552 {
5553 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5554 ll_unsignedp || rl_unsignedp);
5555 if (! all_ones_mask_p (ll_mask, lnbitsize))
5556 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5557
5558 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5559 lr_unsignedp || rr_unsignedp);
5560 if (! all_ones_mask_p (lr_mask, rnbitsize))
5561 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5562
5563 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5564 }
5565
5566 /* There is still another way we can do something: If both pairs of
5567 fields being compared are adjacent, we may be able to make a wider
5568 field containing them both.
5569
5570 Note that we still must mask the lhs/rhs expressions. Furthermore,
5571 the mask must be shifted to account for the shift done by
5572 make_bit_field_ref. */
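/* For instance, with hypothetical fields p->a in bits 0..7 and p->b in
   bits 8..15, and q laid out the same way, "p->a == q->a && p->b == q->b"
   can become a single 16-bit load and compare on each side.  */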
5573 if ((ll_bitsize + ll_bitpos == rl_bitpos
5574 && lr_bitsize + lr_bitpos == rr_bitpos)
5575 || (ll_bitpos == rl_bitpos + rl_bitsize
5576 && lr_bitpos == rr_bitpos + rr_bitsize))
5577 {
5578 tree type;
5579
5580 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5581 ll_bitsize + rl_bitsize,
5582 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5583 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5584 lr_bitsize + rr_bitsize,
5585 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5586
5587 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5588 size_int (MIN (xll_bitpos, xrl_bitpos)));
5589 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5590 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5591
5592 /* Convert to the smaller type before masking out unwanted bits. */
5593 type = lntype;
5594 if (lntype != rntype)
5595 {
5596 if (lnbitsize > rnbitsize)
5597 {
5598 lhs = fold_convert_loc (loc, rntype, lhs);
5599 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5600 type = rntype;
5601 }
5602 else if (lnbitsize < rnbitsize)
5603 {
5604 rhs = fold_convert_loc (loc, lntype, rhs);
5605 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5606 type = lntype;
5607 }
5608 }
5609
5610 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5611 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5612
5613 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5614 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5615
5616 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5617 }
5618
5619 return 0;
5620 }
5621
5622 /* Handle the case of comparisons with constants. If there is something in
5623 common between the masks, those bits of the constants must be the same.
5624 If not, the condition is always false. Test for this to avoid generating
5625 incorrect code below. */
5626 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5627 if (! integer_zerop (result)
5628 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5629 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5630 {
5631 if (wanted_code == NE_EXPR)
5632 {
5633 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5634 return constant_boolean_node (true, truth_type);
5635 }
5636 else
5637 {
5638 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5639 return constant_boolean_node (false, truth_type);
5640 }
5641 }
5642
5643 /* Construct the expression we will return. First get the component
5644 reference we will make. Unless the mask is all ones the width of
5645 that field, perform the mask operation. Then compare with the
5646 merged constant. */
5647 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5648 ll_unsignedp || rl_unsignedp);
5649
5650 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5651 if (! all_ones_mask_p (ll_mask, lnbitsize))
5652 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5653
5654 return build2_loc (loc, wanted_code, truth_type, result,
5655 const_binop (BIT_IOR_EXPR, l_const, r_const));
5656 }
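
/* A source-level sketch of what the function above can achieve (the
   layout and the merged constant K are hypothetical):

     struct s { unsigned char a, b; };
     int f (struct s *p) { return p->a == 2 && p->b == 4; }

   may be folded into one 16-bit load and compare, roughly

     int f (struct s *p) { return *(unsigned short *) p == K; }

   where K holds 2 and 4 at the fields' bit positions, depending on
   endianness.  */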
5657 \f
5658 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5659 constant. */
5660
5661 static tree
5662 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5663 tree op0, tree op1)
5664 {
5665 tree arg0 = op0;
5666 enum tree_code op_code;
5667 tree comp_const;
5668 tree minmax_const;
5669 int consts_equal, consts_lt;
5670 tree inner;
5671
5672 STRIP_SIGN_NOPS (arg0);
5673
5674 op_code = TREE_CODE (arg0);
5675 minmax_const = TREE_OPERAND (arg0, 1);
5676 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5677 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5678 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5679 inner = TREE_OPERAND (arg0, 0);
5680
5681 /* If something does not permit us to optimize, return NULL_TREE. */
5682 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5683 || TREE_CODE (comp_const) != INTEGER_CST
5684 || TREE_OVERFLOW (comp_const)
5685 || TREE_CODE (minmax_const) != INTEGER_CST
5686 || TREE_OVERFLOW (minmax_const))
5687 return NULL_TREE;
5688
5689 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5690 and GT_EXPR, doing the rest with recursive calls using logical
5691 simplifications. */
5692 switch (code)
5693 {
5694 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5695 {
5696 tree tem
5697 = optimize_minmax_comparison (loc,
5698 invert_tree_comparison (code, false),
5699 type, op0, op1);
5700 if (tem)
5701 return invert_truthvalue_loc (loc, tem);
5702 return NULL_TREE;
5703 }
5704
5705 case GE_EXPR:
5706 return
5707 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5708 optimize_minmax_comparison
5709 (loc, EQ_EXPR, type, arg0, comp_const),
5710 optimize_minmax_comparison
5711 (loc, GT_EXPR, type, arg0, comp_const));
5712
5713 case EQ_EXPR:
5714 if (op_code == MAX_EXPR && consts_equal)
5715 /* MAX (X, 0) == 0 -> X <= 0 */
5716 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5717
5718 else if (op_code == MAX_EXPR && consts_lt)
5719 /* MAX (X, 0) == 5 -> X == 5 */
5720 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5721
5722 else if (op_code == MAX_EXPR)
5723 /* MAX (X, 0) == -1 -> false */
5724 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5725
5726 else if (consts_equal)
5727 /* MIN (X, 0) == 0 -> X >= 0 */
5728 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5729
5730 else if (consts_lt)
5731 /* MIN (X, 0) == 5 -> false */
5732 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5733
5734 else
5735 /* MIN (X, 0) == -1 -> X == -1 */
5736 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5737
5738 case GT_EXPR:
5739 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5740 /* MAX (X, 0) > 0 -> X > 0
5741 MAX (X, 0) > 5 -> X > 5 */
5742 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5743
5744 else if (op_code == MAX_EXPR)
5745 /* MAX (X, 0) > -1 -> true */
5746 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5747
5748 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5749 /* MIN (X, 0) > 0 -> false
5750 MIN (X, 0) > 5 -> false */
5751 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5752
5753 else
5754 /* MIN (X, 0) > -1 -> X > -1 */
5755 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5756
5757 default:
5758 return NULL_TREE;
5759 }
5760 }
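
/* For example, "MIN (x, 10) >= 5" is not matched directly above: the
   GE_EXPR case rewrites it as the OR of "MIN (x, 10) == 5" and
   "MIN (x, 10) > 5", while NE, LT and LE fold the inverted comparison
   and negate the result.  */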
5761 \f
5762 /* T is an integer expression that is being multiplied, divided, or taken a
5763 modulus (CODE says which and what kind of divide or modulus) by a
5764 constant C. See if we can eliminate that operation by folding it with
5765 other operations already in T. WIDE_TYPE, if non-null, is a type that
5766 should be used for the computation if wider than our type.
5767
5768 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5769 (X * 2) + (Y * 4). We must, however, be assured that either the original
5770 expression would not overflow or that overflow is undefined for the type
5771 in the language in question.
5772
5773 If we return a non-null expression, it is an equivalent form of the
5774 original computation, but need not be in the original type.
5775
5776 We set *STRICT_OVERFLOW_P to true if the return values depends on
5777 signed overflow being undefined. Otherwise we do not change
5778 *STRICT_OVERFLOW_P. */
5779
5780 static tree
5781 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5782 bool *strict_overflow_p)
5783 {
5784 /* To avoid exponential search depth, refuse to allow recursion past
5785 three levels. Beyond that (1) it's highly unlikely that we'll find
5786 something interesting and (2) we've probably processed it before
5787 when we built the inner expression. */
5788
5789 static int depth;
5790 tree ret;
5791
5792 if (depth > 3)
5793 return NULL;
5794
5795 depth++;
5796 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5797 depth--;
5798
5799 return ret;
5800 }
5801
5802 static tree
5803 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5804 bool *strict_overflow_p)
5805 {
5806 tree type = TREE_TYPE (t);
5807 enum tree_code tcode = TREE_CODE (t);
5808 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5809 > GET_MODE_SIZE (TYPE_MODE (type)))
5810 ? wide_type : type);
5811 tree t1, t2;
5812 int same_p = tcode == code;
5813 tree op0 = NULL_TREE, op1 = NULL_TREE;
5814 bool sub_strict_overflow_p;
5815
5816 /* Don't deal with constants of zero here; they confuse the code below. */
5817 if (integer_zerop (c))
5818 return NULL_TREE;
5819
5820 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5821 op0 = TREE_OPERAND (t, 0);
5822
5823 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5824 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5825
5826 /* Note that we need not handle conditional operations here since fold
5827 already handles those cases. So just do arithmetic here. */
5828 switch (tcode)
5829 {
5830 case INTEGER_CST:
5831 /* For a constant, we can always simplify if we are a multiply
5832 or (for divide and modulus) if it is a multiple of our constant. */
5833 if (code == MULT_EXPR
5834 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5835 return const_binop (code, fold_convert (ctype, t),
5836 fold_convert (ctype, c));
5837 break;
5838
5839 CASE_CONVERT: case NON_LVALUE_EXPR:
5840 /* If op0 is an expression ... */
5841 if ((COMPARISON_CLASS_P (op0)
5842 || UNARY_CLASS_P (op0)
5843 || BINARY_CLASS_P (op0)
5844 || VL_EXP_CLASS_P (op0)
5845 || EXPRESSION_CLASS_P (op0))
5846 /* ... and has wrapping overflow, and its type is smaller
5847 than ctype, then we cannot pass through as widening. */
5848 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5849 && (TYPE_PRECISION (ctype)
5850 > TYPE_PRECISION (TREE_TYPE (op0))))
5851 /* ... or this is a truncation (t is narrower than op0),
5852 then we cannot pass through this narrowing. */
5853 || (TYPE_PRECISION (type)
5854 < TYPE_PRECISION (TREE_TYPE (op0)))
5855 /* ... or signedness changes for division or modulus,
5856 then we cannot pass through this conversion. */
5857 || (code != MULT_EXPR
5858 && (TYPE_UNSIGNED (ctype)
5859 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5860 /* ... or has undefined overflow while the converted to
5861 type has not, we cannot do the operation in the inner type
5862 as that would introduce undefined overflow. */
5863 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5864 && !TYPE_OVERFLOW_UNDEFINED (type))))
5865 break;
5866
5867 /* Pass the constant down and see if we can make a simplification. If
5868 we can, replace this expression with the inner simplification for
5869 possible later conversion to our or some other type. */
5870 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5871 && TREE_CODE (t2) == INTEGER_CST
5872 && !TREE_OVERFLOW (t2)
5873 && (0 != (t1 = extract_muldiv (op0, t2, code,
5874 code == MULT_EXPR
5875 ? ctype : NULL_TREE,
5876 strict_overflow_p))))
5877 return t1;
5878 break;
5879
5880 case ABS_EXPR:
5881 /* If widening the type changes it from signed to unsigned, then we
5882 must avoid building ABS_EXPR itself as unsigned. */
5883 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5884 {
5885 tree cstype = (*signed_type_for) (ctype);
5886 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5887 != 0)
5888 {
5889 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5890 return fold_convert (ctype, t1);
5891 }
5892 break;
5893 }
5894 /* If the constant is negative, we cannot simplify this. */
5895 if (tree_int_cst_sgn (c) == -1)
5896 break;
5897 /* FALLTHROUGH */
5898 case NEGATE_EXPR:
5899 /* For division and modulus, type can't be unsigned, as e.g.
5900 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5901 For signed types, even with wrapping overflow, this is fine. */
5902 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5903 break;
5904 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5905 != 0)
5906 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5907 break;
5908
5909 case MIN_EXPR: case MAX_EXPR:
5910 /* If widening the type changes the signedness, then we can't perform
5911 this optimization as that changes the result. */
5912 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5913 break;
5914
5915 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5916 sub_strict_overflow_p = false;
5917 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5918 &sub_strict_overflow_p)) != 0
5919 && (t2 = extract_muldiv (op1, c, code, wide_type,
5920 &sub_strict_overflow_p)) != 0)
5921 {
5922 if (tree_int_cst_sgn (c) < 0)
5923 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
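/* E.g. "MIN (a, b) / -5" becomes "MAX (a / -5, b / -5)": dividing by a
   negative constant reverses the ordering of the operands, hence the
   MIN/MAX flip just above.  */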
5924 if (sub_strict_overflow_p)
5925 *strict_overflow_p = true;
5926 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5927 fold_convert (ctype, t2));
5928 }
5929 break;
5930
5931 case LSHIFT_EXPR: case RSHIFT_EXPR:
5932 /* If the second operand is constant, this is a multiplication
5933 or floor division by a power of two, so we can treat it that
5934 way unless the multiplier or divisor overflows. Signed
5935 left-shift overflow is implementation-defined rather than
5936 undefined in C90, so do not convert signed left shift into
5937 multiplication. */
5938 if (TREE_CODE (op1) == INTEGER_CST
5939 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5940 /* const_binop may not detect overflow correctly,
5941 so check for it explicitly here. */
5942 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5943 && 0 != (t1 = fold_convert (ctype,
5944 const_binop (LSHIFT_EXPR,
5945 size_one_node,
5946 op1)))
5947 && !TREE_OVERFLOW (t1))
5948 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5949 ? MULT_EXPR : FLOOR_DIV_EXPR,
5950 ctype,
5951 fold_convert (ctype, op0),
5952 t1),
5953 c, code, wide_type, strict_overflow_p);
5954 break;
5955
5956 case PLUS_EXPR: case MINUS_EXPR:
5957 /* See if we can eliminate the operation on both sides. If we can, we
5958 can return a new PLUS or MINUS. If we can't, the only remaining
5959 cases where we can do anything are if the second operand is a
5960 constant. */
5961 sub_strict_overflow_p = false;
5962 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5963 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5964 if (t1 != 0 && t2 != 0
5965 && (code == MULT_EXPR
5966 /* If not multiplication, we can only do this if both operands
5967 are divisible by c. */
5968 || (multiple_of_p (ctype, op0, c)
5969 && multiple_of_p (ctype, op1, c))))
5970 {
5971 if (sub_strict_overflow_p)
5972 *strict_overflow_p = true;
5973 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5974 fold_convert (ctype, t2));
5975 }
5976
5977 /* If this was a subtraction, negate OP1 and set it to be an addition.
5978 This simplifies the logic below. */
5979 if (tcode == MINUS_EXPR)
5980 {
5981 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5982 /* If OP1 was not easily negatable, the constant may be OP0. */
5983 if (TREE_CODE (op0) == INTEGER_CST)
5984 {
5985 tree tem = op0;
5986 op0 = op1;
5987 op1 = tem;
5988 tem = t1;
5989 t1 = t2;
5990 t2 = tem;
5991 }
5992 }
5993
5994 if (TREE_CODE (op1) != INTEGER_CST)
5995 break;
5996
5997 /* If either OP1 or C are negative, this optimization is not safe for
5998 some of the division and remainder types while for others we need
5999 to change the code. */
6000 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6001 {
6002 if (code == CEIL_DIV_EXPR)
6003 code = FLOOR_DIV_EXPR;
6004 else if (code == FLOOR_DIV_EXPR)
6005 code = CEIL_DIV_EXPR;
6006 else if (code != MULT_EXPR
6007 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6008 break;
6009 }
6010
6011 /* If it's a multiply, or a division/modulus whose second operand is a
6012 multiple of our constant, do the operation and verify it doesn't overflow. */
6013 if (code == MULT_EXPR
6014 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6015 {
6016 op1 = const_binop (code, fold_convert (ctype, op1),
6017 fold_convert (ctype, c));
6018 /* We allow the constant to overflow with wrapping semantics. */
6019 if (op1 == 0
6020 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6021 break;
6022 }
6023 else
6024 break;
6025
6026 /* If we have an unsigned type, we cannot widen the operation since it
6027 will change the result if the original computation overflowed. */
6028 if (TYPE_UNSIGNED (ctype) && ctype != type)
6029 break;
6030
6031 /* If we were able to eliminate our operation from the first side,
6032 apply our operation to the second side and reform the PLUS. */
6033 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6034 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6035
6036 /* The last case is if we are a multiply. In that case, we can
6037 apply the distributive law to commute the multiply and addition
6038 if the multiplication of the constants doesn't overflow
6039 and overflow is defined. With undefined overflow
6040 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6041 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6042 return fold_build2 (tcode, ctype,
6043 fold_build2 (code, ctype,
6044 fold_convert (ctype, op0),
6045 fold_convert (ctype, c)),
6046 op1);
6047
6048 break;
6049
6050 case MULT_EXPR:
6051 /* We have a special case here if we are doing something like
6052 (C * 8) % 4 since we know that's zero. */
6053 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6054 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6055 /* If the multiplication can overflow we cannot optimize this. */
6056 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6057 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6058 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6059 {
6060 *strict_overflow_p = true;
6061 return omit_one_operand (type, integer_zero_node, op0);
6062 }
6063
6064 /* ... fall through ... */
6065
6066 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6067 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6068 /* If we can extract our operation from the LHS, do so and return a
6069 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6070 do something only if the second operand is a constant. */
6071 if (same_p
6072 && (t1 = extract_muldiv (op0, c, code, wide_type,
6073 strict_overflow_p)) != 0)
6074 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6075 fold_convert (ctype, op1));
6076 else if (tcode == MULT_EXPR && code == MULT_EXPR
6077 && (t1 = extract_muldiv (op1, c, code, wide_type,
6078 strict_overflow_p)) != 0)
6079 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6080 fold_convert (ctype, t1));
6081 else if (TREE_CODE (op1) != INTEGER_CST)
6082 return 0;
6083
6084 /* If these are the same operation types, we can associate them
6085 assuming no overflow. */
6086 if (tcode == code)
6087 {
6088 bool overflow_p = false;
6089 bool overflow_mul_p;
6090 signop sign = TYPE_SIGN (ctype);
6091 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6092 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6093 if (overflow_mul_p
6094 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6095 overflow_p = true;
6096 if (!overflow_p)
6097 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6098 wide_int_to_tree (ctype, mul));
6099 }
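/* For instance, the association above turns "(x * 4) * 3" into "x * 12",
   provided the constant product 4 * 3 does not overflow in CTYPE.  */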
6100
6101 /* If these operations "cancel" each other, we have the main
6102 optimizations of this pass, which occur when either constant is a
6103 multiple of the other, in which case we replace this with either an
6104 operation of CODE or TCODE.
6105
6106 If we have an unsigned type, we cannot do this since it will change
6107 the result if the original computation overflowed. */
6108 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6109 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6110 || (tcode == MULT_EXPR
6111 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6112 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6113 && code != MULT_EXPR)))
6114 {
6115 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6116 {
6117 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6118 *strict_overflow_p = true;
6119 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6120 fold_convert (ctype,
6121 const_binop (TRUNC_DIV_EXPR,
6122 op1, c)));
6123 }
6124 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6125 {
6126 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6127 *strict_overflow_p = true;
6128 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6129 fold_convert (ctype,
6130 const_binop (TRUNC_DIV_EXPR,
6131 c, op1)));
6132 }
6133 }
6134 break;
6135
6136 default:
6137 break;
6138 }
6139
6140 return 0;
6141 }
6142 \f
6143 /* Return a node which has the indicated constant VALUE (either 0 or
6144 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6145 and is of the indicated TYPE. */
6146
6147 tree
6148 constant_boolean_node (bool value, tree type)
6149 {
6150 if (type == integer_type_node)
6151 return value ? integer_one_node : integer_zero_node;
6152 else if (type == boolean_type_node)
6153 return value ? boolean_true_node : boolean_false_node;
6154 else if (TREE_CODE (type) == VECTOR_TYPE)
6155 return build_vector_from_val (type,
6156 build_int_cst (TREE_TYPE (type),
6157 value ? -1 : 0));
6158 else
6159 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6160 }
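
/* For instance, constant_boolean_node (true, boolean_type_node) yields
   boolean_true_node, while for a vector type it yields the all-ones
   vector (e.g. { -1, -1, -1, -1 }), the truth value produced by vector
   comparisons.  */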
6161
6162
6163 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6164 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6165 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6166 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6167 COND is the first argument to CODE; otherwise (as in the example
6168 given here), it is the second argument. TYPE is the type of the
6169 original expression. Return NULL_TREE if no simplification is
6170 possible. */
6171
6172 static tree
6173 fold_binary_op_with_conditional_arg (location_t loc,
6174 enum tree_code code,
6175 tree type, tree op0, tree op1,
6176 tree cond, tree arg, int cond_first_p)
6177 {
6178 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6179 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6180 tree test, true_value, false_value;
6181 tree lhs = NULL_TREE;
6182 tree rhs = NULL_TREE;
6183 enum tree_code cond_code = COND_EXPR;
6184
6185 if (TREE_CODE (cond) == COND_EXPR
6186 || TREE_CODE (cond) == VEC_COND_EXPR)
6187 {
6188 test = TREE_OPERAND (cond, 0);
6189 true_value = TREE_OPERAND (cond, 1);
6190 false_value = TREE_OPERAND (cond, 2);
6191 /* If this operand throws an exception, then it does not make
6192 sense to try to perform a logical or arithmetic operation
6193 involving it. */
6194 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6195 lhs = true_value;
6196 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6197 rhs = false_value;
6198 }
6199 else
6200 {
6201 tree testtype = TREE_TYPE (cond);
6202 test = cond;
6203 true_value = constant_boolean_node (true, testtype);
6204 false_value = constant_boolean_node (false, testtype);
6205 }
6206
6207 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6208 cond_code = VEC_COND_EXPR;
6209
6210 /* This transformation is only worthwhile if we don't have to wrap ARG
6211 in a SAVE_EXPR and the operation can be simplified without recursing
6212 on at least one of the branches once it's pushed inside the COND_EXPR. */
6213 if (!TREE_CONSTANT (arg)
6214 && (TREE_SIDE_EFFECTS (arg)
6215 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6216 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6217 return NULL_TREE;
6218
6219 arg = fold_convert_loc (loc, arg_type, arg);
6220 if (lhs == 0)
6221 {
6222 true_value = fold_convert_loc (loc, cond_type, true_value);
6223 if (cond_first_p)
6224 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6225 else
6226 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6227 }
6228 if (rhs == 0)
6229 {
6230 false_value = fold_convert_loc (loc, cond_type, false_value);
6231 if (cond_first_p)
6232 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6233 else
6234 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6235 }
6236
6237 /* Check that we have simplified at least one of the branches. */
6238 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6239 return NULL_TREE;
6240
6241 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6242 }
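
/* A concrete instance: "1 + (b ? 2 : 3)" folds to "b ? 3 : 4", since
   both arms simplify. With a non-constant A, "a + (b ? 2 : 3)" is
   rejected by the guard above: the arm values are constants, so pushing
   A inside the COND_EXPR would duplicate it without simplifying either
   branch.  */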
6243
6244 \f
6245 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6246
6247 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6248 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6249 ADDEND is the same as X.
6250
6251 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6252 and finite. The problematic cases are when X is zero, and its mode
6253 has signed zeros. In the case of rounding towards -infinity,
6254 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6255 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6256
6257 bool
6258 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6259 {
6260 if (!real_zerop (addend))
6261 return false;
6262
6263 /* Don't allow the fold with -fsignaling-nans. */
6264 if (HONOR_SNANS (element_mode (type)))
6265 return false;
6266
6267 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6268 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6269 return true;
6270
6271 /* In a vector or complex, we would need to check the sign of all zeros. */
6272 if (TREE_CODE (addend) != REAL_CST)
6273 return false;
6274
6275 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6276 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6277 negate = !negate;
6278
6279 /* The mode has signed zeros, and we have to honor their sign.
6280 In this situation, there is only one case we can return true for.
6281 X - 0 is the same as X unless rounding towards -infinity is
6282 supported. */
6283 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6284 }
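
/* For example, "x - 0.0" folds to "x" when neither signaling NaNs nor
   sign-dependent rounding must be honored, but "x + 0.0" does not fold
   while signed zeros are honored, because (-0.0) + 0.0 is +0.0, not
   -0.0. With -fno-signed-zeros both forms fold.  */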
6285
6286 /* Subroutine of fold() that checks comparisons of built-in math
6287 functions against real constants.
6288
6289 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6290 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6291 is the type of the result and ARG0 and ARG1 are the operands of the
6292 comparison. ARG1 must be a TREE_REAL_CST.
6293
6294 The function returns the constant folded tree if a simplification
6295 can be made, and NULL_TREE otherwise. */
6296
6297 static tree
6298 fold_mathfn_compare (location_t loc,
6299 enum built_in_function fcode, enum tree_code code,
6300 tree type, tree arg0, tree arg1)
6301 {
6302 REAL_VALUE_TYPE c;
6303
6304 if (BUILTIN_SQRT_P (fcode))
6305 {
6306 tree arg = CALL_EXPR_ARG (arg0, 0);
6307 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6308
6309 c = TREE_REAL_CST (arg1);
6310 if (REAL_VALUE_NEGATIVE (c))
6311 {
6312 /* sqrt(x) < y is always false, if y is negative. */
6313 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6314 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6315
6316 /* sqrt(x) > y is always true, if y is negative and we
6317 don't care about NaNs, i.e. negative values of x. */
6318 if (code == NE_EXPR || !HONOR_NANS (mode))
6319 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6320
6321 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6322 return fold_build2_loc (loc, GE_EXPR, type, arg,
6323 build_real (TREE_TYPE (arg), dconst0));
6324 }
6325 else if (code == GT_EXPR || code == GE_EXPR)
6326 {
6327 REAL_VALUE_TYPE c2;
6328
6329 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6330 real_convert (&c2, mode, &c2);
6331
6332 if (REAL_VALUE_ISINF (c2))
6333 {
6334 /* sqrt(x) > y is x == +Inf, when y is very large. */
6335 if (HONOR_INFINITIES (mode))
6336 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6337 build_real (TREE_TYPE (arg), c2));
6338
6339 /* sqrt(x) > y is always false, when y is very large
6340 and we don't care about infinities. */
6341 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6342 }
6343
6344 /* sqrt(x) > c is the same as x > c*c. */
6345 return fold_build2_loc (loc, code, type, arg,
6346 build_real (TREE_TYPE (arg), c2));
6347 }
6348 else if (code == LT_EXPR || code == LE_EXPR)
6349 {
6350 REAL_VALUE_TYPE c2;
6351
6352 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6353 real_convert (&c2, mode, &c2);
6354
6355 if (REAL_VALUE_ISINF (c2))
6356 {
6357 /* sqrt(x) < y is always true, when y is a very large
6358 value and we don't care about NaNs or Infinities. */
6359 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6360 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6361
6362 /* sqrt(x) < y is x != +Inf when y is very large and we
6363 don't care about NaNs. */
6364 if (! HONOR_NANS (mode))
6365 return fold_build2_loc (loc, NE_EXPR, type, arg,
6366 build_real (TREE_TYPE (arg), c2));
6367
6368 /* sqrt(x) < y is x >= 0 when y is very large and we
6369 don't care about Infinities. */
6370 if (! HONOR_INFINITIES (mode))
6371 return fold_build2_loc (loc, GE_EXPR, type, arg,
6372 build_real (TREE_TYPE (arg), dconst0));
6373
6374 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6375 arg = save_expr (arg);
6376 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6377 fold_build2_loc (loc, GE_EXPR, type, arg,
6378 build_real (TREE_TYPE (arg),
6379 dconst0)),
6380 fold_build2_loc (loc, NE_EXPR, type, arg,
6381 build_real (TREE_TYPE (arg),
6382 c2)));
6383 }
6384
6385 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6386 if (! HONOR_NANS (mode))
6387 return fold_build2_loc (loc, code, type, arg,
6388 build_real (TREE_TYPE (arg), c2));
6389
6390 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6391 arg = save_expr (arg);
6392 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6393 fold_build2_loc (loc, GE_EXPR, type, arg,
6394 build_real (TREE_TYPE (arg),
6395 dconst0)),
6396 fold_build2_loc (loc, code, type, arg,
6397 build_real (TREE_TYPE (arg),
6398 c2)));
6399 }
6400 }
6401
6402 return NULL_TREE;
6403 }
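
/* E.g. "sqrt (x) > 2.0" folds to "x > 4.0" unconditionally, while
   "sqrt (x) < 2.0" folds to "x < 4.0" only if NaNs need not be honored;
   otherwise it becomes "x >= 0.0 && x < 4.0", so NaN and negative
   arguments still compare false.  */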
6404
6405 /* Subroutine of fold() that optimizes comparisons against Infinities,
6406 either +Inf or -Inf.
6407
6408 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6409 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6410 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6411
6412 The function returns the constant folded tree if a simplification
6413 can be made, and NULL_TREE otherwise. */
6414
6415 static tree
6416 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6417 tree arg0, tree arg1)
6418 {
6419 machine_mode mode;
6420 REAL_VALUE_TYPE max;
6421 tree temp;
6422 bool neg;
6423
6424 mode = TYPE_MODE (TREE_TYPE (arg0));
6425
6426 /* For negative infinity swap the sense of the comparison. */
6427 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6428 if (neg)
6429 code = swap_tree_comparison (code);
6430
6431 switch (code)
6432 {
6433 case GT_EXPR:
6434 /* x > +Inf is always false, if we ignore sNaNs. */
6435 if (HONOR_SNANS (mode))
6436 return NULL_TREE;
6437 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6438
6439 case LE_EXPR:
6440 /* x <= +Inf is always true, if we don't care about NaNs. */
6441 if (! HONOR_NANS (mode))
6442 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6443
6444 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6445 arg0 = save_expr (arg0);
6446 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6447
6448 case EQ_EXPR:
6449 case GE_EXPR:
6450 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6451 real_maxval (&max, neg, mode);
6452 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6453 arg0, build_real (TREE_TYPE (arg0), max));
6454
6455 case LT_EXPR:
6456 /* x < +Inf is always equal to x <= DBL_MAX. */
6457 real_maxval (&max, neg, mode);
6458 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6459 arg0, build_real (TREE_TYPE (arg0), max));
6460
6461 case NE_EXPR:
6462 /* x != +Inf is always equal to !(x > DBL_MAX). */
6463 real_maxval (&max, neg, mode);
6464 if (! HONOR_NANS (mode))
6465 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6466 arg0, build_real (TREE_TYPE (arg0), max));
6467
6468 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6469 arg0, build_real (TREE_TYPE (arg0), max));
6470 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6471
6472 default:
6473 break;
6474 }
6475
6476 return NULL_TREE;
6477 }
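
/* E.g. for double, "x < __builtin_inf ()" folds to "x <= DBL_MAX", and
   "x >= -__builtin_inf ()" folds to "x == x", which is true exactly when
   x is not a NaN (the comparison sense is swapped for -Inf).  */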
6478
6479 /* Subroutine of fold() that optimizes comparisons of a division by
6480 a nonzero integer constant against an integer constant, i.e.
6481 X/C1 op C2.
6482
6483 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6484 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6485 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6486
6487 The function returns the constant folded tree if a simplification
6488 can be made, and NULL_TREE otherwise. */
6489
6490 static tree
6491 fold_div_compare (location_t loc,
6492 enum tree_code code, tree type, tree arg0, tree arg1)
6493 {
6494 tree prod, tmp, hi, lo;
6495 tree arg00 = TREE_OPERAND (arg0, 0);
6496 tree arg01 = TREE_OPERAND (arg0, 1);
6497 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6498 bool neg_overflow = false;
6499 bool overflow;
6500
6501 /* We have to do this the hard way to detect unsigned overflow.
6502 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6503 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6504 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6505 neg_overflow = false;
6506
6507 if (sign == UNSIGNED)
6508 {
6509 tmp = int_const_binop (MINUS_EXPR, arg01,
6510 build_int_cst (TREE_TYPE (arg01), 1));
6511 lo = prod;
6512
6513 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6514 val = wi::add (prod, tmp, sign, &overflow);
6515 hi = force_fit_type (TREE_TYPE (arg00), val,
6516 -1, overflow | TREE_OVERFLOW (prod));
6517 }
6518 else if (tree_int_cst_sgn (arg01) >= 0)
6519 {
6520 tmp = int_const_binop (MINUS_EXPR, arg01,
6521 build_int_cst (TREE_TYPE (arg01), 1));
6522 switch (tree_int_cst_sgn (arg1))
6523 {
6524 case -1:
6525 neg_overflow = true;
6526 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6527 hi = prod;
6528 break;
6529
6530 case 0:
6531 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6532 hi = tmp;
6533 break;
6534
6535 case 1:
6536 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6537 lo = prod;
6538 break;
6539
6540 default:
6541 gcc_unreachable ();
6542 }
6543 }
6544 else
6545 {
6546 /* A negative divisor reverses the relational operators. */
6547 code = swap_tree_comparison (code);
6548
6549 tmp = int_const_binop (PLUS_EXPR, arg01,
6550 build_int_cst (TREE_TYPE (arg01), 1));
6551 switch (tree_int_cst_sgn (arg1))
6552 {
6553 case -1:
6554 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6555 lo = prod;
6556 break;
6557
6558 case 0:
6559 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6560 lo = tmp;
6561 break;
6562
6563 case 1:
6564 neg_overflow = true;
6565 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6566 hi = prod;
6567 break;
6568
6569 default:
6570 gcc_unreachable ();
6571 }
6572 }
6573
6574 switch (code)
6575 {
6576 case EQ_EXPR:
6577 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6578 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6579 if (TREE_OVERFLOW (hi))
6580 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6581 if (TREE_OVERFLOW (lo))
6582 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6583 return build_range_check (loc, type, arg00, 1, lo, hi);
6584
6585 case NE_EXPR:
6586 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6587 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6588 if (TREE_OVERFLOW (hi))
6589 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6590 if (TREE_OVERFLOW (lo))
6591 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6592 return build_range_check (loc, type, arg00, 0, lo, hi);
6593
6594 case LT_EXPR:
6595 if (TREE_OVERFLOW (lo))
6596 {
6597 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6598 return omit_one_operand_loc (loc, type, tmp, arg00);
6599 }
6600 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6601
6602 case LE_EXPR:
6603 if (TREE_OVERFLOW (hi))
6604 {
6605 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6606 return omit_one_operand_loc (loc, type, tmp, arg00);
6607 }
6608 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6609
6610 case GT_EXPR:
6611 if (TREE_OVERFLOW (hi))
6612 {
6613 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6614 return omit_one_operand_loc (loc, type, tmp, arg00);
6615 }
6616 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6617
6618 case GE_EXPR:
6619 if (TREE_OVERFLOW (lo))
6620 {
6621 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6622 return omit_one_operand_loc (loc, type, tmp, arg00);
6623 }
6624 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6625
6626 default:
6627 break;
6628 }
6629
6630 return NULL_TREE;
6631 }
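
/* A worked instance for unsigned x: "x / 3 == 2" holds exactly for x in
   [6, 8], so it folds to a range check equivalent to "x - 6 <= 2", while
   "x / 3 > 2" folds to "x > 8".  */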
6632
6633
6634 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6635 equality/inequality test, then return a simplified form of the test
6636 using a sign test. Otherwise return NULL. TYPE is the desired
6637 result type. */
6638
6639 static tree
6640 fold_single_bit_test_into_sign_test (location_t loc,
6641 enum tree_code code, tree arg0, tree arg1,
6642 tree result_type)
6643 {
6644 /* If this is testing a single bit, we can optimize the test. */
6645 if ((code == NE_EXPR || code == EQ_EXPR)
6646 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6647 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6648 {
6649 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6650 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6651 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6652
6653 if (arg00 != NULL_TREE
6654 /* This is only a win if casting to a signed type is cheap,
6655 i.e. when arg00's type is not a partial mode. */
6656 && TYPE_PRECISION (TREE_TYPE (arg00))
6657 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6658 {
6659 tree stype = signed_type_for (TREE_TYPE (arg00));
6660 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6661 result_type,
6662 fold_convert_loc (loc, stype, arg00),
6663 build_int_cst (stype, 0));
6664 }
6665 }
6666
6667 return NULL_TREE;
6668 }
6669
6670 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6671 equality/inequality test, then return a simplified form of
6672 the test using shifts and logical operations. Otherwise return
6673 NULL. TYPE is the desired result type. */
6674
6675 tree
6676 fold_single_bit_test (location_t loc, enum tree_code code,
6677 tree arg0, tree arg1, tree result_type)
6678 {
6679 /* If this is testing a single bit, we can optimize the test. */
6680 if ((code == NE_EXPR || code == EQ_EXPR)
6681 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6682 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6683 {
6684 tree inner = TREE_OPERAND (arg0, 0);
6685 tree type = TREE_TYPE (arg0);
6686 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6687 machine_mode operand_mode = TYPE_MODE (type);
6688 int ops_unsigned;
6689 tree signed_type, unsigned_type, intermediate_type;
6690 tree tem, one;
6691
6692 /* First, see if we can fold the single bit test into a sign-bit
6693 test. */
6694 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6695 result_type);
6696 if (tem)
6697 return tem;
6698
6699 /* Otherwise we have (A & C) != 0 where C is a single bit,
6700 convert that into ((A >> C2) & 1), where C2 = log2(C).
6701 Similarly for (A & C) == 0. */
6702
6703 /* If INNER is a right shift of a constant and it plus BITNUM does
6704 not overflow, adjust BITNUM and INNER. */
6705 if (TREE_CODE (inner) == RSHIFT_EXPR
6706 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6707 && bitnum < TYPE_PRECISION (type)
6708 && wi::ltu_p (TREE_OPERAND (inner, 1),
6709 TYPE_PRECISION (type) - bitnum))
6710 {
6711 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6712 inner = TREE_OPERAND (inner, 0);
6713 }
6714
6715 /* If we are going to be able to omit the AND below, we must do our
6716 operations as unsigned. If we must use the AND, we have a choice.
6717 Normally unsigned is faster, but for some machines signed is. */
6718 #ifdef LOAD_EXTEND_OP
6719 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6720 && !flag_syntax_only) ? 0 : 1;
6721 #else
6722 ops_unsigned = 1;
6723 #endif
6724
6725 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6726 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6727 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6728 inner = fold_convert_loc (loc, intermediate_type, inner);
6729
6730 if (bitnum != 0)
6731 inner = build2 (RSHIFT_EXPR, intermediate_type,
6732 inner, size_int (bitnum));
6733
6734 one = build_int_cst (intermediate_type, 1);
6735
6736 if (code == EQ_EXPR)
6737 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6738
6739 /* Put the AND last so it can combine with more things. */
6740 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6741
6742 /* Make sure to return the proper type. */
6743 inner = fold_convert_loc (loc, result_type, inner);
6744
6745 return inner;
6746 }
6747 return NULL_TREE;
6748 }
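
/* E.g. "(x & 8) != 0" becomes "(x >> 3) & 1", and "(x & 8) == 0" becomes
   "((x >> 3) ^ 1) & 1"; when the mask is the sign bit, the sign-test
   helper above produces "x < 0" or "x >= 0" instead.  */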
6749
6750 /* Check whether we are allowed to reorder operands arg0 and arg1,
6751 such that the evaluation of arg1 occurs before arg0. */
6752
6753 static bool
6754 reorder_operands_p (const_tree arg0, const_tree arg1)
6755 {
6756 if (! flag_evaluation_order)
6757 return true;
6758 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6759 return true;
6760 return ! TREE_SIDE_EFFECTS (arg0)
6761 && ! TREE_SIDE_EFFECTS (arg1);
6762 }
6763
6764 /* Test whether it is preferable to swap two operands, ARG0 and
6765 ARG1, for example because ARG0 is an integer constant and ARG1
6766 isn't. If REORDER is true, only recommend swapping if we can
6767 evaluate the operands in reverse order. */
6768
6769 bool
6770 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6771 {
6772 if (CONSTANT_CLASS_P (arg1))
6773 return 0;
6774 if (CONSTANT_CLASS_P (arg0))
6775 return 1;
6776
6777 STRIP_NOPS (arg0);
6778 STRIP_NOPS (arg1);
6779
6780 if (TREE_CONSTANT (arg1))
6781 return 0;
6782 if (TREE_CONSTANT (arg0))
6783 return 1;
6784
6785 if (reorder && flag_evaluation_order
6786 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6787 return 0;
6788
6789 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6790 for commutative and comparison operators. Ensuring a canonical
6791 form allows the optimizers to find additional redundancies without
6792 having to explicitly check for both orderings. */
6793 if (TREE_CODE (arg0) == SSA_NAME
6794 && TREE_CODE (arg1) == SSA_NAME
6795 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6796 return 1;
6797
6798 /* Put SSA_NAMEs last. */
6799 if (TREE_CODE (arg1) == SSA_NAME)
6800 return 0;
6801 if (TREE_CODE (arg0) == SSA_NAME)
6802 return 1;
6803
6804 /* Put variables last. */
6805 if (DECL_P (arg1))
6806 return 0;
6807 if (DECL_P (arg0))
6808 return 1;
6809
6810 return 0;
6811 }
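
/* For example, fold_binary_loc consults this predicate to canonicalize
   commutative operations: for "1 + x" the constant first operand makes
   it return 1, the operands are swapped, and later patterns need only
   match the "x + 1" form.  */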
6812
6813 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6814 ARG0 is extended to a wider type. */
6815
6816 static tree
6817 fold_widened_comparison (location_t loc, enum tree_code code,
6818 tree type, tree arg0, tree arg1)
6819 {
6820 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6821 tree arg1_unw;
6822 tree shorter_type, outer_type;
6823 tree min, max;
6824 bool above, below;
6825
6826 if (arg0_unw == arg0)
6827 return NULL_TREE;
6828 shorter_type = TREE_TYPE (arg0_unw);
6829
6830 #ifdef HAVE_canonicalize_funcptr_for_compare
6831 /* Disable this optimization if we're casting a function pointer
6832 type on targets that require function pointer canonicalization. */
6833 if (HAVE_canonicalize_funcptr_for_compare
6834 && TREE_CODE (shorter_type) == POINTER_TYPE
6835 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6836 return NULL_TREE;
6837 #endif
6838
6839 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6840 return NULL_TREE;
6841
6842 arg1_unw = get_unwidened (arg1, NULL_TREE);
6843
6844 /* If possible, express the comparison in the shorter mode. */
6845 if ((code == EQ_EXPR || code == NE_EXPR
6846 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6847 && (TREE_TYPE (arg1_unw) == shorter_type
6848 || ((TYPE_PRECISION (shorter_type)
6849 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6850 && (TYPE_UNSIGNED (shorter_type)
6851 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6852 || (TREE_CODE (arg1_unw) == INTEGER_CST
6853 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6854 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6855 && int_fits_type_p (arg1_unw, shorter_type))))
6856 return fold_build2_loc (loc, code, type, arg0_unw,
6857 fold_convert_loc (loc, shorter_type, arg1_unw));
6858
6859 if (TREE_CODE (arg1_unw) != INTEGER_CST
6860 || TREE_CODE (shorter_type) != INTEGER_TYPE
6861 || !int_fits_type_p (arg1_unw, TREE_TYPE (arg0)))
6862 return NULL_TREE;
6863
6864 /* If we are comparing with an integer that does not fit into the range
6865 of the shorter type, the result is known. */
6866 outer_type = TREE_TYPE (arg1_unw);
6867 min = lower_bound_in_type (outer_type, shorter_type);
6868 max = upper_bound_in_type (outer_type, shorter_type);
6869
6870 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6871 max, arg1_unw));
6872 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6873 arg1_unw, min));
6874
6875 switch (code)
6876 {
6877 case EQ_EXPR:
6878 if (above || below)
6879 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6880 break;
6881
6882 case NE_EXPR:
6883 if (above || below)
6884 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6885 break;
6886
6887 case LT_EXPR:
6888 case LE_EXPR:
6889 if (above)
6890 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6891 else if (below)
6892 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6893
6894 case GT_EXPR:
6895 case GE_EXPR:
6896 if (above)
6897 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6898 else if (below)
6899 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6900
6901 default:
6902 break;
6903 }
6904
6905 return NULL_TREE;
6906 }
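
/* For instance, with "unsigned char c", the constant in "(int) c < 300"
   is above the range of the narrow type, so the comparison folds to
   constant true; "(int) c == 300" likewise folds to constant false.  */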
6907
6908 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6909 ARG0 just the signedness is changed. */
6910
6911 static tree
6912 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6913 tree arg0, tree arg1)
6914 {
6915 tree arg0_inner;
6916 tree inner_type, outer_type;
6917
6918 if (!CONVERT_EXPR_P (arg0))
6919 return NULL_TREE;
6920
6921 outer_type = TREE_TYPE (arg0);
6922 arg0_inner = TREE_OPERAND (arg0, 0);
6923 inner_type = TREE_TYPE (arg0_inner);
6924
6925 #ifdef HAVE_canonicalize_funcptr_for_compare
6926 /* Disable this optimization if we're casting a function pointer
6927 type on targets that require function pointer canonicalization. */
6928 if (HAVE_canonicalize_funcptr_for_compare
6929 && TREE_CODE (inner_type) == POINTER_TYPE
6930 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6931 return NULL_TREE;
6932 #endif
6933
6934 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6935 return NULL_TREE;
6936
6937 if (TREE_CODE (arg1) != INTEGER_CST
6938 && !(CONVERT_EXPR_P (arg1)
6939 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6940 return NULL_TREE;
6941
6942 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6943 && code != NE_EXPR
6944 && code != EQ_EXPR)
6945 return NULL_TREE;
6946
6947 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6948 return NULL_TREE;
6949
6950 if (TREE_CODE (arg1) == INTEGER_CST)
6951 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6952 TREE_OVERFLOW (arg1));
6953 else
6954 arg1 = fold_convert_loc (loc, inner_type, arg1);
6955
6956 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6957 }
6958
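/* A worked example for fold_sign_changed_comparison (an illustrative
   sketch for exposition, not from the sources): for "int x", the test

     (unsigned int) x == 5u

   passes the checks above (same precision, equality code), the
   constant 5u is refit into "int" by force_fit_type, and the result
   is plain "x == 5".  An ordering comparison such as "<" would be
   rejected, because changing the signedness changes its meaning.  */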
6959
6960 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6961 means A >= Y && A != MAX, but in this case we know that
6962 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6963
6964 static tree
6965 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6966 {
6967 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6968
6969 if (TREE_CODE (bound) == LT_EXPR)
6970 a = TREE_OPERAND (bound, 0);
6971 else if (TREE_CODE (bound) == GT_EXPR)
6972 a = TREE_OPERAND (bound, 1);
6973 else
6974 return NULL_TREE;
6975
6976 typea = TREE_TYPE (a);
6977 if (!INTEGRAL_TYPE_P (typea)
6978 && !POINTER_TYPE_P (typea))
6979 return NULL_TREE;
6980
6981 if (TREE_CODE (ineq) == LT_EXPR)
6982 {
6983 a1 = TREE_OPERAND (ineq, 1);
6984 y = TREE_OPERAND (ineq, 0);
6985 }
6986 else if (TREE_CODE (ineq) == GT_EXPR)
6987 {
6988 a1 = TREE_OPERAND (ineq, 0);
6989 y = TREE_OPERAND (ineq, 1);
6990 }
6991 else
6992 return NULL_TREE;
6993
6994 if (TREE_TYPE (a1) != typea)
6995 return NULL_TREE;
6996
6997 if (POINTER_TYPE_P (typea))
6998 {
6999 /* Convert the pointer types to integers before taking the difference. */
7000 tree ta = fold_convert_loc (loc, ssizetype, a);
7001 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7002 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7003 }
7004 else
7005 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7006
7007 if (!diff || !integer_onep (diff))
7008 return NULL_TREE;
7009
7010 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7011 }
7012
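/* A worked example for fold_to_nonsharp_ineq_using_bound (an
   illustrative sketch for exposition, not from the sources): in

     a < x && a + 1 > y

   BOUND is "a < x" and INEQ is "a + 1 > y".  The difference
   (a + 1) - a folds to 1, so INEQ is replaced by "a >= y" and the
   whole expression becomes "a < x && a >= y".  */
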
7013 /* Fold a sum or difference of at least one multiplication.
7014 Returns the folded tree or NULL_TREE if no simplification could be made. */
7015
7016 static tree
7017 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7018 tree arg0, tree arg1)
7019 {
7020 tree arg00, arg01, arg10, arg11;
7021 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7022
7023 /* (A * C) +- (B * C) -> (A+-B) * C.
7024 (A * C) +- A -> A * (C+-1).
7025 We are most concerned about the case where C is a constant,
7026 but other combinations show up during loop reduction. Since
7027 it is not difficult, try all four possibilities. */
7028
7029 if (TREE_CODE (arg0) == MULT_EXPR)
7030 {
7031 arg00 = TREE_OPERAND (arg0, 0);
7032 arg01 = TREE_OPERAND (arg0, 1);
7033 }
7034 else if (TREE_CODE (arg0) == INTEGER_CST)
7035 {
7036 arg00 = build_one_cst (type);
7037 arg01 = arg0;
7038 }
7039 else
7040 {
7041 /* We cannot generate a constant 1 for fixed-point (fract) types. */
7042 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7043 return NULL_TREE;
7044 arg00 = arg0;
7045 arg01 = build_one_cst (type);
7046 }
7047 if (TREE_CODE (arg1) == MULT_EXPR)
7048 {
7049 arg10 = TREE_OPERAND (arg1, 0);
7050 arg11 = TREE_OPERAND (arg1, 1);
7051 }
7052 else if (TREE_CODE (arg1) == INTEGER_CST)
7053 {
7054 arg10 = build_one_cst (type);
7055 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7056 the purpose of this canonicalization. */
7057 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7058 && negate_expr_p (arg1)
7059 && code == PLUS_EXPR)
7060 {
7061 arg11 = negate_expr (arg1);
7062 code = MINUS_EXPR;
7063 }
7064 else
7065 arg11 = arg1;
7066 }
7067 else
7068 {
7069 /* We cannot generate a constant 1 for fixed-point (fract) types. */
7070 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7071 return NULL_TREE;
7072 arg10 = arg1;
7073 arg11 = build_one_cst (type);
7074 }
7075 same = NULL_TREE;
7076
7077 if (operand_equal_p (arg01, arg11, 0))
7078 same = arg01, alt0 = arg00, alt1 = arg10;
7079 else if (operand_equal_p (arg00, arg10, 0))
7080 same = arg00, alt0 = arg01, alt1 = arg11;
7081 else if (operand_equal_p (arg00, arg11, 0))
7082 same = arg00, alt0 = arg01, alt1 = arg10;
7083 else if (operand_equal_p (arg01, arg10, 0))
7084 same = arg01, alt0 = arg00, alt1 = arg11;
7085
7086 /* No identical multiplicands; see if we can find a common
7087 power-of-two factor in non-power-of-two multiplies. This
7088 can help in multi-dimensional array access. */
7089 else if (tree_fits_shwi_p (arg01)
7090 && tree_fits_shwi_p (arg11))
7091 {
7092 HOST_WIDE_INT int01, int11, tmp;
7093 bool swap = false;
7094 tree maybe_same;
7095 int01 = tree_to_shwi (arg01);
7096 int11 = tree_to_shwi (arg11);
7097
7098 /* Move min of absolute values to int11. */
7099 if (absu_hwi (int01) < absu_hwi (int11))
7100 {
7101 tmp = int01, int01 = int11, int11 = tmp;
7102 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7103 maybe_same = arg01;
7104 swap = true;
7105 }
7106 else
7107 maybe_same = arg11;
7108
7109 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7110 /* The remainder should not be a constant, otherwise we
7111 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which would
7112 increase the number of multiplications necessary. */
7113 && TREE_CODE (arg10) != INTEGER_CST)
7114 {
7115 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7116 build_int_cst (TREE_TYPE (arg00),
7117 int01 / int11));
7118 alt1 = arg10;
7119 same = maybe_same;
7120 if (swap)
7121 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7122 }
7123 }
7124
7125 if (same)
7126 return fold_build2_loc (loc, MULT_EXPR, type,
7127 fold_build2_loc (loc, code, type,
7128 fold_convert_loc (loc, type, alt0),
7129 fold_convert_loc (loc, type, alt1)),
7130 fold_convert_loc (loc, type, same));
7131
7132 return NULL_TREE;
7133 }
7134
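/* Worked examples for fold_plusminus_mult_expr (an illustrative
   sketch for exposition, not from the sources):

     x*4 + x    ->  (4 + 1) * x  ->  x * 5     (the A*C +- A case)
     i*4 + j*8  ->  (i + j*2) * 4              (power-of-two factor)

   The second form matters for multi-dimensional array accesses whose
   strides share a power-of-two factor.  */
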
7135 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7136 specified by EXPR into the buffer PTR of length LEN bytes.
7137 Return the number of bytes placed in the buffer, or zero
7138 upon failure. */
7139
7140 static int
7141 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7142 {
7143 tree type = TREE_TYPE (expr);
7144 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7145 int byte, offset, word, words;
7146 unsigned char value;
7147
7148 if ((off == -1 && total_bytes > len)
7149 || off >= total_bytes)
7150 return 0;
7151 if (off == -1)
7152 off = 0;
7153 words = total_bytes / UNITS_PER_WORD;
7154
7155 for (byte = 0; byte < total_bytes; byte++)
7156 {
7157 int bitpos = byte * BITS_PER_UNIT;
7158 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7159 number of bytes. */
7160 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7161
7162 if (total_bytes > UNITS_PER_WORD)
7163 {
7164 word = byte / UNITS_PER_WORD;
7165 if (WORDS_BIG_ENDIAN)
7166 word = (words - 1) - word;
7167 offset = word * UNITS_PER_WORD;
7168 if (BYTES_BIG_ENDIAN)
7169 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7170 else
7171 offset += byte % UNITS_PER_WORD;
7172 }
7173 else
7174 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7175 if (offset >= off
7176 && offset - off < len)
7177 ptr[offset - off] = value;
7178 }
7179 return MIN (len, total_bytes - off);
7180 }
7181
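/* A host-side sketch of the byte placement in native_encode_int for
   the common case total_bytes <= UNITS_PER_WORD (illustrative only;
   "v" stands in for the extended wide_int value):

     for (int byte = 0; byte < total_bytes; byte++)
       {
         unsigned char value = (unsigned char) (v >> (byte * 8));
         int offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
         ptr[offset] = value;
       }

   When the constant spans several target words, the word computation
   above additionally reorders whole words for WORDS_BIG_ENDIAN, and
   the OFF/LEN window restricts which bytes are actually stored.  */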
7182
7183 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7184 specified by EXPR into the buffer PTR of length LEN bytes.
7185 Return the number of bytes placed in the buffer, or zero
7186 upon failure. */
7187
7188 static int
7189 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7190 {
7191 tree type = TREE_TYPE (expr);
7192 machine_mode mode = TYPE_MODE (type);
7193 int total_bytes = GET_MODE_SIZE (mode);
7194 FIXED_VALUE_TYPE value;
7195 tree i_value, i_type;
7196
7197 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7198 return 0;
7199
7200 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7201
7202 if (NULL_TREE == i_type
7203 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7204 return 0;
7205
7206 value = TREE_FIXED_CST (expr);
7207 i_value = double_int_to_tree (i_type, value.data);
7208
7209 return native_encode_int (i_value, ptr, len, off);
7210 }
7211
7212
7213 /* Subroutine of native_encode_expr. Encode the REAL_CST
7214 specified by EXPR into the buffer PTR of length LEN bytes.
7215 Return the number of bytes placed in the buffer, or zero
7216 upon failure. */
7217
7218 static int
7219 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7220 {
7221 tree type = TREE_TYPE (expr);
7222 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7223 int byte, offset, word, words, bitpos;
7224 unsigned char value;
7225
7226 /* There are always 32 bits in each long, no matter the size of
7227 the host's long. We handle floating point representations with
7228 up to 192 bits. */
7229 long tmp[6];
7230
7231 if ((off == -1 && total_bytes > len)
7232 || off >= total_bytes)
7233 return 0;
7234 if (off == -1)
7235 off = 0;
7236 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7237
7238 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7239
7240 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7241 bitpos += BITS_PER_UNIT)
7242 {
7243 byte = (bitpos / BITS_PER_UNIT) & 3;
7244 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7245
7246 if (UNITS_PER_WORD < 4)
7247 {
7248 word = byte / UNITS_PER_WORD;
7249 if (WORDS_BIG_ENDIAN)
7250 word = (words - 1) - word;
7251 offset = word * UNITS_PER_WORD;
7252 if (BYTES_BIG_ENDIAN)
7253 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7254 else
7255 offset += byte % UNITS_PER_WORD;
7256 }
7257 else
7258 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7259 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7260 if (offset >= off
7261 && offset - off < len)
7262 ptr[offset - off] = value;
7263 }
7264 return MIN (len, total_bytes - off);
7265 }
7266
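/* A worked example for native_encode_real (an illustrative sketch
   for exposition, not from the sources): for the float constant 1.0,
   real_to_target yields tmp[0] == 0x3f800000 and the loop above
   emits the bytes

     00 00 80 3f   on a little-endian target
     3f 80 00 00   on a big-endian target ("3 - byte" per group)

   since the buffer is always filled in 32-bit groups regardless of
   the width of the host "long".  */
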
7267 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7268 specified by EXPR into the buffer PTR of length LEN bytes.
7269 Return the number of bytes placed in the buffer, or zero
7270 upon failure. */
7271
7272 static int
7273 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7274 {
7275 int rsize, isize;
7276 tree part;
7277
7278 part = TREE_REALPART (expr);
7279 rsize = native_encode_expr (part, ptr, len, off);
7280 if (off == -1
7281 && rsize == 0)
7282 return 0;
7283 part = TREE_IMAGPART (expr);
7284 if (off != -1)
7285 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7286 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7287 if (off == -1
7288 && isize != rsize)
7289 return 0;
7290 return rsize + isize;
7291 }
7292
7293
7294 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7295 specified by EXPR into the buffer PTR of length LEN bytes.
7296 Return the number of bytes placed in the buffer, or zero
7297 upon failure. */
7298
7299 static int
7300 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7301 {
7302 unsigned i, count;
7303 int size, offset;
7304 tree itype, elem;
7305
7306 offset = 0;
7307 count = VECTOR_CST_NELTS (expr);
7308 itype = TREE_TYPE (TREE_TYPE (expr));
7309 size = GET_MODE_SIZE (TYPE_MODE (itype));
7310 for (i = 0; i < count; i++)
7311 {
7312 if (off >= size)
7313 {
7314 off -= size;
7315 continue;
7316 }
7317 elem = VECTOR_CST_ELT (expr, i);
7318 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7319 if ((off == -1 && res != size)
7320 || res == 0)
7321 return 0;
7322 offset += res;
7323 if (offset >= len)
7324 return offset;
7325 if (off != -1)
7326 off = 0;
7327 }
7328 return offset;
7329 }
7330
7331
7332 /* Subroutine of native_encode_expr. Encode the STRING_CST
7333 specified by EXPR into the buffer PTR of length LEN bytes.
7334 Return the number of bytes placed in the buffer, or zero
7335 upon failure. */
7336
7337 static int
7338 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7339 {
7340 tree type = TREE_TYPE (expr);
7341 HOST_WIDE_INT total_bytes;
7342
7343 if (TREE_CODE (type) != ARRAY_TYPE
7344 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7345 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7346 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7347 return 0;
7348 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7349 if ((off == -1 && total_bytes > len)
7350 || off >= total_bytes)
7351 return 0;
7352 if (off == -1)
7353 off = 0;
7354 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7355 {
7356 int written = 0;
7357 if (off < TREE_STRING_LENGTH (expr))
7358 {
7359 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7360 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7361 }
7362 memset (ptr + written, 0,
7363 MIN (total_bytes - written, len - written));
7364 }
7365 else
7366 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7367 return MIN (total_bytes - off, len);
7368 }
7369
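/* A worked example for native_encode_string (an illustrative sketch
   for exposition, not from the sources): encoding the STRING_CST
   initializer of

     char buf[8] = "hi";

   copies the three bytes 'h', 'i', '\0' (TREE_STRING_LENGTH is 3)
   and zero-fills the remaining five, since TYPE_SIZE_UNIT of the
   array type is 8.  */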
7370
7371 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7372 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7373 buffer PTR of length LEN bytes. If OFF is not -1 then start
7374 the encoding at byte offset OFF and encode at most LEN bytes.
7375 Return the number of bytes placed in the buffer, or zero upon failure. */
7376
7377 int
7378 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7379 {
7380 switch (TREE_CODE (expr))
7381 {
7382 case INTEGER_CST:
7383 return native_encode_int (expr, ptr, len, off);
7384
7385 case REAL_CST:
7386 return native_encode_real (expr, ptr, len, off);
7387
7388 case FIXED_CST:
7389 return native_encode_fixed (expr, ptr, len, off);
7390
7391 case COMPLEX_CST:
7392 return native_encode_complex (expr, ptr, len, off);
7393
7394 case VECTOR_CST:
7395 return native_encode_vector (expr, ptr, len, off);
7396
7397 case STRING_CST:
7398 return native_encode_string (expr, ptr, len, off);
7399
7400 default:
7401 return 0;
7402 }
7403 }
7404
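/* A typical use of the encode/interpret pair (an illustrative
   sketch, not from the sources), mirroring fold_view_convert_expr
   further below:

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf), -1);
     if (len != 0)
       result = native_interpret_expr (type, buf, len);

   Passing -1 for OFF requests the whole value; a non-negative OFF
   selects a byte window, which callers use for partial reads.  */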
7405
7406 /* Subroutine of native_interpret_expr. Interpret the contents of
7407 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7408 If the buffer cannot be interpreted, return NULL_TREE. */
7409
7410 static tree
7411 native_interpret_int (tree type, const unsigned char *ptr, int len)
7412 {
7413 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7414
7415 if (total_bytes > len
7416 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7417 return NULL_TREE;
7418
7419 wide_int result = wi::from_buffer (ptr, total_bytes);
7420
7421 return wide_int_to_tree (type, result);
7422 }
7423
7424
7425 /* Subroutine of native_interpret_expr. Interpret the contents of
7426 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7427 If the buffer cannot be interpreted, return NULL_TREE. */
7428
7429 static tree
7430 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7431 {
7432 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7433 double_int result;
7434 FIXED_VALUE_TYPE fixed_value;
7435
7436 if (total_bytes > len
7437 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7438 return NULL_TREE;
7439
7440 result = double_int::from_buffer (ptr, total_bytes);
7441 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7442
7443 return build_fixed (type, fixed_value);
7444 }
7445
7446
7447 /* Subroutine of native_interpret_expr. Interpret the contents of
7448 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7449 If the buffer cannot be interpreted, return NULL_TREE. */
7450
7451 static tree
7452 native_interpret_real (tree type, const unsigned char *ptr, int len)
7453 {
7454 machine_mode mode = TYPE_MODE (type);
7455 int total_bytes = GET_MODE_SIZE (mode);
7456 int byte, offset, word, words, bitpos;
7457 unsigned char value;
7458 /* There are always 32 bits in each long, no matter the size of
7459 the host's long. We handle floating point representations with
7460 up to 192 bits. */
7461 REAL_VALUE_TYPE r;
7462 long tmp[6];
7463
7465 if (total_bytes > len || total_bytes > 24)
7466 return NULL_TREE;
7467 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7468
7469 memset (tmp, 0, sizeof (tmp));
7470 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7471 bitpos += BITS_PER_UNIT)
7472 {
7473 byte = (bitpos / BITS_PER_UNIT) & 3;
7474 if (UNITS_PER_WORD < 4)
7475 {
7476 word = byte / UNITS_PER_WORD;
7477 if (WORDS_BIG_ENDIAN)
7478 word = (words - 1) - word;
7479 offset = word * UNITS_PER_WORD;
7480 if (BYTES_BIG_ENDIAN)
7481 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7482 else
7483 offset += byte % UNITS_PER_WORD;
7484 }
7485 else
7486 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7487 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7488
7489 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7490 }
7491
7492 real_from_target (&r, tmp, mode);
7493 return build_real (type, r);
7494 }
7495
7496
7497 /* Subroutine of native_interpret_expr. Interpret the contents of
7498 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7499 If the buffer cannot be interpreted, return NULL_TREE. */
7500
7501 static tree
7502 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7503 {
7504 tree etype, rpart, ipart;
7505 int size;
7506
7507 etype = TREE_TYPE (type);
7508 size = GET_MODE_SIZE (TYPE_MODE (etype));
7509 if (size * 2 > len)
7510 return NULL_TREE;
7511 rpart = native_interpret_expr (etype, ptr, size);
7512 if (!rpart)
7513 return NULL_TREE;
7514 ipart = native_interpret_expr (etype, ptr+size, size);
7515 if (!ipart)
7516 return NULL_TREE;
7517 return build_complex (type, rpart, ipart);
7518 }
7519
7520
7521 /* Subroutine of native_interpret_expr. Interpret the contents of
7522 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7523 If the buffer cannot be interpreted, return NULL_TREE. */
7524
7525 static tree
7526 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7527 {
7528 tree etype, elem;
7529 int i, size, count;
7530 tree *elements;
7531
7532 etype = TREE_TYPE (type);
7533 size = GET_MODE_SIZE (TYPE_MODE (etype));
7534 count = TYPE_VECTOR_SUBPARTS (type);
7535 if (size * count > len)
7536 return NULL_TREE;
7537
7538 elements = XALLOCAVEC (tree, count);
7539 for (i = count - 1; i >= 0; i--)
7540 {
7541 elem = native_interpret_expr (etype, ptr+(i*size), size);
7542 if (!elem)
7543 return NULL_TREE;
7544 elements[i] = elem;
7545 }
7546 return build_vector (type, elements);
7547 }
7548
7549
7550 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7551 the buffer PTR of length LEN as a constant of type TYPE. For
7552 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7553 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7554 return NULL_TREE. */
7555
7556 tree
7557 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7558 {
7559 switch (TREE_CODE (type))
7560 {
7561 case INTEGER_TYPE:
7562 case ENUMERAL_TYPE:
7563 case BOOLEAN_TYPE:
7564 case POINTER_TYPE:
7565 case REFERENCE_TYPE:
7566 return native_interpret_int (type, ptr, len);
7567
7568 case REAL_TYPE:
7569 return native_interpret_real (type, ptr, len);
7570
7571 case FIXED_POINT_TYPE:
7572 return native_interpret_fixed (type, ptr, len);
7573
7574 case COMPLEX_TYPE:
7575 return native_interpret_complex (type, ptr, len);
7576
7577 case VECTOR_TYPE:
7578 return native_interpret_vector (type, ptr, len);
7579
7580 default:
7581 return NULL_TREE;
7582 }
7583 }
7584
7585 /* Returns true if we can interpret the contents of a native encoding
7586 as TYPE. */
7587
7588 static bool
7589 can_native_interpret_type_p (tree type)
7590 {
7591 switch (TREE_CODE (type))
7592 {
7593 case INTEGER_TYPE:
7594 case ENUMERAL_TYPE:
7595 case BOOLEAN_TYPE:
7596 case POINTER_TYPE:
7597 case REFERENCE_TYPE:
7598 case FIXED_POINT_TYPE:
7599 case REAL_TYPE:
7600 case COMPLEX_TYPE:
7601 case VECTOR_TYPE:
7602 return true;
7603 default:
7604 return false;
7605 }
7606 }
7607
7608 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7609 TYPE at compile-time. If we're unable to perform the conversion
7610 return NULL_TREE. */
7611
7612 static tree
7613 fold_view_convert_expr (tree type, tree expr)
7614 {
7615 /* We support up to 512-bit values (for V8DFmode). */
7616 unsigned char buffer[64];
7617 int len;
7618
7619 /* Check that the host and target are sane. */
7620 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7621 return NULL_TREE;
7622
7623 len = native_encode_expr (expr, buffer, sizeof (buffer));
7624 if (len == 0)
7625 return NULL_TREE;
7626
7627 return native_interpret_expr (type, buffer, len);
7628 }
7629
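/* A worked example for fold_view_convert_expr (an illustrative
   sketch for exposition, not from the sources): on a target with
   IEEE single floats,

     VIEW_CONVERT_EXPR<int>(1.0f)

   encodes 1.0f into the buffer and reinterprets its four bytes as
   the INTEGER_CST 0x3f800000.  */
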
7630 /* Build an expression for the address of T. Folds away INDIRECT_REF
7631 to avoid confusing the gimplify process. */
7632
7633 tree
7634 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7635 {
7636 /* The size of the object is not relevant when talking about its address. */
7637 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7638 t = TREE_OPERAND (t, 0);
7639
7640 if (TREE_CODE (t) == INDIRECT_REF)
7641 {
7642 t = TREE_OPERAND (t, 0);
7643
7644 if (TREE_TYPE (t) != ptrtype)
7645 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7646 }
7647 else if (TREE_CODE (t) == MEM_REF
7648 && integer_zerop (TREE_OPERAND (t, 1)))
7649 return TREE_OPERAND (t, 0);
7650 else if (TREE_CODE (t) == MEM_REF
7651 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7652 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7653 TREE_OPERAND (t, 0),
7654 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7655 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7656 {
7657 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7658
7659 if (TREE_TYPE (t) != ptrtype)
7660 t = fold_convert_loc (loc, ptrtype, t);
7661 }
7662 else
7663 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7664
7665 return t;
7666 }
7667
7668 /* Build an expression for the address of T. */
7669
7670 tree
7671 build_fold_addr_expr_loc (location_t loc, tree t)
7672 {
7673 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7674
7675 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7676 }
7677
7678 /* Fold a unary expression of code CODE and type TYPE with operand
7679 OP0. Return the folded expression if folding is successful.
7680 Otherwise, return NULL_TREE. */
7681
7682 tree
7683 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7684 {
7685 tree tem;
7686 tree arg0;
7687 enum tree_code_class kind = TREE_CODE_CLASS (code);
7688
7689 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7690 && TREE_CODE_LENGTH (code) == 1);
7691
7692 arg0 = op0;
7693 if (arg0)
7694 {
7695 if (CONVERT_EXPR_CODE_P (code)
7696 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7697 {
7698 /* Don't use STRIP_NOPS, because signedness of argument type
7699 matters. */
7700 STRIP_SIGN_NOPS (arg0);
7701 }
7702 else
7703 {
7704 /* Strip any conversions that don't change the mode. This
7705 is safe for every expression, except for a comparison
7706 expression because its signedness is derived from its
7707 operands.
7708
7709 Note that this is done as an internal manipulation within
7710 the constant folder, in order to find the simplest
7711 representation of the arguments so that their form can be
7712 studied. In any case, the appropriate type conversions
7713 should be put back in the tree that will get out of the
7714 constant folder. */
7715 STRIP_NOPS (arg0);
7716 }
7717
7718 if (CONSTANT_CLASS_P (arg0))
7719 {
7720 tree tem = const_unop (code, type, arg0);
7721 if (tem)
7722 {
7723 if (TREE_TYPE (tem) != type)
7724 tem = fold_convert_loc (loc, type, tem);
7725 return tem;
7726 }
7727 }
7728 }
7729
7730 tem = generic_simplify (loc, code, type, op0);
7731 if (tem)
7732 return tem;
7733
7734 if (TREE_CODE_CLASS (code) == tcc_unary)
7735 {
7736 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7737 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7738 fold_build1_loc (loc, code, type,
7739 fold_convert_loc (loc, TREE_TYPE (op0),
7740 TREE_OPERAND (arg0, 1))));
7741 else if (TREE_CODE (arg0) == COND_EXPR)
7742 {
7743 tree arg01 = TREE_OPERAND (arg0, 1);
7744 tree arg02 = TREE_OPERAND (arg0, 2);
7745 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7746 arg01 = fold_build1_loc (loc, code, type,
7747 fold_convert_loc (loc,
7748 TREE_TYPE (op0), arg01));
7749 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7750 arg02 = fold_build1_loc (loc, code, type,
7751 fold_convert_loc (loc,
7752 TREE_TYPE (op0), arg02));
7753 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7754 arg01, arg02);
7755
7756 /* If this was a conversion, and all we did was to move it
7757 inside the COND_EXPR, bring it back out. But leave it if
7758 it is a conversion from integer to integer and the
7759 result precision is no wider than a word since such a
7760 conversion is cheap and may be optimized away by combine,
7761 while it couldn't if it were outside the COND_EXPR. Then return
7762 so we don't get into an infinite recursion loop taking the
7763 conversion out and then back in. */
7764
7765 if ((CONVERT_EXPR_CODE_P (code)
7766 || code == NON_LVALUE_EXPR)
7767 && TREE_CODE (tem) == COND_EXPR
7768 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7769 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7770 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7771 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7772 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7773 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7774 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7775 && (INTEGRAL_TYPE_P
7776 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7777 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7778 || flag_syntax_only))
7779 tem = build1_loc (loc, code, type,
7780 build3 (COND_EXPR,
7781 TREE_TYPE (TREE_OPERAND
7782 (TREE_OPERAND (tem, 1), 0)),
7783 TREE_OPERAND (tem, 0),
7784 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7785 TREE_OPERAND (TREE_OPERAND (tem, 2),
7786 0)));
7787 return tem;
7788 }
7789 }
7790
7791 switch (code)
7792 {
7793 case NON_LVALUE_EXPR:
7794 if (!maybe_lvalue_p (op0))
7795 return fold_convert_loc (loc, type, op0);
7796 return NULL_TREE;
7797
7798 CASE_CONVERT:
7799 case FLOAT_EXPR:
7800 case FIX_TRUNC_EXPR:
7801 if (COMPARISON_CLASS_P (op0))
7802 {
7803 /* If we have (type) (a CMP b) and type is an integral type, return
7804 new expression involving the new type. Canonicalize
7805 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7806 non-integral type.
7807 Do not fold the result as that would not simplify further; also,
7808 folding again results in infinite recursion. */
7809 if (TREE_CODE (type) == BOOLEAN_TYPE)
7810 return build2_loc (loc, TREE_CODE (op0), type,
7811 TREE_OPERAND (op0, 0),
7812 TREE_OPERAND (op0, 1));
7813 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7814 && TREE_CODE (type) != VECTOR_TYPE)
7815 return build3_loc (loc, COND_EXPR, type, op0,
7816 constant_boolean_node (true, type),
7817 constant_boolean_node (false, type));
7818 }
7819
7820 /* Handle (T *)&A.B.C for A being of type T and B and C
7821 living at offset zero. This occurs frequently in
7822 C++ upcasting and then accessing the base. */
7823 if (TREE_CODE (op0) == ADDR_EXPR
7824 && POINTER_TYPE_P (type)
7825 && handled_component_p (TREE_OPERAND (op0, 0)))
7826 {
7827 HOST_WIDE_INT bitsize, bitpos;
7828 tree offset;
7829 machine_mode mode;
7830 int unsignedp, volatilep;
7831 tree base = TREE_OPERAND (op0, 0);
7832 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7833 &mode, &unsignedp, &volatilep, false);
7834 /* If the reference was to a (constant) zero offset, we can use
7835 the address of the base if it has the same base type
7836 as the result type and the pointer type is unqualified. */
7837 if (! offset && bitpos == 0
7838 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7839 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7840 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7841 return fold_convert_loc (loc, type,
7842 build_fold_addr_expr_loc (loc, base));
7843 }
7844
7845 if (TREE_CODE (op0) == MODIFY_EXPR
7846 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7847 /* Detect assigning a bitfield. */
7848 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7849 && DECL_BIT_FIELD
7850 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7851 {
7852 /* Don't leave an assignment inside a conversion
7853 unless assigning a bitfield. */
7854 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7855 /* First do the assignment, then return converted constant. */
7856 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7857 TREE_NO_WARNING (tem) = 1;
7858 TREE_USED (tem) = 1;
7859 return tem;
7860 }
7861
7862 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7863 constant (if x has signed type, the sign bit cannot be set
7864 in c). This folds extension into the BIT_AND_EXPR.
7865 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7866 very likely don't have maximal range for their precision and this
7867 transformation effectively doesn't preserve non-maximal ranges. */
7868 if (TREE_CODE (type) == INTEGER_TYPE
7869 && TREE_CODE (op0) == BIT_AND_EXPR
7870 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7871 {
7872 tree and_expr = op0;
7873 tree and0 = TREE_OPERAND (and_expr, 0);
7874 tree and1 = TREE_OPERAND (and_expr, 1);
7875 int change = 0;
7876
7877 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7878 || (TYPE_PRECISION (type)
7879 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7880 change = 1;
7881 else if (TYPE_PRECISION (TREE_TYPE (and1))
7882 <= HOST_BITS_PER_WIDE_INT
7883 && tree_fits_uhwi_p (and1))
7884 {
7885 unsigned HOST_WIDE_INT cst;
7886
7887 cst = tree_to_uhwi (and1);
7888 cst &= HOST_WIDE_INT_M1U
7889 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7890 change = (cst == 0);
7891 #ifdef LOAD_EXTEND_OP
7892 if (change
7893 && !flag_syntax_only
7894 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7895 == ZERO_EXTEND))
7896 {
7897 tree uns = unsigned_type_for (TREE_TYPE (and0));
7898 and0 = fold_convert_loc (loc, uns, and0);
7899 and1 = fold_convert_loc (loc, uns, and1);
7900 }
7901 #endif
7902 }
7903 if (change)
7904 {
7905 tem = force_fit_type (type, wi::to_widest (and1), 0,
7906 TREE_OVERFLOW (and1));
7907 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7908 fold_convert_loc (loc, type, and0), tem);
7909 }
7910 }
7911
7912 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7913 when one of the new casts will fold away. Conservatively we assume
7914 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7915 if (POINTER_TYPE_P (type)
7916 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7917 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7918 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7919 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7920 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7921 {
7922 tree arg00 = TREE_OPERAND (arg0, 0);
7923 tree arg01 = TREE_OPERAND (arg0, 1);
7924
7925 return fold_build_pointer_plus_loc
7926 (loc, fold_convert_loc (loc, type, arg00), arg01);
7927 }
7928
7929 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7930 of the same precision, and X is an integer type not narrower than
7931 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7932 if (INTEGRAL_TYPE_P (type)
7933 && TREE_CODE (op0) == BIT_NOT_EXPR
7934 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7935 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7936 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7937 {
7938 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7939 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7940 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7941 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7942 fold_convert_loc (loc, type, tem));
7943 }
7944
7945 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7946 type of X and Y (integer types only). */
7947 if (INTEGRAL_TYPE_P (type)
7948 && TREE_CODE (op0) == MULT_EXPR
7949 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7950 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7951 {
7952 /* Be careful not to introduce new overflows. */
7953 tree mult_type;
7954 if (TYPE_OVERFLOW_WRAPS (type))
7955 mult_type = type;
7956 else
7957 mult_type = unsigned_type_for (type);
7958
7959 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7960 {
7961 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7962 fold_convert_loc (loc, mult_type,
7963 TREE_OPERAND (op0, 0)),
7964 fold_convert_loc (loc, mult_type,
7965 TREE_OPERAND (op0, 1)));
7966 return fold_convert_loc (loc, type, tem);
7967 }
7968 }
7969
7970 return NULL_TREE;
7971
7972 case VIEW_CONVERT_EXPR:
7973 if (TREE_CODE (op0) == MEM_REF)
7974 return fold_build2_loc (loc, MEM_REF, type,
7975 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7976
7977 return NULL_TREE;
7978
7979 case NEGATE_EXPR:
7980 tem = fold_negate_expr (loc, arg0);
7981 if (tem)
7982 return fold_convert_loc (loc, type, tem);
7983 return NULL_TREE;
7984
7985 case ABS_EXPR:
7986 /* Convert fabs((double)float) into (double)fabsf(float). */
7987 if (TREE_CODE (arg0) == NOP_EXPR
7988 && TREE_CODE (type) == REAL_TYPE)
7989 {
7990 tree targ0 = strip_float_extensions (arg0);
7991 if (targ0 != arg0)
7992 return fold_convert_loc (loc, type,
7993 fold_build1_loc (loc, ABS_EXPR,
7994 TREE_TYPE (targ0),
7995 targ0));
7996 }
7997 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7998 else if (TREE_CODE (arg0) == ABS_EXPR)
7999 return arg0;
8000
8001 /* Strip sign ops from argument. */
8002 if (TREE_CODE (type) == REAL_TYPE)
8003 {
8004 tem = fold_strip_sign_ops (arg0);
8005 if (tem)
8006 return fold_build1_loc (loc, ABS_EXPR, type,
8007 fold_convert_loc (loc, type, tem));
8008 }
8009 return NULL_TREE;
8010
8011 case CONJ_EXPR:
8012 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8013 return fold_convert_loc (loc, type, arg0);
8014 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8015 {
8016 tree itype = TREE_TYPE (type);
8017 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8018 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8019 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8020 negate_expr (ipart));
8021 }
8022 if (TREE_CODE (arg0) == CONJ_EXPR)
8023 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8024 return NULL_TREE;
8025
8026 case BIT_NOT_EXPR:
8027 /* Convert ~ (-A) to A - 1. */
8028 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8029 return fold_build2_loc (loc, MINUS_EXPR, type,
8030 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8031 build_int_cst (type, 1));
8032 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8033 else if (INTEGRAL_TYPE_P (type)
8034 && ((TREE_CODE (arg0) == MINUS_EXPR
8035 && integer_onep (TREE_OPERAND (arg0, 1)))
8036 || (TREE_CODE (arg0) == PLUS_EXPR
8037 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8038 return fold_build1_loc (loc, NEGATE_EXPR, type,
8039 fold_convert_loc (loc, type,
8040 TREE_OPERAND (arg0, 0)));
8041 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8042 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8043 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8044 fold_convert_loc (loc, type,
8045 TREE_OPERAND (arg0, 0)))))
8046 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8047 fold_convert_loc (loc, type,
8048 TREE_OPERAND (arg0, 1)));
8049 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8050 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8051 fold_convert_loc (loc, type,
8052 TREE_OPERAND (arg0, 1)))))
8053 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8054 fold_convert_loc (loc, type,
8055 TREE_OPERAND (arg0, 0)), tem);
8056
8057 return NULL_TREE;
8058
8059 case TRUTH_NOT_EXPR:
8060 /* Note that the operand of this must be an int
8061 and its values must be 0 or 1.
8062 ("true" is a fixed value perhaps depending on the language,
8063 but we don't handle values other than 1 correctly yet.) */
8064 tem = fold_truth_not_expr (loc, arg0);
8065 if (!tem)
8066 return NULL_TREE;
8067 return fold_convert_loc (loc, type, tem);
8068
8069 case REALPART_EXPR:
8070 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8071 return fold_convert_loc (loc, type, arg0);
8072 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8073 {
8074 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8075 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8076 fold_build1_loc (loc, REALPART_EXPR, itype,
8077 TREE_OPERAND (arg0, 0)),
8078 fold_build1_loc (loc, REALPART_EXPR, itype,
8079 TREE_OPERAND (arg0, 1)));
8080 return fold_convert_loc (loc, type, tem);
8081 }
8082 if (TREE_CODE (arg0) == CONJ_EXPR)
8083 {
8084 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8085 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8086 TREE_OPERAND (arg0, 0));
8087 return fold_convert_loc (loc, type, tem);
8088 }
8089 if (TREE_CODE (arg0) == CALL_EXPR)
8090 {
8091 tree fn = get_callee_fndecl (arg0);
8092 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8093 switch (DECL_FUNCTION_CODE (fn))
8094 {
8095 CASE_FLT_FN (BUILT_IN_CEXPI):
8096 fn = mathfn_built_in (type, BUILT_IN_COS);
8097 if (fn)
8098 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8099 break;
8100
8101 default:
8102 break;
8103 }
8104 }
8105 return NULL_TREE;
8106
8107 case IMAGPART_EXPR:
8108 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8109 return build_zero_cst (type);
8110 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8111 {
8112 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8113 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8114 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8115 TREE_OPERAND (arg0, 0)),
8116 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8117 TREE_OPERAND (arg0, 1)));
8118 return fold_convert_loc (loc, type, tem);
8119 }
8120 if (TREE_CODE (arg0) == CONJ_EXPR)
8121 {
8122 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8123 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8124 return fold_convert_loc (loc, type, negate_expr (tem));
8125 }
8126 if (TREE_CODE (arg0) == CALL_EXPR)
8127 {
8128 tree fn = get_callee_fndecl (arg0);
8129 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8130 switch (DECL_FUNCTION_CODE (fn))
8131 {
8132 CASE_FLT_FN (BUILT_IN_CEXPI):
8133 fn = mathfn_built_in (type, BUILT_IN_SIN);
8134 if (fn)
8135 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8136 break;
8137
8138 default:
8139 break;
8140 }
8141 }
8142 return NULL_TREE;
8143
8144 case INDIRECT_REF:
8145 /* Fold *&X to X if X is an lvalue. */
8146 if (TREE_CODE (op0) == ADDR_EXPR)
8147 {
8148 tree op00 = TREE_OPERAND (op0, 0);
8149 if ((TREE_CODE (op00) == VAR_DECL
8150 || TREE_CODE (op00) == PARM_DECL
8151 || TREE_CODE (op00) == RESULT_DECL)
8152 && !TREE_READONLY (op00))
8153 return op00;
8154 }
8155 return NULL_TREE;
8156
8157 default:
8158 return NULL_TREE;
8159 } /* switch (code) */
8160 }
8161
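/* Worked examples for fold_unary_loc (an illustrative sketch for
   exposition, not from the sources):

     ~(-a)               ->  a - 1        (BIT_NOT_EXPR case)
     ~(a - 1)            ->  -a
     __real__ cexpi (x)  ->  cos (x)      (REALPART_EXPR case)
     *&x                 ->  x            (x a non-readonly lvalue)  */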
8162
8163 /* If the operation was a conversion do _not_ mark a resulting constant
8164 with TREE_OVERFLOW if the original constant was not. These conversions
8165 have implementation defined behavior and retaining the TREE_OVERFLOW
8166 flag here would confuse later passes such as VRP. */
8167 tree
8168 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8169 tree type, tree op0)
8170 {
8171 tree res = fold_unary_loc (loc, code, type, op0);
8172 if (res
8173 && TREE_CODE (res) == INTEGER_CST
8174 && TREE_CODE (op0) == INTEGER_CST
8175 && CONVERT_EXPR_CODE_P (code))
8176 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8177
8178 return res;
8179 }
8180
8181 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8182 operands OP0 and OP1. LOC is the location of the resulting expression.
8183 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8184 Return the folded expression if folding is successful. Otherwise,
8185 return NULL_TREE. */
8186 static tree
8187 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8188 tree arg0, tree arg1, tree op0, tree op1)
8189 {
8190 tree tem;
8191
8192 /* We only do these simplifications if we are optimizing. */
8193 if (!optimize)
8194 return NULL_TREE;
8195
8196 /* Check for things like (A || B) && (A || C). We can convert this
8197 to A || (B && C). Note that either operator can be any of the four
8198 truth and/or operations and the transformation will still be
8199 valid. Also note that we only care about order for the
8200 ANDIF and ORIF operators. If B contains side effects, this
8201 might change the truth-value of A. */
8202 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8203 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8204 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8205 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8206 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8207 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8208 {
8209 tree a00 = TREE_OPERAND (arg0, 0);
8210 tree a01 = TREE_OPERAND (arg0, 1);
8211 tree a10 = TREE_OPERAND (arg1, 0);
8212 tree a11 = TREE_OPERAND (arg1, 1);
8213 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8214 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8215 && (code == TRUTH_AND_EXPR
8216 || code == TRUTH_OR_EXPR));
8217
8218 if (operand_equal_p (a00, a10, 0))
8219 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8220 fold_build2_loc (loc, code, type, a01, a11));
8221 else if (commutative && operand_equal_p (a00, a11, 0))
8222 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8223 fold_build2_loc (loc, code, type, a01, a10));
8224 else if (commutative && operand_equal_p (a01, a10, 0))
8225 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8226 fold_build2_loc (loc, code, type, a00, a11));
8227
8228 /* This case is tricky because we must either have commutative
8229 operators or else A10 must not have side-effects. */
8230
8231 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8232 && operand_equal_p (a01, a11, 0))
8233 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8234 fold_build2_loc (loc, code, type, a00, a10),
8235 a01);
8236 }
8237
8238 /* See if we can build a range comparison. */
8239 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8240 return tem;
8241
8242 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8243 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8244 {
8245 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8246 if (tem)
8247 return fold_build2_loc (loc, code, type, tem, arg1);
8248 }
8249
8250 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8251 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8252 {
8253 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8254 if (tem)
8255 return fold_build2_loc (loc, code, type, arg0, tem);
8256 }
8257
8258 /* Check for the possibility of merging component references. If our
8259 lhs is another similar operation, try to merge its rhs with our
8260 rhs. Then try to merge our lhs and rhs. */
8261 if (TREE_CODE (arg0) == code
8262 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8263 TREE_OPERAND (arg0, 1), arg1)))
8264 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8265
8266 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8267 return tem;
8268
8269 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8270 && (code == TRUTH_AND_EXPR
8271 || code == TRUTH_ANDIF_EXPR
8272 || code == TRUTH_OR_EXPR
8273 || code == TRUTH_ORIF_EXPR))
8274 {
8275 enum tree_code ncode, icode;
8276
8277 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8278 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8279 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8280
8281 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8282 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8283 We don't want to pack more than two leaves into a non-IF AND/OR
8284 expression.
8285 If the tree code of the left-hand operand isn't an AND/OR-IF code
8286 and isn't equal to IF-CODE, then we don't add the right-hand operand.
8287 If the inner right-hand side of the left-hand operand has
8288 side effects, or isn't simple, then we can't add to it, as
8289 otherwise we might destroy the if-sequence. */
8290 if (TREE_CODE (arg0) == icode
8291 && simple_operand_p_2 (arg1)
8292 /* Needed for sequence points to handle trapping and
8293 side effects. */
8294 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8295 {
8296 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8297 arg1);
8298 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8299 tem);
8300 }
8301 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8302 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8303 else if (TREE_CODE (arg1) == icode
8304 && simple_operand_p_2 (arg0)
8305 /* Needed for sequence points to handle trapping and
8306 side effects. */
8307 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8308 {
8309 tem = fold_build2_loc (loc, ncode, type,
8310 arg0, TREE_OPERAND (arg1, 0));
8311 return fold_build2_loc (loc, icode, type, tem,
8312 TREE_OPERAND (arg1, 1));
8313 }
8314 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8315 into (A OR B).
8316 For sequence point consistency, we need to check for trapping
8317 and side effects. */
8318 else if (code == icode && simple_operand_p_2 (arg0)
8319 && simple_operand_p_2 (arg1))
8320 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8321 }
8322
8323 return NULL_TREE;
8324 }
8325
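/* Worked examples for fold_truth_andor (an illustrative sketch for
   exposition, not from the sources), assuming the operands have no
   side effects:

     (a || b) && (a || c)  ->  a || (b && c)

   and, on LOGICAL_OP_NON_SHORT_CIRCUIT targets,

     (a && b) && c  ->  a && (b AND c)

   where the inner AND is the non-short-circuit TRUTH_AND_EXPR, so at
   most two leaves end up under each non-IF operator.  */
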
8326 /* Fold a binary expression of code CODE and type TYPE with operands
8327 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8328 Return the folded expression if folding is successful. Otherwise,
8329 return NULL_TREE. */
8330
8331 static tree
8332 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8333 {
8334 enum tree_code compl_code;
8335
8336 if (code == MIN_EXPR)
8337 compl_code = MAX_EXPR;
8338 else if (code == MAX_EXPR)
8339 compl_code = MIN_EXPR;
8340 else
8341 gcc_unreachable ();
8342
8343 /* MIN (MAX (a, b), b) == b. */
8344 if (TREE_CODE (op0) == compl_code
8345 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8346 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8347
8348 /* MIN (MAX (b, a), b) == b. */
8349 if (TREE_CODE (op0) == compl_code
8350 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8351 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8352 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8353
8354 /* MIN (a, MAX (a, b)) == a. */
8355 if (TREE_CODE (op1) == compl_code
8356 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8357 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8358 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8359
8360 /* MIN (a, MAX (b, a)) == a. */
8361 if (TREE_CODE (op1) == compl_code
8362 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8363 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8364 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8365
8366 return NULL_TREE;
8367 }
8368
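/* A worked example for fold_minmax (an illustrative sketch for
   exposition, not from the sources): MIN (MAX (a, b), b) is always
   "b": when a >= b the MAX picks a and the MIN then picks b,
   otherwise both pick b.  The omit_one_operand_loc calls keep any
   side effects of the dropped subexpression.  */
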
8369 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8370 by changing CODE to reduce the magnitude of constants involved in
8371 ARG0 of the comparison.
8372 Returns a canonicalized comparison tree if a simplification was
8373 possible, otherwise returns NULL_TREE.
8374 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8375 valid if signed overflow is undefined. */
8376
8377 static tree
8378 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8379 tree arg0, tree arg1,
8380 bool *strict_overflow_p)
8381 {
8382 enum tree_code code0 = TREE_CODE (arg0);
8383 tree t, cst0 = NULL_TREE;
8384 int sgn0;
8385 bool swap = false;
8386
8387 /* Match A +- CST code arg1 and CST code arg1. We can change the
8388 first form only if overflow is undefined. */
8389 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8390 /* In principle pointers also have undefined overflow behavior,
8391 but that causes problems elsewhere. */
8392 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8393 && (code0 == MINUS_EXPR
8394 || code0 == PLUS_EXPR)
8395 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8396 || code0 == INTEGER_CST))
8397 return NULL_TREE;
8398
8399 /* Identify the constant in arg0 and its sign. */
8400 if (code0 == INTEGER_CST)
8401 cst0 = arg0;
8402 else
8403 cst0 = TREE_OPERAND (arg0, 1);
8404 sgn0 = tree_int_cst_sgn (cst0);
8405
8406 /* Overflowed constants and zero will cause problems. */
8407 if (integer_zerop (cst0)
8408 || TREE_OVERFLOW (cst0))
8409 return NULL_TREE;
8410
8411 /* See if we can reduce the magnitude of the constant in
8412 arg0 by changing the comparison code. */
8413 if (code0 == INTEGER_CST)
8414 {
8415 /* CST <= arg1 -> CST-1 < arg1. */
8416 if (code == LE_EXPR && sgn0 == 1)
8417 code = LT_EXPR;
8418 /* -CST < arg1 -> -CST-1 <= arg1. */
8419 else if (code == LT_EXPR && sgn0 == -1)
8420 code = LE_EXPR;
8421 /* CST > arg1 -> CST-1 >= arg1. */
8422 else if (code == GT_EXPR && sgn0 == 1)
8423 code = GE_EXPR;
8424 /* -CST >= arg1 -> -CST-1 > arg1. */
8425 else if (code == GE_EXPR && sgn0 == -1)
8426 code = GT_EXPR;
8427 else
8428 return NULL_TREE;
8429 /* arg1 code' CST' might be more canonical. */
8430 swap = true;
8431 }
8432 else
8433 {
8434 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8435 if (code == LT_EXPR
8436 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8437 code = LE_EXPR;
8438 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8439 else if (code == GT_EXPR
8440 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8441 code = GE_EXPR;
8442 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8443 else if (code == LE_EXPR
8444 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8445 code = LT_EXPR;
8446 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8447 else if (code == GE_EXPR
8448 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8449 code = GT_EXPR;
8450 else
8451 return NULL_TREE;
8452 *strict_overflow_p = true;
8453 }
8454
8455 /* Now build the constant reduced in magnitude. But not if that
8456 would produce one outside of its type's range. */
8457 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8458 && ((sgn0 == 1
8459 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8460 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8461 || (sgn0 == -1
8462 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8463 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8464 /* We cannot swap the comparison here as that would cause us to
8465 endlessly recurse. */
8466 return NULL_TREE;
8467
8468 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8469 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8470 if (code0 != INTEGER_CST)
8471 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8472 t = fold_convert (TREE_TYPE (arg1), t);
8473
8474 /* If swapping might yield a more canonical form, do so. */
8475 if (swap)
8476 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8477 else
8478 return fold_build2_loc (loc, code, type, t, arg1);
8479 }
8480
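/* Worked examples for maybe_canonicalize_comparison_1 (an
   illustrative sketch for exposition, not from the sources):

     4 <= x      ->  x > 3        (constant form, comparison swapped)
     a - 10 < x  ->  a - 9 <= x   (only if signed overflow is
                                   undefined; *STRICT_OVERFLOW_P set)

   Each step shrinks the magnitude of the constant by one while
   trading a strict comparison for a non-strict one or vice versa.  */
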
8481 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8482 overflow further. Try to decrease the magnitude of constants involved
8483 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8484 and put sole constants at the second argument position.
8485 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8486
8487 static tree
8488 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8489 tree arg0, tree arg1)
8490 {
8491 tree t;
8492 bool strict_overflow_p;
8493 const char * const warnmsg = G_("assuming signed overflow does not occur "
8494 "when reducing constant in comparison");
8495
8496 /* Try canonicalization by simplifying arg0. */
8497 strict_overflow_p = false;
8498 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8499 &strict_overflow_p);
8500 if (t)
8501 {
8502 if (strict_overflow_p)
8503 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8504 return t;
8505 }
8506
8507 /* Try canonicalization by simplifying arg1 using the swapped
8508 comparison. */
8509 code = swap_tree_comparison (code);
8510 strict_overflow_p = false;
8511 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8512 &strict_overflow_p);
8513 if (t && strict_overflow_p)
8514 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8515 return t;
8516 }
8517
8518 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8519 space. This is used to avoid issuing overflow warnings for
8520 expressions like &p->x which cannot wrap. */
8521
8522 static bool
8523 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8524 {
8525 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8526 return true;
8527
8528 if (bitpos < 0)
8529 return true;
8530
8531 wide_int wi_offset;
8532 int precision = TYPE_PRECISION (TREE_TYPE (base));
8533 if (offset == NULL_TREE)
8534 wi_offset = wi::zero (precision);
8535 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8536 return true;
8537 else
8538 wi_offset = offset;
8539
8540 bool overflow;
8541 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8542 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8543 if (overflow)
8544 return true;
8545
8546 if (!wi::fits_uhwi_p (total))
8547 return true;
8548
8549 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8550 if (size <= 0)
8551 return true;
8552
8553 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8554 array. */
8555 if (TREE_CODE (base) == ADDR_EXPR)
8556 {
8557 HOST_WIDE_INT base_size;
8558
8559 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8560 if (base_size > 0 && size < base_size)
8561 size = base_size;
8562 }
8563
8564 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8565 }
8566
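/* A worked example for pointer_may_wrap_p (an illustrative sketch
   for exposition, not from the sources): for "&p->x" where "*p" is a
   16-byte struct and the field sits at bit position 64, UNITS is 8;
   the sum 0 + 8 fits in an unsigned HOST_WIDE_INT and 8 <= 16, so
   the function returns false and no spurious overflow warning is
   issued for the address arithmetic.  */
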
8567 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8568 kind INTEGER_CST. This makes sure to properly sign-extend the
8569 constant. */
8570
8571 static HOST_WIDE_INT
8572 size_low_cst (const_tree t)
8573 {
8574 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8575 int prec = TYPE_PRECISION (TREE_TYPE (t));
8576 if (prec < HOST_BITS_PER_WIDE_INT)
8577 return sext_hwi (w, prec);
8578 return w;
8579 }
8580
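/* A host-side sketch of the sign extension done by sext_hwi above
   (illustrative only; assumes the bits of W above PREC are zero):

     static long long
     sext (unsigned long long w, int prec)
     {
       unsigned long long sign = 1ULL << (prec - 1);
       return (long long) ((w ^ sign) - sign);
     }

   e.g. sext (0xff, 8) == -1 while sext (0x7f, 8) == 127.  */
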
8581 /* Subroutine of fold_binary. This routine performs all of the
8582 transformations that are common to the equality/inequality
8583 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8584 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8585 fold_binary should call fold_binary. Fold a comparison with
8586 tree code CODE and type TYPE with operands OP0 and OP1. Return
8587 the folded comparison or NULL_TREE. */
8588
8589 static tree
8590 fold_comparison (location_t loc, enum tree_code code, tree type,
8591 tree op0, tree op1)
8592 {
8593 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8594 tree arg0, arg1, tem;
8595
8596 arg0 = op0;
8597 arg1 = op1;
8598
8599 STRIP_SIGN_NOPS (arg0);
8600 STRIP_SIGN_NOPS (arg1);
8601
8602 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8603 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8604 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8605 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8606 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8607 && TREE_CODE (arg1) == INTEGER_CST
8608 && !TREE_OVERFLOW (arg1))
8609 {
8610 const enum tree_code
8611 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8612 tree const1 = TREE_OPERAND (arg0, 1);
8613 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8614 tree variable = TREE_OPERAND (arg0, 0);
8615 tree new_const = int_const_binop (reverse_op, const2, const1);
8616
8617 /* If the constant operation overflowed, this can be
8618 simplified as a comparison against INT_MAX/INT_MIN. */
8619 if (TREE_OVERFLOW (new_const)
8620 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8621 {
8622 int const1_sgn = tree_int_cst_sgn (const1);
8623 enum tree_code code2 = code;
8624
8625 /* Get the sign of the constant on the lhs if the
8626 operation were VARIABLE + CONST1. */
8627 if (TREE_CODE (arg0) == MINUS_EXPR)
8628 const1_sgn = -const1_sgn;
8629
8630 /* The sign of the constant determines if we overflowed
8631 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8632 Canonicalize to the INT_MIN overflow by swapping the comparison
8633 if necessary. */
8634 if (const1_sgn == -1)
8635 code2 = swap_tree_comparison (code);
8636
8637 /* We now can look at the canonicalized case
8638 VARIABLE + 1 CODE2 INT_MIN
8639 and decide on the result. */
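/* E.g. with 32-bit int, "X + 10 < INT_MIN + 5" would require X + 10
to overflow below INT_MIN, which is assumed not to happen, so the
whole comparison folds to false (an illustrative sketch). */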
8640 switch (code2)
8641 {
8642 case EQ_EXPR:
8643 case LT_EXPR:
8644 case LE_EXPR:
8645 return
8646 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8647
8648 case NE_EXPR:
8649 case GE_EXPR:
8650 case GT_EXPR:
8651 return
8652 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8653
8654 default:
8655 gcc_unreachable ();
8656 }
8657 }
8658 else
8659 {
8660 if (!equality_code)
8661 fold_overflow_warning ("assuming signed overflow does not occur "
8662 "when changing X +- C1 cmp C2 to "
8663 "X cmp C2 -+ C1",
8664 WARN_STRICT_OVERFLOW_COMPARISON);
8665 return fold_build2_loc (loc, code, type, variable, new_const);
8666 }
8667 }
8668
8669 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
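/* E.g. "X - Y == 0" becomes "X == Y". */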
8670 if (TREE_CODE (arg0) == MINUS_EXPR
8671 && equality_code
8672 && integer_zerop (arg1))
8673 {
8674 /* ??? The transformation is valid for the other operators if overflow
8675 is undefined for the type, but performing it here badly interacts
8676 with the transformation in fold_cond_expr_with_comparison which
8677 attempts to synthesize ABS_EXPR. */
8682 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8683 TREE_OPERAND (arg0, 1));
8684 }
8685
8686 /* A comparison of pointers can be decomposed into a compile-time
8687 comparison of the base objects and the offsets into the object.
8688 This requires at least one operand being an ADDR_EXPR or a
8689 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
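/* E.g. (a sketch, assuming 32-bit int): for "int a[4]",
"&a[1] < &a[2]" decomposes to equal bases with bit positions 32 and
64 and folds to true. */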
8690 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8691 && (TREE_CODE (arg0) == ADDR_EXPR
8692 || TREE_CODE (arg1) == ADDR_EXPR
8693 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8694 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8695 {
8696 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8697 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8698 machine_mode mode;
8699 int volatilep, unsignedp;
8700 bool indirect_base0 = false, indirect_base1 = false;
8701
8702 /* Get base and offset for the access. Strip ADDR_EXPR for
8703 get_inner_reference, but put it back by stripping INDIRECT_REF
8704 off the base object if possible. indirect_baseN will be true
8705 if baseN is not an address but refers to the object itself. */
8706 base0 = arg0;
8707 if (TREE_CODE (arg0) == ADDR_EXPR)
8708 {
8709 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8710 &bitsize, &bitpos0, &offset0, &mode,
8711 &unsignedp, &volatilep, false);
8712 if (TREE_CODE (base0) == INDIRECT_REF)
8713 base0 = TREE_OPERAND (base0, 0);
8714 else
8715 indirect_base0 = true;
8716 }
8717 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8718 {
8719 base0 = TREE_OPERAND (arg0, 0);
8720 STRIP_SIGN_NOPS (base0);
8721 if (TREE_CODE (base0) == ADDR_EXPR)
8722 {
8723 base0 = TREE_OPERAND (base0, 0);
8724 indirect_base0 = true;
8725 }
8726 offset0 = TREE_OPERAND (arg0, 1);
8727 if (tree_fits_shwi_p (offset0))
8728 {
8729 HOST_WIDE_INT off = size_low_cst (offset0);
8730 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8731 * BITS_PER_UNIT)
8732 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8733 {
8734 bitpos0 = off * BITS_PER_UNIT;
8735 offset0 = NULL_TREE;
8736 }
8737 }
8738 }
8739
8740 base1 = arg1;
8741 if (TREE_CODE (arg1) == ADDR_EXPR)
8742 {
8743 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8744 &bitsize, &bitpos1, &offset1, &mode,
8745 &unsignedp, &volatilep, false);
8746 if (TREE_CODE (base1) == INDIRECT_REF)
8747 base1 = TREE_OPERAND (base1, 0);
8748 else
8749 indirect_base1 = true;
8750 }
8751 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8752 {
8753 base1 = TREE_OPERAND (arg1, 0);
8754 STRIP_SIGN_NOPS (base1);
8755 if (TREE_CODE (base1) == ADDR_EXPR)
8756 {
8757 base1 = TREE_OPERAND (base1, 0);
8758 indirect_base1 = true;
8759 }
8760 offset1 = TREE_OPERAND (arg1, 1);
8761 if (tree_fits_shwi_p (offset1))
8762 {
8763 HOST_WIDE_INT off = size_low_cst (offset1);
8764 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8765 * BITS_PER_UNIT)
8766 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8767 {
8768 bitpos1 = off * BITS_PER_UNIT;
8769 offset1 = NULL_TREE;
8770 }
8771 }
8772 }
8773
8774 /* A local variable can never be pointed to by
8775 the default SSA name of an incoming parameter. */
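/* E.g. (sketch) in "int f (int *p) { int l; return p == &l; }" the
comparison folds to false, since P's incoming value cannot point
to L. */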
8776 if ((TREE_CODE (arg0) == ADDR_EXPR
8777 && indirect_base0
8778 && TREE_CODE (base0) == VAR_DECL
8779 && auto_var_in_fn_p (base0, current_function_decl)
8780 && !indirect_base1
8781 && TREE_CODE (base1) == SSA_NAME
8782 && SSA_NAME_IS_DEFAULT_DEF (base1)
8783 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8784 || (TREE_CODE (arg1) == ADDR_EXPR
8785 && indirect_base1
8786 && TREE_CODE (base1) == VAR_DECL
8787 && auto_var_in_fn_p (base1, current_function_decl)
8788 && !indirect_base0
8789 && TREE_CODE (base0) == SSA_NAME
8790 && SSA_NAME_IS_DEFAULT_DEF (base0)
8791 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8792 {
8793 if (code == NE_EXPR)
8794 return constant_boolean_node (1, type);
8795 else if (code == EQ_EXPR)
8796 return constant_boolean_node (0, type);
8797 }
8798 /* If we have equivalent bases we might be able to simplify. */
8799 else if (indirect_base0 == indirect_base1
8800 && operand_equal_p (base0, base1, 0))
8801 {
8802 /* We can fold this expression to a constant if the non-constant
8803 offset parts are equal. */
8804 if ((offset0 == offset1
8805 || (offset0 && offset1
8806 && operand_equal_p (offset0, offset1, 0)))
8807 && (code == EQ_EXPR
8808 || code == NE_EXPR
8809 || (indirect_base0 && DECL_P (base0))
8810 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8812 {
8813 if (!equality_code
8814 && bitpos0 != bitpos1
8815 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8816 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8817 fold_overflow_warning (("assuming pointer wraparound does not "
8818 "occur when comparing P +- C1 with "
8819 "P +- C2"),
8820 WARN_STRICT_OVERFLOW_CONDITIONAL);
8821
8822 switch (code)
8823 {
8824 case EQ_EXPR:
8825 return constant_boolean_node (bitpos0 == bitpos1, type);
8826 case NE_EXPR:
8827 return constant_boolean_node (bitpos0 != bitpos1, type);
8828 case LT_EXPR:
8829 return constant_boolean_node (bitpos0 < bitpos1, type);
8830 case LE_EXPR:
8831 return constant_boolean_node (bitpos0 <= bitpos1, type);
8832 case GE_EXPR:
8833 return constant_boolean_node (bitpos0 >= bitpos1, type);
8834 case GT_EXPR:
8835 return constant_boolean_node (bitpos0 > bitpos1, type);
8836 default:;
8837 }
8838 }
8839 /* We can simplify the comparison to a comparison of the variable
8840 offset parts if the constant offset parts are equal.
8841 Be careful to use signed sizetype here because otherwise we
8842 mess with array offsets in the wrong way. This is possible
8843 because pointer arithmetic is restricted to remain within an
8844 object and overflow on pointer differences is undefined as of
8845 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8846 else if (bitpos0 == bitpos1
8847 && (equality_code
8848 || (indirect_base0 && DECL_P (base0))
8849 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8850 {
8851 /* By converting to signed sizetype we cover middle-end pointer
8852 arithmetic, which operates on unsigned offsets of sizetype
8853 precision, and ARRAY_REF offsets, which are properly sign or
8854 zero extended from their type in case it is narrower than
8855 sizetype. */
8856 if (offset0 == NULL_TREE)
8857 offset0 = build_int_cst (ssizetype, 0);
8858 else
8859 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8860 if (offset1 == NULL_TREE)
8861 offset1 = build_int_cst (ssizetype, 0);
8862 else
8863 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8864
8865 if (!equality_code
8866 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8867 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8868 fold_overflow_warning (("assuming pointer wraparound does not "
8869 "occur when comparing P +- C1 with "
8870 "P +- C2"),
8871 WARN_STRICT_OVERFLOW_COMPARISON);
8872
8873 return fold_build2_loc (loc, code, type, offset0, offset1);
8874 }
8875 }
8876 /* For non-equal bases we can simplify if they are addresses
8877 of local binding decls or constants. */
8878 else if (indirect_base0 && indirect_base1
8879 /* We know that !operand_equal_p (base0, base1, 0)
8880 because the if condition was false. But make
8881 sure two decls are not the same. */
8882 && base0 != base1
8883 && TREE_CODE (arg0) == ADDR_EXPR
8884 && TREE_CODE (arg1) == ADDR_EXPR
8885 && (((TREE_CODE (base0) == VAR_DECL
8886 || TREE_CODE (base0) == PARM_DECL)
8887 && (targetm.binds_local_p (base0)
8888 || CONSTANT_CLASS_P (base1)))
8889 || CONSTANT_CLASS_P (base0))
8890 && (((TREE_CODE (base1) == VAR_DECL
8891 || TREE_CODE (base1) == PARM_DECL)
8892 && (targetm.binds_local_p (base1)
8893 || CONSTANT_CLASS_P (base0)))
8894 || CONSTANT_CLASS_P (base1)))
8895 {
8896 if (code == EQ_EXPR)
8897 return omit_two_operands_loc (loc, type, boolean_false_node,
8898 arg0, arg1);
8899 else if (code == NE_EXPR)
8900 return omit_two_operands_loc (loc, type, boolean_true_node,
8901 arg0, arg1);
8902 }
8903 /* For equal offsets we can simplify to a comparison of the
8904 base addresses. */
8905 else if (bitpos0 == bitpos1
8906 && (indirect_base0
8907 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8908 && (indirect_base1
8909 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8910 && ((offset0 == offset1)
8911 || (offset0 && offset1
8912 && operand_equal_p (offset0, offset1, 0))))
8913 {
8914 if (indirect_base0)
8915 base0 = build_fold_addr_expr_loc (loc, base0);
8916 if (indirect_base1)
8917 base1 = build_fold_addr_expr_loc (loc, base1);
8918 return fold_build2_loc (loc, code, type, base0, base1);
8919 }
8920 }
8921
8922 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8923 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8924 the resulting offset is smaller in absolute value than the
8925 original one and has the same sign. */
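/* E.g. "X + 5 < Y + 9" becomes "X < Y + 4": the new constant 4 is
smaller in absolute value than 9 and has the same sign. */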
8926 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8927 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8928 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8929 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8930 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8931 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8932 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8933 {
8934 tree const1 = TREE_OPERAND (arg0, 1);
8935 tree const2 = TREE_OPERAND (arg1, 1);
8936 tree variable1 = TREE_OPERAND (arg0, 0);
8937 tree variable2 = TREE_OPERAND (arg1, 0);
8938 tree cst;
8939 const char * const warnmsg = G_("assuming signed overflow does not "
8940 "occur when combining constants around "
8941 "a comparison");
8942
8943 /* Put the constant on the side where it doesn't overflow and is
8944 of lower absolute value and of the same sign as before. */
8945 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8946 ? MINUS_EXPR : PLUS_EXPR,
8947 const2, const1);
8948 if (!TREE_OVERFLOW (cst)
8949 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8950 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8951 {
8952 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8953 return fold_build2_loc (loc, code, type,
8954 variable1,
8955 fold_build2_loc (loc, TREE_CODE (arg1),
8956 TREE_TYPE (arg1),
8957 variable2, cst));
8958 }
8959
8960 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8961 ? MINUS_EXPR : PLUS_EXPR,
8962 const1, const2);
8963 if (!TREE_OVERFLOW (cst)
8964 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8965 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8966 {
8967 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8968 return fold_build2_loc (loc, code, type,
8969 fold_build2_loc (loc, TREE_CODE (arg0),
8970 TREE_TYPE (arg0),
8971 variable1, cst),
8972 variable2);
8973 }
8974 }
8975
8976 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8977 signed arithmetic case. That form is created by the compiler
8978 often enough for folding it to be of value. One example is in
8979 computing loop trip counts after Operator Strength Reduction. */
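/* E.g. "X * 4 > 0" becomes "X > 0", and "X * -2 > 0" becomes
"X < 0" because the negative multiplier swaps the sense of the
comparison. */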
8980 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8981 && TREE_CODE (arg0) == MULT_EXPR
8982 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8983 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8984 && integer_zerop (arg1))
8985 {
8986 tree const1 = TREE_OPERAND (arg0, 1);
8987 tree const2 = arg1; /* zero */
8988 tree variable1 = TREE_OPERAND (arg0, 0);
8989 enum tree_code cmp_code = code;
8990
8991 /* Handle unfolded multiplication by zero. */
8992 if (integer_zerop (const1))
8993 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8994
8995 fold_overflow_warning (("assuming signed overflow does not occur when "
8996 "eliminating multiplication in comparison "
8997 "with zero"),
8998 WARN_STRICT_OVERFLOW_COMPARISON);
8999
9000 /* If const1 is negative we swap the sense of the comparison. */
9001 if (tree_int_cst_sgn (const1) < 0)
9002 cmp_code = swap_tree_comparison (cmp_code);
9003
9004 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9005 }
9006
9007 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9008 if (tem)
9009 return tem;
9010
9011 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9012 {
9013 tree targ0 = strip_float_extensions (arg0);
9014 tree targ1 = strip_float_extensions (arg1);
9015 tree newtype = TREE_TYPE (targ0);
9016
9017 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9018 newtype = TREE_TYPE (targ1);
9019
9020 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9021 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9022 return fold_build2_loc (loc, code, type,
9023 fold_convert_loc (loc, newtype, targ0),
9024 fold_convert_loc (loc, newtype, targ1));
9025
9026 /* (-a) CMP (-b) -> b CMP a */
9027 if (TREE_CODE (arg0) == NEGATE_EXPR
9028 && TREE_CODE (arg1) == NEGATE_EXPR)
9029 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9030 TREE_OPERAND (arg0, 0));
9031
9032 if (TREE_CODE (arg1) == REAL_CST)
9033 {
9034 REAL_VALUE_TYPE cst;
9035 cst = TREE_REAL_CST (arg1);
9036
9037 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9038 if (TREE_CODE (arg0) == NEGATE_EXPR)
9039 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9040 TREE_OPERAND (arg0, 0),
9041 build_real (TREE_TYPE (arg1),
9042 real_value_negate (&cst)));
9043
9044 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9045 /* a CMP (-0) -> a CMP 0 */
9046 if (REAL_VALUE_MINUS_ZERO (cst))
9047 return fold_build2_loc (loc, code, type, arg0,
9048 build_real (TREE_TYPE (arg1), dconst0));
9049
9050 /* x != NaN is always true, other ops are always false. */
9051 if (REAL_VALUE_ISNAN (cst)
9052 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9053 {
9054 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9055 return omit_one_operand_loc (loc, type, tem, arg0);
9056 }
9057
9058 /* Fold comparisons against infinity. */
9059 if (REAL_VALUE_ISINF (cst)
9060 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9061 {
9062 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9063 if (tem != NULL_TREE)
9064 return tem;
9065 }
9066 }
9067
9068 /* If this is a comparison of a real constant with a PLUS_EXPR
9069 or a MINUS_EXPR of a real constant, we can convert it into a
9070 comparison with a revised real constant as long as no overflow
9071 occurs when unsafe_math_optimizations are enabled. */
9072 if (flag_unsafe_math_optimizations
9073 && TREE_CODE (arg1) == REAL_CST
9074 && (TREE_CODE (arg0) == PLUS_EXPR
9075 || TREE_CODE (arg0) == MINUS_EXPR)
9076 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9077 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9078 ? MINUS_EXPR : PLUS_EXPR,
9079 arg1, TREE_OPERAND (arg0, 1)))
9080 && !TREE_OVERFLOW (tem))
9081 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9082
9083 /* Likewise, we can simplify a comparison of a real constant with
9084 a MINUS_EXPR whose first operand is also a real constant, i.e.
9085 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9086 floating-point types only if -fassociative-math is set. */
9087 if (flag_associative_math
9088 && TREE_CODE (arg1) == REAL_CST
9089 && TREE_CODE (arg0) == MINUS_EXPR
9090 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9091 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9092 arg1))
9093 && !TREE_OVERFLOW (tem))
9094 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9095 TREE_OPERAND (arg0, 1), tem);
9096
9097 /* Fold comparisons against built-in math functions. */
9098 if (TREE_CODE (arg1) == REAL_CST
9099 && flag_unsafe_math_optimizations
9100 && ! flag_errno_math)
9101 {
9102 enum built_in_function fcode = builtin_mathfn_code (arg0);
9103
9104 if (fcode != END_BUILTINS)
9105 {
9106 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9107 if (tem != NULL_TREE)
9108 return tem;
9109 }
9110 }
9111 }
9112
9113 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9114 && CONVERT_EXPR_P (arg0))
9115 {
9116 /* If we are widening one operand of an integer comparison,
9117 see if the other operand is similarly being widened. Perhaps we
9118 can do the comparison in the narrower type. */
9119 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9120 if (tem)
9121 return tem;
9122
9123 /* Or if we are changing signedness. */
9124 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9125 if (tem)
9126 return tem;
9127 }
9128
9129 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9130 constant, we can simplify it. */
9131 if (TREE_CODE (arg1) == INTEGER_CST
9132 && (TREE_CODE (arg0) == MIN_EXPR
9133 || TREE_CODE (arg0) == MAX_EXPR)
9134 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9135 {
9136 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9137 if (tem)
9138 return tem;
9139 }
9140
9141 /* Simplify comparison of something with itself. (For IEEE
9142 floating-point, we can only do some of these simplifications.) */
9143 if (operand_equal_p (arg0, arg1, 0))
9144 {
9145 switch (code)
9146 {
9147 case EQ_EXPR:
9148 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9149 || ! HONOR_NANS (element_mode (arg0)))
9150 return constant_boolean_node (1, type);
9151 break;
9152
9153 case GE_EXPR:
9154 case LE_EXPR:
9155 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9156 || ! HONOR_NANS (element_mode (arg0)))
9157 return constant_boolean_node (1, type);
9158 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9159
9160 case NE_EXPR:
9161 /* For NE, we can only do this simplification if the type is
9162 integer or we don't honor IEEE floating-point NaNs. */
9163 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9164 && HONOR_NANS (element_mode (arg0)))
9165 break;
9166 /* ... fall through ... */
9167 case GT_EXPR:
9168 case LT_EXPR:
9169 return constant_boolean_node (0, type);
9170 default:
9171 gcc_unreachable ();
9172 }
9173 }
9174
9175 /* If we are comparing an expression that just has comparisons
9176 of two integer values, arithmetic expressions of those comparisons,
9177 and constants, we can simplify it. There are only three cases
9178 to check: the two values can either be equal, the first can be
9179 greater, or the second can be greater. Fold the expression for
9180 those three values. Since each value must be 0 or 1, we have
9181 eight possibilities, each of which corresponds to the constant 0
9182 or 1 or one of the six possible comparisons.
9183
9184 This handles common cases like (a > b) == 0 but also handles
9185 expressions like ((x > y) - (y > x)) > 0, which supposedly
9186 occur in macroized code. */
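/* Worked instance (a sketch): for "(a > b) == 0" the three
substitutions yield high_result 0, equal_result 1 and low_result 1,
i.e. mask 3 below, so the expression folds to "a <= b". */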
9187
9188 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9189 {
9190 tree cval1 = 0, cval2 = 0;
9191 int save_p = 0;
9192
9193 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9194 /* Don't handle degenerate cases here; they should already
9195 have been handled anyway. */
9196 && cval1 != 0 && cval2 != 0
9197 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9198 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9199 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9200 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9201 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9202 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9203 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9204 {
9205 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9206 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9207
9208 /* We can't just pass T to eval_subst in case cval1 or cval2
9209 was the same as ARG1. */
9210
9211 tree high_result
9212 = fold_build2_loc (loc, code, type,
9213 eval_subst (loc, arg0, cval1, maxval,
9214 cval2, minval),
9215 arg1);
9216 tree equal_result
9217 = fold_build2_loc (loc, code, type,
9218 eval_subst (loc, arg0, cval1, maxval,
9219 cval2, maxval),
9220 arg1);
9221 tree low_result
9222 = fold_build2_loc (loc, code, type,
9223 eval_subst (loc, arg0, cval1, minval,
9224 cval2, maxval),
9225 arg1);
9226
9227 /* All three of these results should be 0 or 1. Confirm they are.
9228 Then use those values to select the proper code to use. */
9229
9230 if (TREE_CODE (high_result) == INTEGER_CST
9231 && TREE_CODE (equal_result) == INTEGER_CST
9232 && TREE_CODE (low_result) == INTEGER_CST)
9233 {
9234 /* Make a 3-bit mask with the high-order bit being the
9235 value for `>', the next for `=', and the low for `<'. */
9236 switch ((integer_onep (high_result) * 4)
9237 + (integer_onep (equal_result) * 2)
9238 + integer_onep (low_result))
9239 {
9240 case 0:
9241 /* Always false. */
9242 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9243 case 1:
9244 code = LT_EXPR;
9245 break;
9246 case 2:
9247 code = EQ_EXPR;
9248 break;
9249 case 3:
9250 code = LE_EXPR;
9251 break;
9252 case 4:
9253 code = GT_EXPR;
9254 break;
9255 case 5:
9256 code = NE_EXPR;
9257 break;
9258 case 6:
9259 code = GE_EXPR;
9260 break;
9261 case 7:
9262 /* Always true. */
9263 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9264 }
9265
9266 if (save_p)
9267 {
9268 tem = save_expr (build2 (code, type, cval1, cval2));
9269 SET_EXPR_LOCATION (tem, loc);
9270 return tem;
9271 }
9272 return fold_build2_loc (loc, code, type, cval1, cval2);
9273 }
9274 }
9275 }
9276
9277 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9278 into a single range test. */
9279 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9280 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9281 && TREE_CODE (arg1) == INTEGER_CST
9282 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9283 && !integer_zerop (TREE_OPERAND (arg0, 1))
9284 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9285 && !TREE_OVERFLOW (arg1))
9286 {
9287 tem = fold_div_compare (loc, code, type, arg0, arg1);
9288 if (tem != NULL_TREE)
9289 return tem;
9290 }
9291
9292 /* Fold ~X op ~Y as Y op X. */
9293 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9294 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9295 {
9296 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9297 return fold_build2_loc (loc, code, type,
9298 fold_convert_loc (loc, cmp_type,
9299 TREE_OPERAND (arg1, 0)),
9300 TREE_OPERAND (arg0, 0));
9301 }
9302
9303 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9304 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9305 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9306 {
9307 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9308 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9309 TREE_OPERAND (arg0, 0),
9310 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9311 fold_convert_loc (loc, cmp_type, arg1)));
9312 }
9313
9314 return NULL_TREE;
9315 }
9316
9317
9318 /* Subroutine of fold_binary. Optimize complex multiplications of the
9319 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9320 argument EXPR represents the expression "z" of type TYPE. */
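/* E.g. for z = 3 + 4i this computes 3*3 + 4*4 = 25, with a zero
imaginary part. */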
9321
9322 static tree
9323 fold_mult_zconjz (location_t loc, tree type, tree expr)
9324 {
9325 tree itype = TREE_TYPE (type);
9326 tree rpart, ipart, tem;
9327
9328 if (TREE_CODE (expr) == COMPLEX_EXPR)
9329 {
9330 rpart = TREE_OPERAND (expr, 0);
9331 ipart = TREE_OPERAND (expr, 1);
9332 }
9333 else if (TREE_CODE (expr) == COMPLEX_CST)
9334 {
9335 rpart = TREE_REALPART (expr);
9336 ipart = TREE_IMAGPART (expr);
9337 }
9338 else
9339 {
9340 expr = save_expr (expr);
9341 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9342 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9343 }
9344
9345 rpart = save_expr (rpart);
9346 ipart = save_expr (ipart);
9347 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9348 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9349 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9350 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9351 build_zero_cst (itype));
9352 }
9353
9354
9355 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9356 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9357 guarantees that P and N have the same least significant log2(M) bits.
9358 N is not otherwise constrained. In particular, N is not normalized to
9359 0 <= N < M as is common. In general, the precise value of P is unknown.
9360 M is chosen as large as possible such that constant N can be determined.
9361
9362 Returns M and sets *RESIDUE to N.
9363
9364 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9365 account. This is not always possible due to PR 35705. */
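/* E.g. (a sketch, assuming BUF is a 16-byte-aligned char array):
for EXPR "&buf[5]" the result is M = 16 with *RESIDUE = 5, i.e. the
pointer value is congruent to 5 modulo 16. */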
9367
9368 static unsigned HOST_WIDE_INT
9369 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9370 bool allow_func_align)
9371 {
9372 enum tree_code code;
9373
9374 *residue = 0;
9375
9376 code = TREE_CODE (expr);
9377 if (code == ADDR_EXPR)
9378 {
9379 unsigned int bitalign;
9380 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9381 *residue /= BITS_PER_UNIT;
9382 return bitalign / BITS_PER_UNIT;
9383 }
9384 else if (code == POINTER_PLUS_EXPR)
9385 {
9386 tree op0, op1;
9387 unsigned HOST_WIDE_INT modulus;
9388 enum tree_code inner_code;
9389
9390 op0 = TREE_OPERAND (expr, 0);
9391 STRIP_NOPS (op0);
9392 modulus = get_pointer_modulus_and_residue (op0, residue,
9393 allow_func_align);
9394
9395 op1 = TREE_OPERAND (expr, 1);
9396 STRIP_NOPS (op1);
9397 inner_code = TREE_CODE (op1);
9398 if (inner_code == INTEGER_CST)
9399 {
9400 *residue += TREE_INT_CST_LOW (op1);
9401 return modulus;
9402 }
9403 else if (inner_code == MULT_EXPR)
9404 {
9405 op1 = TREE_OPERAND (op1, 1);
9406 if (TREE_CODE (op1) == INTEGER_CST)
9407 {
9408 unsigned HOST_WIDE_INT align;
9409
9410 /* Compute the greatest power-of-2 divisor of op1. */
9411 align = TREE_INT_CST_LOW (op1);
9412 align &= -align;
9413
9414 /* If align is non-zero and less than modulus, replace
9415 modulus with align. If align is 0, then either op1 is 0
9416 or the greatest power-of-2 divisor of op1 doesn't fit in an
9417 unsigned HOST_WIDE_INT. In either case, no additional
9418 constraint is imposed. */
9419 if (align)
9420 modulus = MIN (modulus, align);
9421
9422 return modulus;
9423 }
9424 }
9425 }
9426
9427 /* If we get here, we were unable to determine anything useful about the
9428 expression. */
9429 return 1;
9430 }
9431
9432 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9433 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9434
9435 static bool
9436 vec_cst_ctor_to_array (tree arg, tree *elts)
9437 {
9438 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9439
9440 if (TREE_CODE (arg) == VECTOR_CST)
9441 {
9442 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9443 elts[i] = VECTOR_CST_ELT (arg, i);
9444 }
9445 else if (TREE_CODE (arg) == CONSTRUCTOR)
9446 {
9447 constructor_elt *elt;
9448
9449 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9450 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9451 return false;
9452 else
9453 elts[i] = elt->value;
9454 }
9455 else
9456 return false;
9457 for (; i < nelts; i++)
9458 elts[i]
9459 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9460 return true;
9461 }
9462
9463 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9464 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9465 NULL_TREE otherwise. */
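/* E.g. (sketch) with four-element vectors, SEL = {0, 5, 2, 7}
selects elements 0 and 2 of ARG0 and elements 1 and 3 of ARG1;
indices of NELTS and above refer to ARG1. */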
9466
9467 static tree
9468 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9469 {
9470 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9471 tree *elts;
9472 bool need_ctor = false;
9473
9474 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9475 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9476 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9477 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9478 return NULL_TREE;
9479
9480 elts = XALLOCAVEC (tree, nelts * 3);
9481 if (!vec_cst_ctor_to_array (arg0, elts)
9482 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9483 return NULL_TREE;
9484
9485 for (i = 0; i < nelts; i++)
9486 {
9487 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9488 need_ctor = true;
9489 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9490 }
9491
9492 if (need_ctor)
9493 {
9494 vec<constructor_elt, va_gc> *v;
9495 vec_alloc (v, nelts);
9496 for (i = 0; i < nelts; i++)
9497 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9498 return build_constructor (type, v);
9499 }
9500 else
9501 return build_vector (type, &elts[2 * nelts]);
9502 }
9503
9504 /* Try to fold a pointer difference of type TYPE between two address
9505 expressions of array references AREF0 and AREF1 using location LOC.
9506 Return a simplified expression for the difference or NULL_TREE. */
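/* E.g. (sketch) "&a[i] - &a[j]" simplifies to
"(i - j) * sizeof (*a)" computed in TYPE. */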
9507
9508 static tree
9509 fold_addr_of_array_ref_difference (location_t loc, tree type,
9510 tree aref0, tree aref1)
9511 {
9512 tree base0 = TREE_OPERAND (aref0, 0);
9513 tree base1 = TREE_OPERAND (aref1, 0);
9514 tree base_offset = build_int_cst (type, 0);
9515
9516 /* If the bases are array references as well, recurse. If the bases
9517 are pointer indirections, compute the difference of the pointers.
9518 If the bases are equal, we are set. */
9519 if ((TREE_CODE (base0) == ARRAY_REF
9520 && TREE_CODE (base1) == ARRAY_REF
9521 && (base_offset
9522 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9523 || (INDIRECT_REF_P (base0)
9524 && INDIRECT_REF_P (base1)
9525 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9526 TREE_OPERAND (base0, 0),
9527 TREE_OPERAND (base1, 0))))
9528 || operand_equal_p (base0, base1, 0))
9529 {
9530 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9531 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9532 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9533 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9534 return fold_build2_loc (loc, PLUS_EXPR, type,
9535 base_offset,
9536 fold_build2_loc (loc, MULT_EXPR, type,
9537 diff, esz));
9538 }
9539 return NULL_TREE;
9540 }
9541
9542 /* If the real or vector real constant CST of type TYPE has an exact
9543 inverse, return it, else return NULL. */
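/* E.g. 0.25 has the exact inverse 4.0, while 3.0 has none, since
1/3 is not exactly representable in binary floating point. */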
9544
9545 tree
9546 exact_inverse (tree type, tree cst)
9547 {
9548 REAL_VALUE_TYPE r;
9549 tree unit_type, *elts;
9550 machine_mode mode;
9551 unsigned vec_nelts, i;
9552
9553 switch (TREE_CODE (cst))
9554 {
9555 case REAL_CST:
9556 r = TREE_REAL_CST (cst);
9557
9558 if (exact_real_inverse (TYPE_MODE (type), &r))
9559 return build_real (type, r);
9560
9561 return NULL_TREE;
9562
9563 case VECTOR_CST:
9564 vec_nelts = VECTOR_CST_NELTS (cst);
9565 elts = XALLOCAVEC (tree, vec_nelts);
9566 unit_type = TREE_TYPE (type);
9567 mode = TYPE_MODE (unit_type);
9568
9569 for (i = 0; i < vec_nelts; i++)
9570 {
9571 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9572 if (!exact_real_inverse (mode, &r))
9573 return NULL_TREE;
9574 elts[i] = build_real (unit_type, r);
9575 }
9576
9577 return build_vector (type, elts);
9578
9579 default:
9580 return NULL_TREE;
9581 }
9582 }
9583
9584 /* Mask out the tz least significant bits of X of type TYPE where
9585 tz is the number of trailing zeroes in Y. */
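/* E.g. if Y is 24 (binary ...11000, three trailing zeroes), the
result is X with its three least significant bits cleared. */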
9586 static wide_int
9587 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9588 {
9589 int tz = wi::ctz (y);
9590 if (tz > 0)
9591 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9592 return x;
9593 }
9594
9595 /* Return true when T is an address and is known to be nonzero.
9596 For floating point we further ensure that T is not denormal.
9597 Similar logic is present in nonzero_address in rtlanal.h.
9598
9599 If the return value is based on the assumption that signed overflow
9600 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9601 change *STRICT_OVERFLOW_P. */
9602
9603 static bool
9604 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9605 {
9606 tree type = TREE_TYPE (t);
9607 enum tree_code code;
9608
9609 /* Doing something useful for floating point would need more work. */
9610 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9611 return false;
9612
9613 code = TREE_CODE (t);
9614 switch (TREE_CODE_CLASS (code))
9615 {
9616 case tcc_unary:
9617 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9618 strict_overflow_p);
9619 case tcc_binary:
9620 case tcc_comparison:
9621 return tree_binary_nonzero_warnv_p (code, type,
9622 TREE_OPERAND (t, 0),
9623 TREE_OPERAND (t, 1),
9624 strict_overflow_p);
9625 case tcc_constant:
9626 case tcc_declaration:
9627 case tcc_reference:
9628 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9629
9630 default:
9631 break;
9632 }
9633
9634 switch (code)
9635 {
9636 case TRUTH_NOT_EXPR:
9637 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9638 strict_overflow_p);
9639
9640 case TRUTH_AND_EXPR:
9641 case TRUTH_OR_EXPR:
9642 case TRUTH_XOR_EXPR:
9643 return tree_binary_nonzero_warnv_p (code, type,
9644 TREE_OPERAND (t, 0),
9645 TREE_OPERAND (t, 1),
9646 strict_overflow_p);
9647
9648 case COND_EXPR:
9649 case CONSTRUCTOR:
9650 case OBJ_TYPE_REF:
9651 case ASSERT_EXPR:
9652 case ADDR_EXPR:
9653 case WITH_SIZE_EXPR:
9654 case SSA_NAME:
9655 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9656
9657 case COMPOUND_EXPR:
9658 case MODIFY_EXPR:
9659 case BIND_EXPR:
9660 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9661 strict_overflow_p);
9662
9663 case SAVE_EXPR:
9664 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9665 strict_overflow_p);
9666
9667 case CALL_EXPR:
9668 {
9669 tree fndecl = get_callee_fndecl (t);
9670 if (!fndecl) return false;
9671 if (flag_delete_null_pointer_checks && !flag_check_new
9672 && DECL_IS_OPERATOR_NEW (fndecl)
9673 && !TREE_NOTHROW (fndecl))
9674 return true;
9675 if (flag_delete_null_pointer_checks
9676 && lookup_attribute ("returns_nonnull",
9677 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9678 return true;
9679 return alloca_call_p (t);
9680 }
9681
9682 default:
9683 break;
9684 }
9685 return false;
9686 }
9687
9688 /* Return true when T is an address and is known to be nonzero.
9689 Handle warnings about undefined signed overflow. */
9690
9691 static bool
9692 tree_expr_nonzero_p (tree t)
9693 {
9694 bool ret, strict_overflow_p;
9695
9696 strict_overflow_p = false;
9697 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9698 if (strict_overflow_p)
9699 fold_overflow_warning (("assuming signed overflow does not occur when "
9700 "determining that expression is always "
9701 "non-zero"),
9702 WARN_STRICT_OVERFLOW_MISC);
9703 return ret;
9704 }
9705
9706 /* Fold a binary expression of code CODE and type TYPE with operands
9707 OP0 and OP1. LOC is the location of the resulting expression.
9708 Return the folded expression if folding is successful. Otherwise,
9709 return NULL_TREE. */
9710
9711 tree
9712 fold_binary_loc (location_t loc,
9713 enum tree_code code, tree type, tree op0, tree op1)
9714 {
9715 enum tree_code_class kind = TREE_CODE_CLASS (code);
9716 tree arg0, arg1, tem;
9717 tree t1 = NULL_TREE;
9718 bool strict_overflow_p;
9719 unsigned int prec;
9720
9721 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9722 && TREE_CODE_LENGTH (code) == 2
9723 && op0 != NULL_TREE
9724 && op1 != NULL_TREE);
9725
9726 arg0 = op0;
9727 arg1 = op1;
9728
9729 /* Strip any conversions that don't change the mode. This is
9730 safe for every expression, except for a comparison expression
9731 because its signedness is derived from its operands. So, in
9732 the latter case, only strip conversions that don't change the
9733 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9734 preserved.
9735
9736 Note that this is done as an internal manipulation within the
9737 constant folder, in order to find the simplest representation
9738 of the arguments so that their form can be studied. In any
9739 cases, the appropriate type conversions should be put back in
9740 the tree that will get out of the constant folder. */
9741
9742 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9743 {
9744 STRIP_SIGN_NOPS (arg0);
9745 STRIP_SIGN_NOPS (arg1);
9746 }
9747 else
9748 {
9749 STRIP_NOPS (arg0);
9750 STRIP_NOPS (arg1);
9751 }
9752
9753 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9754 constant but we can't do arithmetic on them. */
9755 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9756 {
9757 if (kind == tcc_binary)
9758 {
9759 /* Make sure type and arg0 have the same saturating flag. */
9760 gcc_checking_assert (TYPE_SATURATING (type)
9761 == TYPE_SATURATING (TREE_TYPE (arg0)));
9762 tem = const_binop (code, arg0, arg1);
9763 }
9764 else if (kind == tcc_comparison)
9765 tem = fold_relational_const (code, type, arg0, arg1);
9766 else
9767 tem = NULL_TREE;
9768
9769 if (tem != NULL_TREE)
9770 {
9771 if (TREE_TYPE (tem) != type)
9772 tem = fold_convert_loc (loc, type, tem);
9773 return tem;
9774 }
9775 }
9776
9777 /* If this is a commutative operation, and ARG0 is a constant, move it
9778 to ARG1 to reduce the number of tests below. */
9779 if (commutative_tree_code (code)
9780 && tree_swap_operands_p (arg0, arg1, true))
9781 return fold_build2_loc (loc, code, type, op1, op0);
9782
9783 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9784 to ARG1 to reduce the number of tests below. */
9785 if (kind == tcc_comparison
9786 && tree_swap_operands_p (arg0, arg1, true))
9787 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9788
9789 tem = generic_simplify (loc, code, type, op0, op1);
9790 if (tem)
9791 return tem;
9792
9793 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9794
9795 First check for cases where an arithmetic operation is applied to a
9796 compound, conditional, or comparison operation. Push the arithmetic
9797 operation inside the compound or conditional to see if any folding
9798 can then be done. Convert comparison to conditional for this purpose.
9799 This also optimizes non-constant cases that used to be done in
9800 expand_expr.
9801
9802 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9803 one of the operands is a comparison and the other is a comparison, a
9804 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9805 code below would make the expression more complex. Change it to a
9806 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9807 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9808
9809 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9810 || code == EQ_EXPR || code == NE_EXPR)
9811 && TREE_CODE (type) != VECTOR_TYPE
9812 && ((truth_value_p (TREE_CODE (arg0))
9813 && (truth_value_p (TREE_CODE (arg1))
9814 || (TREE_CODE (arg1) == BIT_AND_EXPR
9815 && integer_onep (TREE_OPERAND (arg1, 1)))))
9816 || (truth_value_p (TREE_CODE (arg1))
9817 && (truth_value_p (TREE_CODE (arg0))
9818 || (TREE_CODE (arg0) == BIT_AND_EXPR
9819 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9820 {
9821 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9822 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9823 : TRUTH_XOR_EXPR,
9824 boolean_type_node,
9825 fold_convert_loc (loc, boolean_type_node, arg0),
9826 fold_convert_loc (loc, boolean_type_node, arg1));
9827
9828 if (code == EQ_EXPR)
9829 tem = invert_truthvalue_loc (loc, tem);
9830
9831 return fold_convert_loc (loc, type, tem);
9832 }
9833
9834 if (TREE_CODE_CLASS (code) == tcc_binary
9835 || TREE_CODE_CLASS (code) == tcc_comparison)
9836 {
9837 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9838 {
9839 tem = fold_build2_loc (loc, code, type,
9840 fold_convert_loc (loc, TREE_TYPE (op0),
9841 TREE_OPERAND (arg0, 1)), op1);
9842 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9843 tem);
9844 }
9845 if (TREE_CODE (arg1) == COMPOUND_EXPR
9846 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9847 {
9848 tem = fold_build2_loc (loc, code, type, op0,
9849 fold_convert_loc (loc, TREE_TYPE (op1),
9850 TREE_OPERAND (arg1, 1)));
9851 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9852 tem);
9853 }
9854
9855 if (TREE_CODE (arg0) == COND_EXPR
9856 || TREE_CODE (arg0) == VEC_COND_EXPR
9857 || COMPARISON_CLASS_P (arg0))
9858 {
9859 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9860 arg0, arg1,
9861 /*cond_first_p=*/1);
9862 if (tem != NULL_TREE)
9863 return tem;
9864 }
9865
9866 if (TREE_CODE (arg1) == COND_EXPR
9867 || TREE_CODE (arg1) == VEC_COND_EXPR
9868 || COMPARISON_CLASS_P (arg1))
9869 {
9870 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9871 arg1, arg0,
9872 /*cond_first_p=*/0);
9873 if (tem != NULL_TREE)
9874 return tem;
9875 }
9876 }
9877
9878 switch (code)
9879 {
9880 case MEM_REF:
9881 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9882 if (TREE_CODE (arg0) == ADDR_EXPR
9883 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9884 {
9885 tree iref = TREE_OPERAND (arg0, 0);
9886 return fold_build2 (MEM_REF, type,
9887 TREE_OPERAND (iref, 0),
9888 int_const_binop (PLUS_EXPR, arg1,
9889 TREE_OPERAND (iref, 1)));
9890 }
9891
9892 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9893 if (TREE_CODE (arg0) == ADDR_EXPR
9894 && handled_component_p (TREE_OPERAND (arg0, 0)))
9895 {
9896 tree base;
9897 HOST_WIDE_INT coffset;
9898 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9899 &coffset);
9900 if (!base)
9901 return NULL_TREE;
9902 return fold_build2 (MEM_REF, type,
9903 build_fold_addr_expr (base),
9904 int_const_binop (PLUS_EXPR, arg1,
9905 size_int (coffset)));
9906 }
9907
9908 return NULL_TREE;
9909
9910 case POINTER_PLUS_EXPR:
9911 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9912 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9913 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9914 return fold_convert_loc (loc, type,
9915 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9916 fold_convert_loc (loc, sizetype,
9917 arg1),
9918 fold_convert_loc (loc, sizetype,
9919 arg0)));
9920
9921 /* PTR_CST +p CST -> CST1 */
9922 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9923 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9924 fold_convert_loc (loc, type, arg1));
9925
9926 return NULL_TREE;
9927
9928 case PLUS_EXPR:
9929 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9930 {
9931 /* X + (X / CST) * -CST is X % CST. */
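/* E.g. "X + (X / 16) * -16" is "X % 16" under truncating
division. */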
9932 if (TREE_CODE (arg1) == MULT_EXPR
9933 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9934 && operand_equal_p (arg0,
9935 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9936 {
9937 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9938 tree cst1 = TREE_OPERAND (arg1, 1);
9939 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9940 cst1, cst0);
9941 if (sum && integer_zerop (sum))
9942 return fold_convert_loc (loc, type,
9943 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9944 TREE_TYPE (arg0), arg0,
9945 cst0));
9946 }
9947 }
9948
9949 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9950 one. Make sure the type is not saturating and has the signedness of
9951 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9952 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9953 if ((TREE_CODE (arg0) == MULT_EXPR
9954 || TREE_CODE (arg1) == MULT_EXPR)
9955 && !TYPE_SATURATING (type)
9956 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9957 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9958 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9959 {
9960 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9961 if (tem)
9962 return tem;
9963 }
9964
9965 if (! FLOAT_TYPE_P (type))
9966 {
9967 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9968 with a constant, and the two constants have no bits in common,
9969 we should treat this as a BIT_IOR_EXPR since this may produce more
9970 simplifications. */
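/* E.g. "(a & 0xf0) + (b & 0x0f)" carries no bits between the two
halves and equals "(a & 0xf0) | (b & 0x0f)". */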
9971 if (TREE_CODE (arg0) == BIT_AND_EXPR
9972 && TREE_CODE (arg1) == BIT_AND_EXPR
9973 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9974 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9975 && wi::bit_and (TREE_OPERAND (arg0, 1),
9976 TREE_OPERAND (arg1, 1)) == 0)
9977 {
9978 code = BIT_IOR_EXPR;
9979 goto bit_ior;
9980 }
9981
9982 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9983 (plus (plus (mult) (mult)) (foo)) so that we can
9984 take advantage of the factoring cases below. */
9985 if (TYPE_OVERFLOW_WRAPS (type)
9986 && (((TREE_CODE (arg0) == PLUS_EXPR
9987 || TREE_CODE (arg0) == MINUS_EXPR)
9988 && TREE_CODE (arg1) == MULT_EXPR)
9989 || ((TREE_CODE (arg1) == PLUS_EXPR
9990 || TREE_CODE (arg1) == MINUS_EXPR)
9991 && TREE_CODE (arg0) == MULT_EXPR)))
9992 {
9993 tree parg0, parg1, parg, marg;
9994 enum tree_code pcode;
9995
9996 if (TREE_CODE (arg1) == MULT_EXPR)
9997 parg = arg0, marg = arg1;
9998 else
9999 parg = arg1, marg = arg0;
10000 pcode = TREE_CODE (parg);
10001 parg0 = TREE_OPERAND (parg, 0);
10002 parg1 = TREE_OPERAND (parg, 1);
10003 STRIP_NOPS (parg0);
10004 STRIP_NOPS (parg1);
10005
10006 if (TREE_CODE (parg0) == MULT_EXPR
10007 && TREE_CODE (parg1) != MULT_EXPR)
10008 return fold_build2_loc (loc, pcode, type,
10009 fold_build2_loc (loc, PLUS_EXPR, type,
10010 fold_convert_loc (loc, type,
10011 parg0),
10012 fold_convert_loc (loc, type,
10013 marg)),
10014 fold_convert_loc (loc, type, parg1));
10015 if (TREE_CODE (parg0) != MULT_EXPR
10016 && TREE_CODE (parg1) == MULT_EXPR)
10017 return
10018 fold_build2_loc (loc, PLUS_EXPR, type,
10019 fold_convert_loc (loc, type, parg0),
10020 fold_build2_loc (loc, pcode, type,
10021 fold_convert_loc (loc, type, marg),
10022 fold_convert_loc (loc, type,
10023 parg1)));
10024 }
10025 }
10026 else
10027 {
10028 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10029 to __complex__ ( x, y ). This is not the same for SNaNs or
10030 if signed zeros are involved. */
10031 if (!HONOR_SNANS (element_mode (arg0))
10032 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10033 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10034 {
10035 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10036 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10037 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10038 bool arg0rz = false, arg0iz = false;
10039 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10040 || (arg0i && (arg0iz = real_zerop (arg0i))))
10041 {
10042 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10043 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10044 if (arg0rz && arg1i && real_zerop (arg1i))
10045 {
10046 tree rp = arg1r ? arg1r
10047 : build1 (REALPART_EXPR, rtype, arg1);
10048 tree ip = arg0i ? arg0i
10049 : build1 (IMAGPART_EXPR, rtype, arg0);
10050 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10051 }
10052 else if (arg0iz && arg1r && real_zerop (arg1r))
10053 {
10054 tree rp = arg0r ? arg0r
10055 : build1 (REALPART_EXPR, rtype, arg0);
10056 tree ip = arg1i ? arg1i
10057 : build1 (IMAGPART_EXPR, rtype, arg1);
10058 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10059 }
10060 }
10061 }
10062
10063 if (flag_unsafe_math_optimizations
10064 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10065 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10066 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10067 return tem;
10068
10069 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10070 We associate floats only if the user has specified
10071 -fassociative-math. */
10072 if (flag_associative_math
10073 && TREE_CODE (arg1) == PLUS_EXPR
10074 && TREE_CODE (arg0) != MULT_EXPR)
10075 {
10076 tree tree10 = TREE_OPERAND (arg1, 0);
10077 tree tree11 = TREE_OPERAND (arg1, 1);
10078 if (TREE_CODE (tree11) == MULT_EXPR
10079 && TREE_CODE (tree10) == MULT_EXPR)
10080 {
10081 tree tree0;
10082 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10083 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10084 }
10085 }
10086 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10087 We associate floats only if the user has specified
10088 -fassociative-math. */
10089 if (flag_associative_math
10090 && TREE_CODE (arg0) == PLUS_EXPR
10091 && TREE_CODE (arg1) != MULT_EXPR)
10092 {
10093 tree tree00 = TREE_OPERAND (arg0, 0);
10094 tree tree01 = TREE_OPERAND (arg0, 1);
10095 if (TREE_CODE (tree01) == MULT_EXPR
10096 && TREE_CODE (tree00) == MULT_EXPR)
10097 {
10098 tree tree0;
10099 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10100 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10101 }
10102 }
10103 }
10104
10105 bit_rotate:
10106 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
10107 is a rotate of A by C1 bits. */
10108 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
10109 is a rotate of A by B bits. */
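/* E.g. for unsigned 32-bit A, "(A << 3) + (A >> 29)" is A rotated
left by 3: the bits shifted out on one side re-enter on the
other. */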
10110 {
10111 enum tree_code code0, code1;
10112 tree rtype;
10113 code0 = TREE_CODE (arg0);
10114 code1 = TREE_CODE (arg1);
10115 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10116 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10117 && operand_equal_p (TREE_OPERAND (arg0, 0),
10118 TREE_OPERAND (arg1, 0), 0)
10119 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10120 TYPE_UNSIGNED (rtype))
10121 /* Only create rotates in complete modes. Other cases are not
10122 expanded properly. */
10123 && (element_precision (rtype)
10124 == element_precision (TYPE_MODE (rtype))))
10125 {
10126 tree tree01, tree11;
10127 enum tree_code code01, code11;
10128
10129 tree01 = TREE_OPERAND (arg0, 1);
10130 tree11 = TREE_OPERAND (arg1, 1);
10131 STRIP_NOPS (tree01);
10132 STRIP_NOPS (tree11);
10133 code01 = TREE_CODE (tree01);
10134 code11 = TREE_CODE (tree11);
10135 if (code01 == INTEGER_CST
10136 && code11 == INTEGER_CST
10137 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10138 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10139 {
10140 tem = build2_loc (loc, LROTATE_EXPR,
10141 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10142 TREE_OPERAND (arg0, 0),
10143 code0 == LSHIFT_EXPR ? tree01 : tree11);
10144 return fold_convert_loc (loc, type, tem);
10145 }
10146 else if (code11 == MINUS_EXPR)
10147 {
10148 tree tree110, tree111;
10149 tree110 = TREE_OPERAND (tree11, 0);
10150 tree111 = TREE_OPERAND (tree11, 1);
10151 STRIP_NOPS (tree110);
10152 STRIP_NOPS (tree111);
10153 if (TREE_CODE (tree110) == INTEGER_CST
10154 && 0 == compare_tree_int (tree110,
10155 element_precision
10156 (TREE_TYPE (TREE_OPERAND
10157 (arg0, 0))))
10158 && operand_equal_p (tree01, tree111, 0))
10159 return
10160 fold_convert_loc (loc, type,
10161 build2 ((code0 == LSHIFT_EXPR
10162 ? LROTATE_EXPR
10163 : RROTATE_EXPR),
10164 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10165 TREE_OPERAND (arg0, 0), tree01));
10166 }
10167 else if (code01 == MINUS_EXPR)
10168 {
10169 tree tree010, tree011;
10170 tree010 = TREE_OPERAND (tree01, 0);
10171 tree011 = TREE_OPERAND (tree01, 1);
10172 STRIP_NOPS (tree010);
10173 STRIP_NOPS (tree011);
10174 if (TREE_CODE (tree010) == INTEGER_CST
10175 && 0 == compare_tree_int (tree010,
10176 element_precision
10177 (TREE_TYPE (TREE_OPERAND
10178 (arg0, 0))))
10179 && operand_equal_p (tree11, tree011, 0))
10180 return fold_convert_loc
10181 (loc, type,
10182 build2 ((code0 != LSHIFT_EXPR
10183 ? LROTATE_EXPR
10184 : RROTATE_EXPR),
10185 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10186 TREE_OPERAND (arg0, 0), tree11));
10187 }
10188 }
10189 }
10190
10191 associate:
10192 /* In most languages, we can't associate operations on floats through
10193 parentheses. Rather than remember where the parentheses were, we
10194 don't associate floats at all, unless the user has specified
10195 -fassociative-math.
10196 And, we need to make sure type is not saturating. */
10197
10198 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10199 && !TYPE_SATURATING (type))
10200 {
10201 tree var0, con0, lit0, minus_lit0;
10202 tree var1, con1, lit1, minus_lit1;
10203 tree atype = type;
10204 bool ok = true;
10205
10206 /* Split both trees into variables, constants, and literals. Then
10207 associate each group together, the constants with literals,
10208 then the result with variables. This increases the chances of
10209 literals being recombined later and of generating relocatable
10210 expressions for the sum of a constant and literal. */
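/* E.g. (sketch) for "(x + 1) + (y + 2)" the variables are x and y
and the literals are 1 and 2; reassociation produces
"(x + y) + 3". */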
10211 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10212 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10213 code == MINUS_EXPR);
10214
10215 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10216 if (code == MINUS_EXPR)
10217 code = PLUS_EXPR;
10218
10219 /* With undefined overflow prefer doing association in a type
10220 which wraps on overflow, if that is one of the operand types. */
10221 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10222 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10223 {
10224 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10225 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10226 atype = TREE_TYPE (arg0);
10227 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10228 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10229 atype = TREE_TYPE (arg1);
10230 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10231 }
10232
10233 /* With undefined overflow we can only associate constants with one
10234 variable, and constants whose association doesn't overflow. */
10235 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10236 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10237 {
10238 if (var0 && var1)
10239 {
10240 tree tmp0 = var0;
10241 tree tmp1 = var1;
10242
10243 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10244 tmp0 = TREE_OPERAND (tmp0, 0);
10245 if (CONVERT_EXPR_P (tmp0)
10246 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10247 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10248 <= TYPE_PRECISION (atype)))
10249 tmp0 = TREE_OPERAND (tmp0, 0);
10250 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10251 tmp1 = TREE_OPERAND (tmp1, 0);
10252 if (CONVERT_EXPR_P (tmp1)
10253 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10254 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10255 <= TYPE_PRECISION (atype)))
10256 tmp1 = TREE_OPERAND (tmp1, 0);
10257 /* The only case we can still associate with two variables
10258 is if they are the same, modulo negation and bit-pattern
10259 preserving conversions. */
10260 if (!operand_equal_p (tmp0, tmp1, 0))
10261 ok = false;
10262 }
10263 }
10264
10265 /* Only do something if we found more than two objects. Otherwise,
10266 nothing has changed and we risk infinite recursion. */
10267 if (ok
10268 && (2 < ((var0 != 0) + (var1 != 0)
10269 + (con0 != 0) + (con1 != 0)
10270 + (lit0 != 0) + (lit1 != 0)
10271 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10272 {
10273 bool any_overflows = false;
10274 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10275 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10276 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10277 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10278 var0 = associate_trees (loc, var0, var1, code, atype);
10279 con0 = associate_trees (loc, con0, con1, code, atype);
10280 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10281 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10282 code, atype);
10283
10284 /* Preserve the MINUS_EXPR if the negative part of the literal is
10285 greater than the positive part. Otherwise, the multiplicative
10286 folding code (i.e. extract_muldiv) may be fooled when
10287 unsigned constants are subtracted, as in the following
10288 example: ((X*2 + 4) - 8U)/2. */
10289 if (minus_lit0 && lit0)
10290 {
10291 if (TREE_CODE (lit0) == INTEGER_CST
10292 && TREE_CODE (minus_lit0) == INTEGER_CST
10293 && tree_int_cst_lt (lit0, minus_lit0))
10294 {
10295 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10296 MINUS_EXPR, atype);
10297 lit0 = 0;
10298 }
10299 else
10300 {
10301 lit0 = associate_trees (loc, lit0, minus_lit0,
10302 MINUS_EXPR, atype);
10303 minus_lit0 = 0;
10304 }
10305 }
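	      /* In the example above, lit0 is 4 and minus_lit0 is 8, so
		 the literals are kept as minus_lit0 = 8 - 4 = 4, giving
		 (X*2 - 4)/2, which extract_muldiv can fold to X - 2,
		 rather than forming the wrapped unsigned constant 4 - 8U. */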
10306
10307 /* Don't introduce overflows through reassociation. */
10308 if (!any_overflows
10309 && ((lit0 && TREE_OVERFLOW (lit0))
10310 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10311 return NULL_TREE;
10312
10313 if (minus_lit0)
10314 {
10315 if (con0 == 0)
10316 return
10317 fold_convert_loc (loc, type,
10318 associate_trees (loc, var0, minus_lit0,
10319 MINUS_EXPR, atype));
10320 else
10321 {
10322 con0 = associate_trees (loc, con0, minus_lit0,
10323 MINUS_EXPR, atype);
10324 return
10325 fold_convert_loc (loc, type,
10326 associate_trees (loc, var0, con0,
10327 PLUS_EXPR, atype));
10328 }
10329 }
10330
10331 con0 = associate_trees (loc, con0, lit0, code, atype);
10332 return
10333 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10334 code, atype));
10335 }
10336 }
10337
10338 return NULL_TREE;
10339
10340 case MINUS_EXPR:
10341 /* Pointer simplifications for subtraction, simple reassociations. */
10342 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10343 {
10344 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10345 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10346 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10347 {
10348 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10349 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10350 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10351 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10352 return fold_build2_loc (loc, PLUS_EXPR, type,
10353 fold_build2_loc (loc, MINUS_EXPR, type,
10354 arg00, arg10),
10355 fold_build2_loc (loc, MINUS_EXPR, type,
10356 arg01, arg11));
10357 }
10358 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10359 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10360 {
10361 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10362 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10363 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10364 fold_convert_loc (loc, type, arg1));
10365 if (tmp)
10366 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10367 }
10368 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10369 simplifies. */
10370 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10371 {
10372 tree arg10 = fold_convert_loc (loc, type,
10373 TREE_OPERAND (arg1, 0));
10374 tree arg11 = fold_convert_loc (loc, type,
10375 TREE_OPERAND (arg1, 1));
10376 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10377 fold_convert_loc (loc, type, arg0),
10378 arg10);
10379 if (tmp)
10380 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10381 }
10382 }
10383 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10384 if (TREE_CODE (arg0) == NEGATE_EXPR
10385 && negate_expr_p (arg1)
10386 && reorder_operands_p (arg0, arg1))
10387 return fold_build2_loc (loc, MINUS_EXPR, type,
10388 fold_convert_loc (loc, type,
10389 negate_expr (arg1)),
10390 fold_convert_loc (loc, type,
10391 TREE_OPERAND (arg0, 0)));
10392 /* Convert -A - 1 to ~A. */
10393 if (TREE_CODE (arg0) == NEGATE_EXPR
10394 && integer_each_onep (arg1)
10395 && !TYPE_OVERFLOW_TRAPS (type))
10396 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10397 fold_convert_loc (loc, type,
10398 TREE_OPERAND (arg0, 0)));
10399
10400 /* Convert -1 - A to ~A. */
10401 if (TREE_CODE (type) != COMPLEX_TYPE
10402 && integer_all_onesp (arg0))
10403 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10404
10405
10406 /* X - (X / Y) * Y is X % Y. */
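	 /* This holds for truncating division: e.g. X = 7, Y = 3 gives
	    7 - (7/3)*3 = 7 - 6 = 1 = 7 % 3. */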
10407 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10408 && TREE_CODE (arg1) == MULT_EXPR
10409 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10410 && operand_equal_p (arg0,
10411 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10412 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10413 TREE_OPERAND (arg1, 1), 0))
10414 return
10415 fold_convert_loc (loc, type,
10416 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10417 arg0, TREE_OPERAND (arg1, 1)));
10418
10419 if (! FLOAT_TYPE_P (type))
10420 {
10421 /* Fold A - (A & B) into ~B & A. */
10422 if (!TREE_SIDE_EFFECTS (arg0)
10423 && TREE_CODE (arg1) == BIT_AND_EXPR)
10424 {
10425 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10426 {
10427 tree arg10 = fold_convert_loc (loc, type,
10428 TREE_OPERAND (arg1, 0));
10429 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10430 fold_build1_loc (loc, BIT_NOT_EXPR,
10431 type, arg10),
10432 fold_convert_loc (loc, type, arg0));
10433 }
10434 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10435 {
10436 tree arg11 = fold_convert_loc (loc,
10437 type, TREE_OPERAND (arg1, 1));
10438 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10439 fold_build1_loc (loc, BIT_NOT_EXPR,
10440 type, arg11),
10441 fold_convert_loc (loc, type, arg0));
10442 }
10443 }
10444
10445 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10446 any power of 2 minus 1. */
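	     /* E.g. with A = 0xA and B = 0x3:
		(0xA & ~0x3) - (0xA & 0x3) = 8 - 2 = 6,
		and (0xA ^ 0x3) - 0x3 = 9 - 3 = 6. */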
10447 if (TREE_CODE (arg0) == BIT_AND_EXPR
10448 && TREE_CODE (arg1) == BIT_AND_EXPR
10449 && operand_equal_p (TREE_OPERAND (arg0, 0),
10450 TREE_OPERAND (arg1, 0), 0))
10451 {
10452 tree mask0 = TREE_OPERAND (arg0, 1);
10453 tree mask1 = TREE_OPERAND (arg1, 1);
10454 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10455
10456 if (operand_equal_p (tem, mask1, 0))
10457 {
10458 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10459 TREE_OPERAND (arg0, 0), mask1);
10460 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10461 }
10462 }
10463 }
10464
10465 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10466 __complex__ ( x, -y ). This is not the same for SNaNs or if
10467 signed zeros are involved. */
10468 if (!HONOR_SNANS (element_mode (arg0))
10469 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10470 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10471 {
10472 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10473 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10474 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10475 bool arg0rz = false, arg0iz = false;
10476 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10477 || (arg0i && (arg0iz = real_zerop (arg0i))))
10478 {
10479 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10480 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10481 if (arg0rz && arg1i && real_zerop (arg1i))
10482 {
10483 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10484 arg1r ? arg1r
10485 : build1 (REALPART_EXPR, rtype, arg1));
10486 tree ip = arg0i ? arg0i
10487 : build1 (IMAGPART_EXPR, rtype, arg0);
10488 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10489 }
10490 else if (arg0iz && arg1r && real_zerop (arg1r))
10491 {
10492 tree rp = arg0r ? arg0r
10493 : build1 (REALPART_EXPR, rtype, arg0);
10494 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10495 arg1i ? arg1i
10496 : build1 (IMAGPART_EXPR, rtype, arg1));
10497 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10498 }
10499 }
10500 }
10501
10502 /* A - B -> A + (-B) if B is easily negatable. */
10503 if (negate_expr_p (arg1)
10504 && !TYPE_OVERFLOW_SANITIZED (type)
10505 && ((FLOAT_TYPE_P (type)
10506 /* Avoid this transformation if B is a positive REAL_CST. */
10507 && (TREE_CODE (arg1) != REAL_CST
10508 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10509 || INTEGRAL_TYPE_P (type)))
10510 return fold_build2_loc (loc, PLUS_EXPR, type,
10511 fold_convert_loc (loc, type, arg0),
10512 fold_convert_loc (loc, type,
10513 negate_expr (arg1)));
10514
10515 /* Try folding difference of addresses. */
10516 {
10517 HOST_WIDE_INT diff;
10518
10519 if ((TREE_CODE (arg0) == ADDR_EXPR
10520 || TREE_CODE (arg1) == ADDR_EXPR)
10521 && ptr_difference_const (arg0, arg1, &diff))
10522 return build_int_cst_type (type, diff);
10523 }
10524
10525 /* Fold &a[i] - &a[j] to i-j. */
10526 if (TREE_CODE (arg0) == ADDR_EXPR
10527 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10528 && TREE_CODE (arg1) == ADDR_EXPR
10529 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10530 {
10531 tree tem = fold_addr_of_array_ref_difference (loc, type,
10532 TREE_OPERAND (arg0, 0),
10533 TREE_OPERAND (arg1, 0));
10534 if (tem)
10535 return tem;
10536 }
10537
10538 if (FLOAT_TYPE_P (type)
10539 && flag_unsafe_math_optimizations
10540 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10541 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10542 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10543 return tem;
10544
10545 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same,
10546 or one of them being 1. Make sure the type is not saturating and has
10547 the signedness of the stripped operands, as fold_plusminus_mult_expr will re-associate.
10548 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10549 if ((TREE_CODE (arg0) == MULT_EXPR
10550 || TREE_CODE (arg1) == MULT_EXPR)
10551 && !TYPE_SATURATING (type)
10552 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10553 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10554 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10555 {
10556 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10557 if (tem)
10558 return tem;
10559 }
10560
10561 goto associate;
10562
10563 case MULT_EXPR:
10564 /* (-A) * (-B) -> A * B */
10565 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10566 return fold_build2_loc (loc, MULT_EXPR, type,
10567 fold_convert_loc (loc, type,
10568 TREE_OPERAND (arg0, 0)),
10569 fold_convert_loc (loc, type,
10570 negate_expr (arg1)));
10571 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10572 return fold_build2_loc (loc, MULT_EXPR, type,
10573 fold_convert_loc (loc, type,
10574 negate_expr (arg0)),
10575 fold_convert_loc (loc, type,
10576 TREE_OPERAND (arg1, 0)));
10577
10578 if (! FLOAT_TYPE_P (type))
10579 {
10580 /* Transform x * -C into -x * C if x is easily negatable. */
10581 if (TREE_CODE (arg1) == INTEGER_CST
10582 && tree_int_cst_sgn (arg1) == -1
10583 && negate_expr_p (arg0)
10584 && (tem = negate_expr (arg1)) != arg1
10585 && !TREE_OVERFLOW (tem))
10586 return fold_build2_loc (loc, MULT_EXPR, type,
10587 fold_convert_loc (loc, type,
10588 negate_expr (arg0)),
10589 tem);
10590
10591 /* (a * (1 << b)) is (a << b) */
10592 if (TREE_CODE (arg1) == LSHIFT_EXPR
10593 && integer_onep (TREE_OPERAND (arg1, 0)))
10594 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10595 TREE_OPERAND (arg1, 1));
10596 if (TREE_CODE (arg0) == LSHIFT_EXPR
10597 && integer_onep (TREE_OPERAND (arg0, 0)))
10598 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10599 TREE_OPERAND (arg0, 1));
10600
10601 /* (A + A) * C -> A * 2 * C */
10602 if (TREE_CODE (arg0) == PLUS_EXPR
10603 && TREE_CODE (arg1) == INTEGER_CST
10604 && operand_equal_p (TREE_OPERAND (arg0, 0),
10605 TREE_OPERAND (arg0, 1), 0))
10606 return fold_build2_loc (loc, MULT_EXPR, type,
10607 omit_one_operand_loc (loc, type,
10608 TREE_OPERAND (arg0, 0),
10609 TREE_OPERAND (arg0, 1)),
10610 fold_build2_loc (loc, MULT_EXPR, type,
10611 build_int_cst (type, 2), arg1));
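	     /* E.g. (X + X) * 3 is rewritten as X * (2 * 3) = X * 6. */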
10612
10613 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10614 sign-changing only. */
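	     /* E.g. (int) (X /[ex] 4) * 4, with X unsigned of the same
		width, folds back to (int) X: the exact division is known
		to discard no bits. */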
10615 if (TREE_CODE (arg1) == INTEGER_CST
10616 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10617 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10618 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10619
10620 strict_overflow_p = false;
10621 if (TREE_CODE (arg1) == INTEGER_CST
10622 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10623 &strict_overflow_p)))
10624 {
10625 if (strict_overflow_p)
10626 fold_overflow_warning (("assuming signed overflow does not "
10627 "occur when simplifying "
10628 "multiplication"),
10629 WARN_STRICT_OVERFLOW_MISC);
10630 return fold_convert_loc (loc, type, tem);
10631 }
10632
10633 /* Optimize z * conj(z) for integer complex numbers. */
10634 if (TREE_CODE (arg0) == CONJ_EXPR
10635 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10636 return fold_mult_zconjz (loc, type, arg1);
10637 if (TREE_CODE (arg1) == CONJ_EXPR
10638 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10639 return fold_mult_zconjz (loc, type, arg0);
10640 }
10641 else
10642 {
10643 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10644 the result for floating point types due to rounding, so it is applied
10645 only if -fassociative-math is specified. */
10646 if (flag_associative_math
10647 && TREE_CODE (arg0) == RDIV_EXPR
10648 && TREE_CODE (arg1) == REAL_CST
10649 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10650 {
10651 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10652 arg1);
10653 if (tem)
10654 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10655 TREE_OPERAND (arg0, 1));
10656 }
10657
10658 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10659 if (operand_equal_p (arg0, arg1, 0))
10660 {
10661 tree tem = fold_strip_sign_ops (arg0);
10662 if (tem != NULL_TREE)
10663 {
10664 tem = fold_convert_loc (loc, type, tem);
10665 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10666 }
10667 }
10668
10669 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10670 This is not the same for NaNs or if signed zeros are
10671 involved. */
10672 if (!HONOR_NANS (element_mode (arg0))
10673 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10674 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10675 && TREE_CODE (arg1) == COMPLEX_CST
10676 && real_zerop (TREE_REALPART (arg1)))
10677 {
10678 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10679 if (real_onep (TREE_IMAGPART (arg1)))
10680 return
10681 fold_build2_loc (loc, COMPLEX_EXPR, type,
10682 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10683 rtype, arg0)),
10684 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10685 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10686 return
10687 fold_build2_loc (loc, COMPLEX_EXPR, type,
10688 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10689 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10690 rtype, arg0)));
10691 }
10692
10693 /* Optimize z * conj(z) for floating point complex numbers.
10694 Guarded by flag_unsafe_math_optimizations as non-finite
10695 imaginary components don't produce scalar results. */
10696 if (flag_unsafe_math_optimizations
10697 && TREE_CODE (arg0) == CONJ_EXPR
10698 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10699 return fold_mult_zconjz (loc, type, arg1);
10700 if (flag_unsafe_math_optimizations
10701 && TREE_CODE (arg1) == CONJ_EXPR
10702 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10703 return fold_mult_zconjz (loc, type, arg0);
10704
10705 if (flag_unsafe_math_optimizations)
10706 {
10707 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10708 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10709
10710 /* Optimizations of root(...)*root(...). */
10711 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10712 {
10713 tree rootfn, arg;
10714 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10715 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10716
10717 /* Optimize sqrt(x)*sqrt(x) as x. */
10718 if (BUILTIN_SQRT_P (fcode0)
10719 && operand_equal_p (arg00, arg10, 0)
10720 && ! HONOR_SNANS (element_mode (type)))
10721 return arg00;
10722
10723 /* Optimize root(x)*root(y) as root(x*y). */
10724 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10725 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10726 return build_call_expr_loc (loc, rootfn, 1, arg);
10727 }
10728
10729 /* Optimize expN(x)*expN(y) as expN(x+y). */
10730 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10731 {
10732 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10733 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10734 CALL_EXPR_ARG (arg0, 0),
10735 CALL_EXPR_ARG (arg1, 0));
10736 return build_call_expr_loc (loc, expfn, 1, arg);
10737 }
10738
10739 /* Optimizations of pow(...)*pow(...). */
10740 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10741 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10742 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10743 {
10744 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10745 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10746 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10747 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10748
10749 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10750 if (operand_equal_p (arg01, arg11, 0))
10751 {
10752 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10753 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10754 arg00, arg10);
10755 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10756 }
10757
10758 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10759 if (operand_equal_p (arg00, arg10, 0))
10760 {
10761 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10762 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10763 arg01, arg11);
10764 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10765 }
10766 }
10767
10768 /* Optimize tan(x)*cos(x) as sin(x). */
10769 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10770 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10771 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10772 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10773 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10774 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10775 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10776 CALL_EXPR_ARG (arg1, 0), 0))
10777 {
10778 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10779
10780 if (sinfn != NULL_TREE)
10781 return build_call_expr_loc (loc, sinfn, 1,
10782 CALL_EXPR_ARG (arg0, 0));
10783 }
10784
10785 /* Optimize x*pow(x,c) as pow(x,c+1). */
10786 if (fcode1 == BUILT_IN_POW
10787 || fcode1 == BUILT_IN_POWF
10788 || fcode1 == BUILT_IN_POWL)
10789 {
10790 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10791 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10792 if (TREE_CODE (arg11) == REAL_CST
10793 && !TREE_OVERFLOW (arg11)
10794 && operand_equal_p (arg0, arg10, 0))
10795 {
10796 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10797 REAL_VALUE_TYPE c;
10798 tree arg;
10799
10800 c = TREE_REAL_CST (arg11);
10801 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10802 arg = build_real (type, c);
10803 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10804 }
10805 }
10806
10807 /* Optimize pow(x,c)*x as pow(x,c+1). */
10808 if (fcode0 == BUILT_IN_POW
10809 || fcode0 == BUILT_IN_POWF
10810 || fcode0 == BUILT_IN_POWL)
10811 {
10812 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10813 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10814 if (TREE_CODE (arg01) == REAL_CST
10815 && !TREE_OVERFLOW (arg01)
10816 && operand_equal_p (arg1, arg00, 0))
10817 {
10818 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10819 REAL_VALUE_TYPE c;
10820 tree arg;
10821
10822 c = TREE_REAL_CST (arg01);
10823 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10824 arg = build_real (type, c);
10825 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10826 }
10827 }
10828
10829 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10830 if (!in_gimple_form
10831 && optimize
10832 && operand_equal_p (arg0, arg1, 0))
10833 {
10834 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10835
10836 if (powfn)
10837 {
10838 tree arg = build_real (type, dconst2);
10839 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10840 }
10841 }
10842 }
10843 }
10844 goto associate;
10845
10846 case BIT_IOR_EXPR:
10847 bit_ior:
10848 /* ~X | X is -1. */
10849 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10850 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10851 {
10852 t1 = build_zero_cst (type);
10853 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10854 return omit_one_operand_loc (loc, type, t1, arg1);
10855 }
10856
10857 /* X | ~X is -1. */
10858 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10859 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10860 {
10861 t1 = build_zero_cst (type);
10862 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10863 return omit_one_operand_loc (loc, type, t1, arg0);
10864 }
10865
10866 /* Canonicalize (X & C1) | C2. */
10867 if (TREE_CODE (arg0) == BIT_AND_EXPR
10868 && TREE_CODE (arg1) == INTEGER_CST
10869 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10870 {
10871 int width = TYPE_PRECISION (type), w;
10872 wide_int c1 = TREE_OPERAND (arg0, 1);
10873 wide_int c2 = arg1;
10874
10875 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
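	     /* E.g. (X & 0x0f) | 0xff is just (X, 0xff): every bit C1
		can contribute is already set in C2. */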
10876 if ((c1 & c2) == c1)
10877 return omit_one_operand_loc (loc, type, arg1,
10878 TREE_OPERAND (arg0, 0));
10879
10880 wide_int msk = wi::mask (width, false,
10881 TYPE_PRECISION (TREE_TYPE (arg1)));
10882
10883 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10884 if (msk.and_not (c1 | c2) == 0)
10885 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10886 TREE_OPERAND (arg0, 0), arg1);
10887
10888 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10889 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10890 mode which allows further optimizations. */
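	     /* For instance, (X & 0x3f) | 0x0f becomes (X & 0x30) | 0x0f,
		while for a 32-bit X, (X & 0xff) | 0x0f is left alone since
		0xff is already the QImode mask. */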
10891 c1 &= msk;
10892 c2 &= msk;
10893 wide_int c3 = c1.and_not (c2);
10894 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10895 {
10896 wide_int mask = wi::mask (w, false,
10897 TYPE_PRECISION (type));
10898 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10899 {
10900 c3 = mask;
10901 break;
10902 }
10903 }
10904
10905 if (c3 != c1)
10906 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10907 fold_build2_loc (loc, BIT_AND_EXPR, type,
10908 TREE_OPERAND (arg0, 0),
10909 wide_int_to_tree (type,
10910 c3)),
10911 arg1);
10912 }
10913
10914 /* (X & ~Y) | (~X & Y) is X ^ Y */
10915 if (TREE_CODE (arg0) == BIT_AND_EXPR
10916 && TREE_CODE (arg1) == BIT_AND_EXPR)
10917 {
10918 tree a0, a1, l0, l1, n0, n1;
10919
10920 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10921 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10922
10923 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10924 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10925
10926 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10927 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10928
10929 if ((operand_equal_p (n0, a0, 0)
10930 && operand_equal_p (n1, a1, 0))
10931 || (operand_equal_p (n0, a1, 0)
10932 && operand_equal_p (n1, a0, 0)))
10933 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10934 }
10935
10936 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10937 if (t1 != NULL_TREE)
10938 return t1;
10939
10940 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10941
10942 This results in more efficient code for machines without a NAND
10943 instruction. Combine will canonicalize to the first form
10944 which will allow use of NAND instructions provided by the
10945 backend if they exist. */
10946 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10947 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10948 {
10949 return
10950 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10951 build2 (BIT_AND_EXPR, type,
10952 fold_convert_loc (loc, type,
10953 TREE_OPERAND (arg0, 0)),
10954 fold_convert_loc (loc, type,
10955 TREE_OPERAND (arg1, 0))));
10956 }
10957
10958 /* See if this can be simplified into a rotate first. If that
10959 is unsuccessful continue in the association code. */
10960 goto bit_rotate;
10961
10962 case BIT_XOR_EXPR:
10963 /* ~X ^ X is -1. */
10964 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10965 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10966 {
10967 t1 = build_zero_cst (type);
10968 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10969 return omit_one_operand_loc (loc, type, t1, arg1);
10970 }
10971
10972 /* X ^ ~X is -1. */
10973 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10974 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10975 {
10976 t1 = build_zero_cst (type);
10977 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10978 return omit_one_operand_loc (loc, type, t1, arg0);
10979 }
10980
10981 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10982 with a constant, and the two constants have no bits in common,
10983 we should treat this as a BIT_IOR_EXPR since this may produce more
10984 simplifications. */
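	 /* E.g. (X & 0x0c) ^ (Y & 0x03) is handled as
	    (X & 0x0c) | (Y & 0x03), since the masks share no bits. */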
10985 if (TREE_CODE (arg0) == BIT_AND_EXPR
10986 && TREE_CODE (arg1) == BIT_AND_EXPR
10987 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10988 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10989 && wi::bit_and (TREE_OPERAND (arg0, 1),
10990 TREE_OPERAND (arg1, 1)) == 0)
10991 {
10992 code = BIT_IOR_EXPR;
10993 goto bit_ior;
10994 }
10995
10996 /* (X | Y) ^ X -> Y & ~X. */
10997 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10998 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10999 {
11000 tree t2 = TREE_OPERAND (arg0, 1);
11001 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11002 arg1);
11003 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11004 fold_convert_loc (loc, type, t2),
11005 fold_convert_loc (loc, type, t1));
11006 return t1;
11007 }
11008
11009 /* (Y | X) ^ X -> Y & ~X. */
11010 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11011 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11012 {
11013 tree t2 = TREE_OPERAND (arg0, 0);
11014 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11015 arg1);
11016 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11017 fold_convert_loc (loc, type, t2),
11018 fold_convert_loc (loc, type, t1));
11019 return t1;
11020 }
11021
11022 /* X ^ (X | Y) -> Y & ~X. */
11023 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11024 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11025 {
11026 tree t2 = TREE_OPERAND (arg1, 1);
11027 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11028 arg0);
11029 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11030 fold_convert_loc (loc, type, t2),
11031 fold_convert_loc (loc, type, t1));
11032 return t1;
11033 }
11034
11035 /* X ^ (Y | X) -> Y & ~X. */
11036 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11037 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11038 {
11039 tree t2 = TREE_OPERAND (arg1, 0);
11040 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11041 arg0);
11042 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11043 fold_convert_loc (loc, type, t2),
11044 fold_convert_loc (loc, type, t1));
11045 return t1;
11046 }
11047
11048 /* Convert ~X ^ ~Y to X ^ Y. */
11049 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11050 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11051 return fold_build2_loc (loc, code, type,
11052 fold_convert_loc (loc, type,
11053 TREE_OPERAND (arg0, 0)),
11054 fold_convert_loc (loc, type,
11055 TREE_OPERAND (arg1, 0)));
11056
11057 /* Convert ~X ^ C to X ^ ~C. */
11058 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11059 && TREE_CODE (arg1) == INTEGER_CST)
11060 return fold_build2_loc (loc, code, type,
11061 fold_convert_loc (loc, type,
11062 TREE_OPERAND (arg0, 0)),
11063 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11064
11065 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11066 if (TREE_CODE (arg0) == BIT_AND_EXPR
11067 && INTEGRAL_TYPE_P (type)
11068 && integer_onep (TREE_OPERAND (arg0, 1))
11069 && integer_onep (arg1))
11070 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11071 build_zero_cst (TREE_TYPE (arg0)));
11072
11073 /* Fold (X & Y) ^ Y as ~X & Y. */
11074 if (TREE_CODE (arg0) == BIT_AND_EXPR
11075 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11076 {
11077 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11078 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11079 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11080 fold_convert_loc (loc, type, arg1));
11081 }
11082 /* Fold (X & Y) ^ X as ~Y & X. */
11083 if (TREE_CODE (arg0) == BIT_AND_EXPR
11084 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11085 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11086 {
11087 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11088 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11089 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11090 fold_convert_loc (loc, type, arg1));
11091 }
11092 /* Fold X ^ (X & Y) as X & ~Y. */
11093 if (TREE_CODE (arg1) == BIT_AND_EXPR
11094 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11095 {
11096 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11097 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11098 fold_convert_loc (loc, type, arg0),
11099 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11100 }
11101 /* Fold X ^ (Y & X) as ~Y & X. */
11102 if (TREE_CODE (arg1) == BIT_AND_EXPR
11103 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11104 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11105 {
11106 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11107 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11108 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11109 fold_convert_loc (loc, type, arg0));
11110 }
11111
11112 /* See if this can be simplified into a rotate first. If that
11113 is unsuccessful continue in the association code. */
11114 goto bit_rotate;
11115
11116 case BIT_AND_EXPR:
11117 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11118 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11119 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11120 || (TREE_CODE (arg0) == EQ_EXPR
11121 && integer_zerop (TREE_OPERAND (arg0, 1))))
11122 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11123 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11124
11125 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11126 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11127 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11128 || (TREE_CODE (arg1) == EQ_EXPR
11129 && integer_zerop (TREE_OPERAND (arg1, 1))))
11130 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11131 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11132
11133 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11134 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11135 && INTEGRAL_TYPE_P (type)
11136 && integer_onep (TREE_OPERAND (arg0, 1))
11137 && integer_onep (arg1))
11138 {
11139 tree tem2;
11140 tem = TREE_OPERAND (arg0, 0);
11141 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11142 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11143 tem, tem2);
11144 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11145 build_zero_cst (TREE_TYPE (tem)));
11146 }
11147 /* Fold ~X & 1 as (X & 1) == 0. */
11148 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11149 && INTEGRAL_TYPE_P (type)
11150 && integer_onep (arg1))
11151 {
11152 tree tem2;
11153 tem = TREE_OPERAND (arg0, 0);
11154 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11155 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11156 tem, tem2);
11157 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11158 build_zero_cst (TREE_TYPE (tem)));
11159 }
11160 /* Fold !X & 1 as X == 0. */
11161 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11162 && integer_onep (arg1))
11163 {
11164 tem = TREE_OPERAND (arg0, 0);
11165 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11166 build_zero_cst (TREE_TYPE (tem)));
11167 }
11168
11169 /* Fold (X ^ Y) & Y as ~X & Y. */
11170 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11171 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11172 {
11173 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11174 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11175 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11176 fold_convert_loc (loc, type, arg1));
11177 }
11178 /* Fold (X ^ Y) & X as ~Y & X. */
11179 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11180 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11181 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11182 {
11183 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11184 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11185 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11186 fold_convert_loc (loc, type, arg1));
11187 }
11188 /* Fold X & (X ^ Y) as X & ~Y. */
11189 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11190 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11191 {
11192 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11193 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11194 fold_convert_loc (loc, type, arg0),
11195 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11196 }
11197 /* Fold X & (Y ^ X) as ~Y & X. */
11198 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11199 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11200 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11201 {
11202 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11203 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11204 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11205 fold_convert_loc (loc, type, arg0));
11206 }
11207
11208 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11209 multiple of 1 << CST. */
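	 /* E.g. (X * 8) & -8 is just X * 8: the low three bits of the
	    product are always zero. */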
11210 if (TREE_CODE (arg1) == INTEGER_CST)
11211 {
11212 wide_int cst1 = arg1;
11213 wide_int ncst1 = -cst1;
11214 if ((cst1 & ncst1) == ncst1
11215 && multiple_of_p (type, arg0,
11216 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11217 return fold_convert_loc (loc, type, arg0);
11218 }
11219
11220 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11221 bits from CST2. */
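	 /* E.g. (X * 4) & 3 folds to 0, and (X * 4) & 7 drops the
	    known-zero low bits to become (X * 4) & 4. */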
11222 if (TREE_CODE (arg1) == INTEGER_CST
11223 && TREE_CODE (arg0) == MULT_EXPR
11224 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11225 {
11226 wide_int warg1 = arg1;
11227 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11228
11229 if (masked == 0)
11230 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11231 arg0, arg1);
11232 else if (masked != warg1)
11233 {
11234 /* Avoid the transform if arg1 is a mask of some
11235 mode which allows further optimizations. */
11236 int pop = wi::popcount (warg1);
11237 if (!(pop >= BITS_PER_UNIT
11238 && exact_log2 (pop) != -1
11239 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11240 return fold_build2_loc (loc, code, type, op0,
11241 wide_int_to_tree (type, masked));
11242 }
11243 }
11244
11245 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11246 ((A & N) + B) & M -> (A + B) & M
11247 Similarly if (N & M) == 0,
11248 ((A | N) + B) & M -> (A + B) & M
11249 and for - instead of + (or unary - instead of +)
11250 and/or ^ instead of |.
11251 If B is constant and (B & M) == 0, fold into A & M. */
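	 /* E.g. with M = 15: ((A & 0xff) + B) & 15 becomes (A + B) & 15
	    since (0xff & 15) == 15, and ((A | 0x30) - B) & 15 becomes
	    (A - B) & 15 since (0x30 & 15) == 0. */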
11252 if (TREE_CODE (arg1) == INTEGER_CST)
11253 {
11254 wide_int cst1 = arg1;
11255 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11256 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11257 && (TREE_CODE (arg0) == PLUS_EXPR
11258 || TREE_CODE (arg0) == MINUS_EXPR
11259 || TREE_CODE (arg0) == NEGATE_EXPR)
11260 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11261 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11262 {
11263 tree pmop[2];
11264 int which = 0;
11265 wide_int cst0;
11266
11267 /* Now we know that arg0 is (C + D) or (C - D) or
11268 -C and arg1 (M) is (1LL << cst) - 1.
11269 Store C into PMOP[0] and D into PMOP[1]. */
11270 pmop[0] = TREE_OPERAND (arg0, 0);
11271 pmop[1] = NULL;
11272 if (TREE_CODE (arg0) != NEGATE_EXPR)
11273 {
11274 pmop[1] = TREE_OPERAND (arg0, 1);
11275 which = 1;
11276 }
11277
11278 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11279 which = -1;
11280
11281 for (; which >= 0; which--)
11282 switch (TREE_CODE (pmop[which]))
11283 {
11284 case BIT_AND_EXPR:
11285 case BIT_IOR_EXPR:
11286 case BIT_XOR_EXPR:
11287 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11288 != INTEGER_CST)
11289 break;
11290 cst0 = TREE_OPERAND (pmop[which], 1);
11291 cst0 &= cst1;
11292 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11293 {
11294 if (cst0 != cst1)
11295 break;
11296 }
11297 else if (cst0 != 0)
11298 break;
11299 /* If C or D is of the form (A & N) where
11300 (N & M) == M, or of the form (A | N) or
11301 (A ^ N) where (N & M) == 0, replace it with A. */
11302 pmop[which] = TREE_OPERAND (pmop[which], 0);
11303 break;
11304 case INTEGER_CST:
11305 /* If C or D is a N where (N & M) == 0, it can be
11306 omitted (assumed 0). */
11307 if ((TREE_CODE (arg0) == PLUS_EXPR
11308 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11309 && (cst1 & pmop[which]) == 0)
11310 pmop[which] = NULL;
11311 break;
11312 default:
11313 break;
11314 }
11315
11316 /* Only build anything new if we optimized one or both arguments
11317 above. */
11318 if (pmop[0] != TREE_OPERAND (arg0, 0)
11319 || (TREE_CODE (arg0) != NEGATE_EXPR
11320 && pmop[1] != TREE_OPERAND (arg0, 1)))
11321 {
11322 tree utype = TREE_TYPE (arg0);
11323 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11324 {
11325 /* Perform the operations in a type that has defined
11326 overflow behavior. */
11327 utype = unsigned_type_for (TREE_TYPE (arg0));
11328 if (pmop[0] != NULL)
11329 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11330 if (pmop[1] != NULL)
11331 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11332 }
11333
11334 if (TREE_CODE (arg0) == NEGATE_EXPR)
11335 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11336 else if (TREE_CODE (arg0) == PLUS_EXPR)
11337 {
11338 if (pmop[0] != NULL && pmop[1] != NULL)
11339 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11340 pmop[0], pmop[1]);
11341 else if (pmop[0] != NULL)
11342 tem = pmop[0];
11343 else if (pmop[1] != NULL)
11344 tem = pmop[1];
11345 else
11346 return build_int_cst (type, 0);
11347 }
11348 else if (pmop[0] == NULL)
11349 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11350 else
11351 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11352 pmop[0], pmop[1]);
11353 /* TEM is now the new binary +, - or unary - replacement. */
11354 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11355 fold_convert_loc (loc, utype, arg1));
11356 return fold_convert_loc (loc, type, tem);
11357 }
11358 }
11359 }
11360
11361 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11362 if (t1 != NULL_TREE)
11363 return t1;
11364 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11365 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11366 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11367 {
11368 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11369
11370 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11371 if (mask == -1)
11372 return
11373 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11374 }
11375
11376 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11377
11378 This results in more efficient code for machines without a NOR
11379 instruction. Combine will canonicalize to the first form
11380 which will allow use of NOR instructions provided by the
11381 backend if they exist. */
11382 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11383 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11384 {
11385 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11386 build2 (BIT_IOR_EXPR, type,
11387 fold_convert_loc (loc, type,
11388 TREE_OPERAND (arg0, 0)),
11389 fold_convert_loc (loc, type,
11390 TREE_OPERAND (arg1, 0))));
11391 }
11392
11393 /* If arg0 is derived from the address of an object or function, we may
11394 be able to fold this expression using the object or function's
11395 alignment. */
11396 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11397 {
11398 unsigned HOST_WIDE_INT modulus, residue;
11399 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11400
11401 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11402 integer_onep (arg1));
11403
11404 /* This works because modulus is a power of 2. If this weren't the
11405 case, we'd have to replace it by its greatest power-of-2
11406 divisor: modulus & -modulus. */
11407 if (low < modulus)
11408 return build_int_cst (type, residue & low);
11409 }
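      /* For instance, if arg0 is the address of a variable with 8-byte
	 alignment, modulus is 8 and residue 0, so arg0 & 7 folds to the
	 constant 0. */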
11410
11411 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11412 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11413 if the new mask might be further optimized. */
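      /* For instance, on a 32-bit type, (X << 8) & 0xffffff00 widens the
	 mask to 0xffffffff (the masked-off bits are already zero after
	 the shift), and the all-ones mask is then dropped entirely,
	 leaving X << 8. */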
11414 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11415 || TREE_CODE (arg0) == RSHIFT_EXPR)
11416 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11417 && TREE_CODE (arg1) == INTEGER_CST
11418 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11419 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11420 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11421 < TYPE_PRECISION (TREE_TYPE (arg0))))
11422 {
11423 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11424 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11425 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11426 tree shift_type = TREE_TYPE (arg0);
11427
11428 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11429 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11430 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11431 && TYPE_PRECISION (TREE_TYPE (arg0))
11432 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11433 {
11434 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11435 tree arg00 = TREE_OPERAND (arg0, 0);
11436 /* See if more bits can be proven as zero because of
11437 zero extension. */
11438 if (TREE_CODE (arg00) == NOP_EXPR
11439 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11440 {
11441 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11442 if (TYPE_PRECISION (inner_type)
11443 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11444 && TYPE_PRECISION (inner_type) < prec)
11445 {
11446 prec = TYPE_PRECISION (inner_type);
11447 /* See if we can shorten the right shift. */
11448 if (shiftc < prec)
11449 shift_type = inner_type;
11450 /* Otherwise X >> C1 is all zeros, so we'll optimize
11451 it into (X, 0) later on by making sure zerobits
11452 is all ones. */
11453 }
11454 }
11455 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11456 if (shiftc < prec)
11457 {
11458 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11459 zerobits <<= prec - shiftc;
11460 }
11461 /* For an arithmetic shift, if the sign bit could be set, zerobits
11462 may actually contain sign bits, so no transformation is
11463 possible, unless MASK masks them all away. In that
11464 case the shift needs to be converted into a logical shift. */
11465 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11466 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11467 {
11468 if ((mask & zerobits) == 0)
11469 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11470 else
11471 zerobits = 0;
11472 }
11473 }
11474
11475 /* ((X << 16) & 0xff00) is (X, 0). */
11476 if ((mask & zerobits) == mask)
11477 return omit_one_operand_loc (loc, type,
11478 build_int_cst (type, 0), arg0);
11479
11480 newmask = mask | zerobits;
11481 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11482 {
11483 /* Only do the transformation if NEWMASK is some integer
11484 mode's mask. */
11485 for (prec = BITS_PER_UNIT;
11486 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11487 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11488 break;
11489 if (prec < HOST_BITS_PER_WIDE_INT
11490 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11491 {
11492 tree newmaskt;
11493
11494 if (shift_type != TREE_TYPE (arg0))
11495 {
11496 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11497 fold_convert_loc (loc, shift_type,
11498 TREE_OPERAND (arg0, 0)),
11499 TREE_OPERAND (arg0, 1));
11500 tem = fold_convert_loc (loc, type, tem);
11501 }
11502 else
11503 tem = op0;
11504 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11505 if (!tree_int_cst_equal (newmaskt, arg1))
11506 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11507 }
11508 }
11509 }
11510
11511 goto associate;
11512
11513 case RDIV_EXPR:
11514 /* Don't touch a floating-point divide by zero unless the mode
11515 of the constant can represent infinity. */
11516 if (TREE_CODE (arg1) == REAL_CST
11517 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11518 && real_zerop (arg1))
11519 return NULL_TREE;
11520
11521 /* (-A) / (-B) -> A / B */
11522 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11523 return fold_build2_loc (loc, RDIV_EXPR, type,
11524 TREE_OPERAND (arg0, 0),
11525 negate_expr (arg1));
11526 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11527 return fold_build2_loc (loc, RDIV_EXPR, type,
11528 negate_expr (arg0),
11529 TREE_OPERAND (arg1, 0));
11530
11531 /* Convert A/B/C to A/(B*C). */
11532 if (flag_reciprocal_math
11533 && TREE_CODE (arg0) == RDIV_EXPR)
11534 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11535 fold_build2_loc (loc, MULT_EXPR, type,
11536 TREE_OPERAND (arg0, 1), arg1));
11537
11538 /* Convert A/(B/C) to (A/B)*C. */
11539 if (flag_reciprocal_math
11540 && TREE_CODE (arg1) == RDIV_EXPR)
11541 return fold_build2_loc (loc, MULT_EXPR, type,
11542 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11543 TREE_OPERAND (arg1, 0)),
11544 TREE_OPERAND (arg1, 1));
11545
11546 /* Convert C1/(X*C2) into (C1/C2)/X. */
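      /* E.g. 6.0 / (X * 2.0) becomes (6.0 / 2.0) / X = 3.0 / X. */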
11547 if (flag_reciprocal_math
11548 && TREE_CODE (arg1) == MULT_EXPR
11549 && TREE_CODE (arg0) == REAL_CST
11550 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11551 {
11552 tree tem = const_binop (RDIV_EXPR, arg0,
11553 TREE_OPERAND (arg1, 1));
11554 if (tem)
11555 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11556 TREE_OPERAND (arg1, 0));
11557 }
11558
11559 if (flag_unsafe_math_optimizations)
11560 {
11561 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11562 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11563
11564 /* Optimize sin(x)/cos(x) as tan(x). */
11565 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11566 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11567 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11568 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11569 CALL_EXPR_ARG (arg1, 0), 0))
11570 {
11571 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11572
11573 if (tanfn != NULL_TREE)
11574 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11575 }
11576
11577 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11578 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11579 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11580 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11581 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11582 CALL_EXPR_ARG (arg1, 0), 0))
11583 {
11584 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11585
11586 if (tanfn != NULL_TREE)
11587 {
11588 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11589 CALL_EXPR_ARG (arg0, 0));
11590 return fold_build2_loc (loc, RDIV_EXPR, type,
11591 build_real (type, dconst1), tmp);
11592 }
11593 }
11594
11595 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11596 NaNs or Infinities. */
11597 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11598 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11599 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11600 {
11601 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11602 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11603
11604 if (! HONOR_NANS (element_mode (arg00))
11605 && ! HONOR_INFINITIES (element_mode (arg00))
11606 && operand_equal_p (arg00, arg01, 0))
11607 {
11608 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11609
11610 if (cosfn != NULL_TREE)
11611 return build_call_expr_loc (loc, cosfn, 1, arg00);
11612 }
11613 }
11614
11615 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11616 NaNs or Infinities. */
11617 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11618 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11619 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11620 {
11621 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11622 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11623
11624 if (! HONOR_NANS (element_mode (arg00))
11625 && ! HONOR_INFINITIES (element_mode (arg00))
11626 && operand_equal_p (arg00, arg01, 0))
11627 {
11628 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11629
11630 if (cosfn != NULL_TREE)
11631 {
11632 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11633 return fold_build2_loc (loc, RDIV_EXPR, type,
11634 build_real (type, dconst1),
11635 tmp);
11636 }
11637 }
11638 }
11639
11640 /* Optimize pow(x,c)/x as pow(x,c-1). */
11641 if (fcode0 == BUILT_IN_POW
11642 || fcode0 == BUILT_IN_POWF
11643 || fcode0 == BUILT_IN_POWL)
11644 {
11645 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11646 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11647 if (TREE_CODE (arg01) == REAL_CST
11648 && !TREE_OVERFLOW (arg01)
11649 && operand_equal_p (arg1, arg00, 0))
11650 {
11651 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11652 REAL_VALUE_TYPE c;
11653 tree arg;
11654
11655 c = TREE_REAL_CST (arg01);
11656 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11657 arg = build_real (type, c);
11658 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11659 }
11660 }
11661
11662 /* Optimize a/root(b/c) into a*root(c/b). */
11663 if (BUILTIN_ROOT_P (fcode1))
11664 {
11665 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11666
11667 if (TREE_CODE (rootarg) == RDIV_EXPR)
11668 {
11669 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11670 tree b = TREE_OPERAND (rootarg, 0);
11671 tree c = TREE_OPERAND (rootarg, 1);
11672
11673 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11674
11675 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11676 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11677 }
11678 }
11679
11680 /* Optimize x/expN(y) into x*expN(-y). */
11681 if (BUILTIN_EXPONENT_P (fcode1))
11682 {
11683 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11684 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11685 arg1 = build_call_expr_loc (loc,
11686 expfn, 1,
11687 fold_convert_loc (loc, type, arg));
11688 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11689 }
11690
11691 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11692 if (fcode1 == BUILT_IN_POW
11693 || fcode1 == BUILT_IN_POWF
11694 || fcode1 == BUILT_IN_POWL)
11695 {
11696 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11697 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11698 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11699 tree neg11 = fold_convert_loc (loc, type,
11700 negate_expr (arg11));
11701 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11702 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11703 }
11704 }
11705 return NULL_TREE;
11706
11707 case TRUNC_DIV_EXPR:
11708 /* Optimize (X & (-A)) / A where A is a power of 2,
11709 to X >> log2(A) */
11710 if (TREE_CODE (arg0) == BIT_AND_EXPR
11711 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11712 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11713 {
11714 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11715 arg1, TREE_OPERAND (arg0, 1));
11716 if (sum && integer_zerop (sum))
11717 {
11718 tree pow2 = build_int_cst (integer_type_node, wi::exact_log2 (arg1));
11719 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11720 TREE_OPERAND (arg0, 0), pow2);
11721 }
11722 }
11723
11724 /* Fall through */
11725
11726 case FLOOR_DIV_EXPR:
11727 /* Simplify A / (B << N) where A and B are positive and B is
11728 a power of 2, to A >> (N + log2(B)). */
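      /* E.g. A / (4 << N) becomes A >> (N + 2) when A is unsigned or
	 known non-negative. */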
11729 strict_overflow_p = false;
11730 if (TREE_CODE (arg1) == LSHIFT_EXPR
11731 && (TYPE_UNSIGNED (type)
11732 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11733 {
11734 tree sval = TREE_OPERAND (arg1, 0);
11735 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11736 {
11737 tree sh_cnt = TREE_OPERAND (arg1, 1);
11738 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11739 wi::exact_log2 (sval));
11740
11741 if (strict_overflow_p)
11742 fold_overflow_warning (("assuming signed overflow does not "
11743 "occur when simplifying A / (B << N)"),
11744 WARN_STRICT_OVERFLOW_MISC);
11745
11746 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11747 sh_cnt, pow2);
11748 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11749 fold_convert_loc (loc, type, arg0), sh_cnt);
11750 }
11751 }
11752
11753 /* Fall through */
11754
11755 case ROUND_DIV_EXPR:
11756 case CEIL_DIV_EXPR:
11757 case EXACT_DIV_EXPR:
11758 if (integer_zerop (arg1))
11759 return NULL_TREE;
11760
11761 /* Convert -A / -B to A / B when the type is signed and overflow is
11762 undefined. */
11763 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11764 && TREE_CODE (arg0) == NEGATE_EXPR
11765 && negate_expr_p (arg1))
11766 {
11767 if (INTEGRAL_TYPE_P (type))
11768 fold_overflow_warning (("assuming signed overflow does not occur "
11769 "when distributing negation across "
11770 "division"),
11771 WARN_STRICT_OVERFLOW_MISC);
11772 return fold_build2_loc (loc, code, type,
11773 fold_convert_loc (loc, type,
11774 TREE_OPERAND (arg0, 0)),
11775 fold_convert_loc (loc, type,
11776 negate_expr (arg1)));
11777 }
11778 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11779 && TREE_CODE (arg1) == NEGATE_EXPR
11780 && negate_expr_p (arg0))
11781 {
11782 if (INTEGRAL_TYPE_P (type))
11783 fold_overflow_warning (("assuming signed overflow does not occur "
11784 "when distributing negation across "
11785 "division"),
11786 WARN_STRICT_OVERFLOW_MISC);
11787 return fold_build2_loc (loc, code, type,
11788 fold_convert_loc (loc, type,
11789 negate_expr (arg0)),
11790 fold_convert_loc (loc, type,
11791 TREE_OPERAND (arg1, 0)));
11792 }
11793
11794 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11795 operation, EXACT_DIV_EXPR.
11796
11797 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11798 At one time others generated faster code; it's not clear whether they
11799 do after the last round of changes to the DIV code in expmed.c. */
11800 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11801 && multiple_of_p (type, arg0, arg1))
11802 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11803
11804 strict_overflow_p = false;
11805 if (TREE_CODE (arg1) == INTEGER_CST
11806 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11807 &strict_overflow_p)))
11808 {
11809 if (strict_overflow_p)
11810 fold_overflow_warning (("assuming signed overflow does not occur "
11811 "when simplifying division"),
11812 WARN_STRICT_OVERFLOW_MISC);
11813 return fold_convert_loc (loc, type, tem);
11814 }
11815
11816 return NULL_TREE;
11817
11818 case CEIL_MOD_EXPR:
11819 case FLOOR_MOD_EXPR:
11820 case ROUND_MOD_EXPR:
11821 case TRUNC_MOD_EXPR:
11822 /* X % -Y is the same as X % Y. */
11823 if (code == TRUNC_MOD_EXPR
11824 && !TYPE_UNSIGNED (type)
11825 && TREE_CODE (arg1) == NEGATE_EXPR
11826 && !TYPE_OVERFLOW_TRAPS (type))
11827 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11828 fold_convert_loc (loc, type,
11829 TREE_OPERAND (arg1, 0)));
11830
11831 strict_overflow_p = false;
11832 if (TREE_CODE (arg1) == INTEGER_CST
11833 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11834 &strict_overflow_p)))
11835 {
11836 if (strict_overflow_p)
11837 fold_overflow_warning (("assuming signed overflow does not occur "
11838 "when simplifying modulus"),
11839 WARN_STRICT_OVERFLOW_MISC);
11840 return fold_convert_loc (loc, type, tem);
11841 }
11842
11843 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11844 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11845 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11846 && (TYPE_UNSIGNED (type)
11847 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11848 {
11849 tree c = arg1;
11850 /* Also optimize A % (C << N) where C is a power of 2,
11851 to A & ((C << N) - 1). */
11852 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11853 c = TREE_OPERAND (arg1, 0);
11854
11855 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11856 {
11857 tree mask
11858 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11859 build_int_cst (TREE_TYPE (arg1), 1));
11860 if (strict_overflow_p)
11861 fold_overflow_warning (("assuming signed overflow does not "
11862 "occur when simplifying "
11863 "X % (power of two)"),
11864 WARN_STRICT_OVERFLOW_MISC);
11865 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11866 fold_convert_loc (loc, type, arg0),
11867 fold_convert_loc (loc, type, mask));
11868 }
11869 }
11870
11871 return NULL_TREE;
11872
11873 case LROTATE_EXPR:
11874 case RROTATE_EXPR:
11875 case RSHIFT_EXPR:
11876 case LSHIFT_EXPR:
11877 /* Since a negative shift count is not well-defined,
11878 don't try to compute it in the compiler. */
11879 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11880 return NULL_TREE;
11881
11882 prec = element_precision (type);
11883
11884 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
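      /* For example, (x >> 2) >> 3 becomes x >> 5; for rotates the
         combined count wraps modulo the precision.  */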
11885 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11886 && tree_to_uhwi (arg1) < prec
11887 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11888 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11889 {
11890 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11891 + tree_to_uhwi (arg1));
11892
11893 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11894 being well defined. */
11895 if (low >= prec)
11896 {
11897 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11898 low = low % prec;
11899 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11900 return omit_one_operand_loc (loc, type, build_zero_cst (type),
11901 TREE_OPERAND (arg0, 0));
11902 else
11903 low = prec - 1;
11904 }
11905
11906 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11907 build_int_cst (TREE_TYPE (arg1), low));
11908 }
11909
11910 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11911 into x & ((unsigned)-1 >> c) for unsigned types. */
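      /* For example, (x >> 3) << 3 becomes x & -8, clearing the low
         three bits with a single mask.  */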
11912 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11913 || (TYPE_UNSIGNED (type)
11914 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11915 && tree_fits_uhwi_p (arg1)
11916 && tree_to_uhwi (arg1) < prec
11917 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11918 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11919 {
11920 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11921 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
11922 tree lshift;
11923 tree arg00;
11924
11925 if (low0 == low1)
11926 {
11927 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11928
11929 lshift = build_minus_one_cst (type);
11930 lshift = const_binop (code, lshift, arg1);
11931
11932 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11933 }
11934 }
11935
11936 /* If we have a rotate of a bit operation with the rotate count and
11937 the second operand of the bit operation both constant,
11938 permute the two operations. */
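      /* For example, (x & 0xFF00) rrotate 8 becomes
         (x rrotate 8) & (0xFF00 rrotate 8), and the rotated constant
         folds away.  */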
11939 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11940 && (TREE_CODE (arg0) == BIT_AND_EXPR
11941 || TREE_CODE (arg0) == BIT_IOR_EXPR
11942 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11943 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11944 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11945 fold_build2_loc (loc, code, type,
11946 TREE_OPERAND (arg0, 0), arg1),
11947 fold_build2_loc (loc, code, type,
11948 TREE_OPERAND (arg0, 1), arg1));
11949
11950 /* Two consecutive rotates adding up to some integer
11951 multiple of the precision of the type can be ignored. */
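      /* For example, on a 32-bit type (x rrotate 24) rrotate 8 rotates
         by a full 32 bits, which is a no-op.  */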
11952 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11953 && TREE_CODE (arg0) == RROTATE_EXPR
11954 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11955 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
11956 prec) == 0)
11957 return TREE_OPERAND (arg0, 0);
11958
11959 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11960 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11961 if the latter can be further optimized. */
11962 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11963 && TREE_CODE (arg0) == BIT_AND_EXPR
11964 && TREE_CODE (arg1) == INTEGER_CST
11965 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11966 {
11967 tree mask = fold_build2_loc (loc, code, type,
11968 fold_convert_loc (loc, type,
11969 TREE_OPERAND (arg0, 1)),
11970 arg1);
11971 tree shift = fold_build2_loc (loc, code, type,
11972 fold_convert_loc (loc, type,
11973 TREE_OPERAND (arg0, 0)),
11974 arg1);
11975 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11976 if (tem)
11977 return tem;
11978 }
11979
11980 return NULL_TREE;
11981
11982 case MIN_EXPR:
11983 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11984 if (tem)
11985 return tem;
11986 goto associate;
11987
11988 case MAX_EXPR:
11989 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11990 if (tem)
11991 return tem;
11992 goto associate;
11993
11994 case TRUTH_ANDIF_EXPR:
11995 /* Note that the operands of this must be ints
11996 and their values must be 0 or 1.
11997 ("true" is a fixed value perhaps depending on the language.) */
11998 /* If first arg is constant zero, return it. */
11999 if (integer_zerop (arg0))
12000 return fold_convert_loc (loc, type, arg0);
12001 case TRUTH_AND_EXPR:
12002 /* If either arg is constant true, drop it. */
12003 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12004 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12005 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12006 /* Preserve sequence points. */
12007 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12008 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12009 /* If second arg is constant zero, result is zero, but first arg
12010 must be evaluated. */
12011 if (integer_zerop (arg1))
12012 return omit_one_operand_loc (loc, type, arg1, arg0);
12013 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12014 case will be handled here. */
12015 if (integer_zerop (arg0))
12016 return omit_one_operand_loc (loc, type, arg0, arg1);
12017
12018 /* !X && X is always false. */
12019 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12020 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12021 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12022 /* X && !X is always false. */
12023 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12024 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12025 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12026
12027 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12028 means A >= Y && A != MAX, but in this case we know that
12029 A < X <= MAX. */
12030
12031 if (!TREE_SIDE_EFFECTS (arg0)
12032 && !TREE_SIDE_EFFECTS (arg1))
12033 {
12034 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12035 if (tem && !operand_equal_p (tem, arg0, 0))
12036 return fold_build2_loc (loc, code, type, tem, arg1);
12037
12038 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12039 if (tem && !operand_equal_p (tem, arg1, 0))
12040 return fold_build2_loc (loc, code, type, arg0, tem);
12041 }
12042
12043 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12044 != NULL_TREE)
12045 return tem;
12046
12047 return NULL_TREE;
12048
12049 case TRUTH_ORIF_EXPR:
12050 /* Note that the operands of this must be ints
12051 and their values must be 0 or true.
12052 ("true" is a fixed value perhaps depending on the language.) */
12053 /* If first arg is constant true, return it. */
12054 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12055 return fold_convert_loc (loc, type, arg0);
12056 case TRUTH_OR_EXPR:
12057 /* If either arg is constant zero, drop it. */
12058 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12059 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12060 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12061 /* Preserve sequence points. */
12062 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12063 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12064 /* If second arg is constant true, result is true, but we must
12065 evaluate first arg. */
12066 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12067 return omit_one_operand_loc (loc, type, arg1, arg0);
12068 /* Likewise for first arg, but note this only occurs here for
12069 TRUTH_OR_EXPR. */
12070 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12071 return omit_one_operand_loc (loc, type, arg0, arg1);
12072
12073 /* !X || X is always true. */
12074 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12075 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12076 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12077 /* X || !X is always true. */
12078 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12079 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12080 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12081
12082 /* (X && !Y) || (!X && Y) is X ^ Y */
12083 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12084 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12085 {
12086 tree a0, a1, l0, l1, n0, n1;
12087
12088 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12089 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12090
12091 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12092 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12093
12094 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12095 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12096
12097 if ((operand_equal_p (n0, a0, 0)
12098 && operand_equal_p (n1, a1, 0))
12099 || (operand_equal_p (n0, a1, 0)
12100 && operand_equal_p (n1, a0, 0)))
12101 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12102 }
12103
12104 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12105 != NULL_TREE)
12106 return tem;
12107
12108 return NULL_TREE;
12109
12110 case TRUTH_XOR_EXPR:
12111 /* If the second arg is constant zero, drop it. */
12112 if (integer_zerop (arg1))
12113 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12114 /* If the second arg is constant true, this is a logical inversion. */
12115 if (integer_onep (arg1))
12116 {
12117 tem = invert_truthvalue_loc (loc, arg0);
12118 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12119 }
12120 /* Identical arguments cancel to zero. */
12121 if (operand_equal_p (arg0, arg1, 0))
12122 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12123
12124 /* !X ^ X is always true. */
12125 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12126 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12127 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12128
12129 /* X ^ !X is always true. */
12130 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12131 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12132 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12133
12134 return NULL_TREE;
12135
12136 case EQ_EXPR:
12137 case NE_EXPR:
12138 STRIP_NOPS (arg0);
12139 STRIP_NOPS (arg1);
12140
12141 tem = fold_comparison (loc, code, type, op0, op1);
12142 if (tem != NULL_TREE)
12143 return tem;
12144
12145 /* bool_var != 0 becomes bool_var. */
12146 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12147 && code == NE_EXPR)
12148 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12149
12150 /* bool_var == 1 becomes bool_var. */
12151 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12152 && code == EQ_EXPR)
12153 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12154
12155 /* bool_var != 1 becomes !bool_var. */
12156 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12157 && code == NE_EXPR)
12158 return fold_convert_loc (loc, type,
12159 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12160 TREE_TYPE (arg0), arg0));
12161
12162 /* bool_var == 0 becomes !bool_var. */
12163 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12164 && code == EQ_EXPR)
12165 return fold_convert_loc (loc, type,
12166 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12167 TREE_TYPE (arg0), arg0));
12168
12169 /* !exp != 0 becomes !exp */
12170 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12171 && code == NE_EXPR)
12172 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12173
12174 /* If this is an equality comparison of the address of two non-weak,
12175 unaliased symbols neither of which is extern (since we do not
12176 have access to attributes for externs), then we know the result. */
12177 if (TREE_CODE (arg0) == ADDR_EXPR
12178 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12179 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12180 && ! lookup_attribute ("alias",
12181 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12182 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12183 && TREE_CODE (arg1) == ADDR_EXPR
12184 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12185 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12186 && ! lookup_attribute ("alias",
12187 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12188 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12189 {
12190 /* We know that we're looking at the address of two
12191 non-weak, unaliased, static _DECL nodes.
12192
12193 It is both wasteful and incorrect to call operand_equal_p
12194 to compare the two ADDR_EXPR nodes. It is wasteful in that
12195 all we need to do is test pointer equality for the arguments
12196 to the two ADDR_EXPR nodes. It is incorrect to use
12197 operand_equal_p as that function is NOT equivalent to a
12198 C equality test. It can in fact return false for two
12199 objects which would test as equal using the C equality
12200 operator. */
12201 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12202 return constant_boolean_node (equal
12203 ? code == EQ_EXPR : code != EQ_EXPR,
12204 type);
12205 }
12206
12207 /* Similarly for a NEGATE_EXPR. */
12208 if (TREE_CODE (arg0) == NEGATE_EXPR
12209 && TREE_CODE (arg1) == INTEGER_CST
12210 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12211 arg1)))
12212 && TREE_CODE (tem) == INTEGER_CST
12213 && !TREE_OVERFLOW (tem))
12214 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12215
12216 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
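      /* For example, (x ^ 5) == 3 becomes x == 6, since XOR by a
         constant is its own inverse.  */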
12217 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12218 && TREE_CODE (arg1) == INTEGER_CST
12219 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12220 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12221 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12222 fold_convert_loc (loc,
12223 TREE_TYPE (arg0),
12224 arg1),
12225 TREE_OPERAND (arg0, 1)));
12226
12227 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12228 if ((TREE_CODE (arg0) == PLUS_EXPR
12229 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12230 || TREE_CODE (arg0) == MINUS_EXPR)
12231 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12232 0)),
12233 arg1, 0)
12234 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12235 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12236 {
12237 tree val = TREE_OPERAND (arg0, 1);
12238 return omit_two_operands_loc (loc, type,
12239 fold_build2_loc (loc, code, type,
12240 val,
12241 build_int_cst (TREE_TYPE (val),
12242 0)),
12243 TREE_OPERAND (arg0, 0), arg1);
12244 }
12245
12246 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
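      /* C - X == X would require C == 2 * X, which is impossible for
         odd C since 2 * X is always even, so the result is known.  */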
12247 if (TREE_CODE (arg0) == MINUS_EXPR
12248 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12249 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12250 1)),
12251 arg1, 0)
12252 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12253 {
12254 return omit_two_operands_loc (loc, type,
12255 code == NE_EXPR
12256 ? boolean_true_node : boolean_false_node,
12257 TREE_OPERAND (arg0, 1), arg1);
12258 }
12259
12260 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12261 if (TREE_CODE (arg0) == ABS_EXPR
12262 && (integer_zerop (arg1) || real_zerop (arg1)))
12263 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12264
12265 /* If this is an EQ or NE comparison with zero and ARG0 is
12266 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12267 two operations, but the latter can be done in one less insn
12268 on machines that have only two-operand insns or on which a
12269 constant cannot be the first operand. */
12270 if (TREE_CODE (arg0) == BIT_AND_EXPR
12271 && integer_zerop (arg1))
12272 {
12273 tree arg00 = TREE_OPERAND (arg0, 0);
12274 tree arg01 = TREE_OPERAND (arg0, 1);
12275 if (TREE_CODE (arg00) == LSHIFT_EXPR
12276 && integer_onep (TREE_OPERAND (arg00, 0)))
12277 {
12278 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12279 arg01, TREE_OPERAND (arg00, 1));
12280 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12281 build_int_cst (TREE_TYPE (arg0), 1));
12282 return fold_build2_loc (loc, code, type,
12283 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12284 arg1);
12285 }
12286 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12287 && integer_onep (TREE_OPERAND (arg01, 0)))
12288 {
12289 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12290 arg00, TREE_OPERAND (arg01, 1));
12291 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12292 build_int_cst (TREE_TYPE (arg0), 1));
12293 return fold_build2_loc (loc, code, type,
12294 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12295 arg1);
12296 }
12297 }
12298
12299 /* If this is an NE or EQ comparison of zero against the result of a
12300 signed MOD operation whose second operand is a power of 2, make
12301 the MOD operation unsigned since it is simpler and equivalent. */
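      /* The remainder is zero exactly when the power-of-2 divisor
         divides X, which depends only on X's low bits, not its sign.  */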
12302 if (integer_zerop (arg1)
12303 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12304 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12305 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12306 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12307 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12308 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12309 {
12310 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12311 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12312 fold_convert_loc (loc, newtype,
12313 TREE_OPERAND (arg0, 0)),
12314 fold_convert_loc (loc, newtype,
12315 TREE_OPERAND (arg0, 1)));
12316
12317 return fold_build2_loc (loc, code, type, newmod,
12318 fold_convert_loc (loc, newtype, arg1));
12319 }
12320
12321 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12322 C1 is a valid shift constant, and C2 is a power of two, i.e.
12323 a single bit. */
12324 if (TREE_CODE (arg0) == BIT_AND_EXPR
12325 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12326 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12327 == INTEGER_CST
12328 && integer_pow2p (TREE_OPERAND (arg0, 1))
12329 && integer_zerop (arg1))
12330 {
12331 tree itype = TREE_TYPE (arg0);
12332 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12333 prec = TYPE_PRECISION (itype);
12334
12335 /* Check for a valid shift count. */
12336 if (wi::ltu_p (arg001, prec))
12337 {
12338 tree arg01 = TREE_OPERAND (arg0, 1);
12339 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12340 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12341 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12342 can be rewritten as (X & (C2 << C1)) != 0. */
12343 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12344 {
12345 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12346 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12347 return fold_build2_loc (loc, code, type, tem,
12348 fold_convert_loc (loc, itype, arg1));
12349 }
12350 /* Otherwise, for signed (arithmetic) shifts,
12351 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12352 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12353 else if (!TYPE_UNSIGNED (itype))
12354 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12355 arg000, build_int_cst (itype, 0));
12356 /* Otherwise, of unsigned (logical) shifts,
12357 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12358 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12359 else
12360 return omit_one_operand_loc (loc, type,
12361 code == EQ_EXPR ? integer_one_node
12362 : integer_zero_node,
12363 arg000);
12364 }
12365 }
12366
12367 /* If we have (A & C) == C where C is a power of 2, convert this into
12368 (A & C) != 0. Similarly for NE_EXPR. */
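      /* With a single bit set in C, A & C is either 0 or C, so testing
         for equality with C and testing for nonzero are equivalent.  */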
12369 if (TREE_CODE (arg0) == BIT_AND_EXPR
12370 && integer_pow2p (TREE_OPERAND (arg0, 1))
12371 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12372 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12373 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12374 integer_zero_node));
12375
12376 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12377 bit, then fold the expression into A < 0 or A >= 0. */
12378 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12379 if (tem)
12380 return tem;
12381
12382 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12383 Similarly for NE_EXPR. */
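      /* A & C can never set a bit outside C, so it cannot equal a D
         that has such a bit set.  */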
12384 if (TREE_CODE (arg0) == BIT_AND_EXPR
12385 && TREE_CODE (arg1) == INTEGER_CST
12386 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12387 {
12388 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12389 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12390 TREE_OPERAND (arg0, 1));
12391 tree dandnotc
12392 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12393 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12394 notc);
12395 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12396 if (integer_nonzerop (dandnotc))
12397 return omit_one_operand_loc (loc, type, rslt, arg0);
12398 }
12399
12400 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12401 Similarly for NE_EXPR. */
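      /* A | C always has every bit of C set, so it cannot equal a D
         that lacks one of those bits.  */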
12402 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12403 && TREE_CODE (arg1) == INTEGER_CST
12404 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12405 {
12406 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12407 tree candnotd
12408 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12409 TREE_OPERAND (arg0, 1),
12410 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12411 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12412 if (integer_nonzerop (candnotd))
12413 return omit_one_operand_loc (loc, type, rslt, arg0);
12414 }
12415
12416 /* If this is a comparison of a field, we may be able to simplify it. */
12417 if ((TREE_CODE (arg0) == COMPONENT_REF
12418 || TREE_CODE (arg0) == BIT_FIELD_REF)
12419 /* Handle the constant case even without -O
12420 to make sure the warnings are given. */
12421 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12422 {
12423 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12424 if (t1)
12425 return t1;
12426 }
12427
12428 /* Optimize comparisons of strlen vs zero to a compare of the
12429 first character of the string vs zero. To wit,
12430 strlen(ptr) == 0 => *ptr == 0
12431 strlen(ptr) != 0 => *ptr != 0
12432 Other cases should reduce to one of these two (or a constant)
12433 due to the return value of strlen being unsigned. */
12434 if (TREE_CODE (arg0) == CALL_EXPR
12435 && integer_zerop (arg1))
12436 {
12437 tree fndecl = get_callee_fndecl (arg0);
12438
12439 if (fndecl
12440 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12441 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12442 && call_expr_nargs (arg0) == 1
12443 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12444 {
12445 tree iref = build_fold_indirect_ref_loc (loc,
12446 CALL_EXPR_ARG (arg0, 0));
12447 return fold_build2_loc (loc, code, type, iref,
12448 build_int_cst (TREE_TYPE (iref), 0));
12449 }
12450 }
12451
12452 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12453 of X. Similarly fold (X >> C) == 0 into X >= 0. */
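      /* An arithmetic shift right by precision - 1 leaves only copies
         of the sign bit, so the result is nonzero exactly when X is
         negative.  */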
12454 if (TREE_CODE (arg0) == RSHIFT_EXPR
12455 && integer_zerop (arg1)
12456 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12457 {
12458 tree arg00 = TREE_OPERAND (arg0, 0);
12459 tree arg01 = TREE_OPERAND (arg0, 1);
12460 tree itype = TREE_TYPE (arg00);
12461 if (wi::eq_p (arg01, element_precision (itype) - 1))
12462 {
12463 if (TYPE_UNSIGNED (itype))
12464 {
12465 itype = signed_type_for (itype);
12466 arg00 = fold_convert_loc (loc, itype, arg00);
12467 }
12468 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12469 type, arg00, build_zero_cst (itype));
12470 }
12471 }
12472
12473 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12474 if (integer_zerop (arg1)
12475 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12476 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12477 TREE_OPERAND (arg0, 1));
12478
12479 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12480 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12481 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12482 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12483 build_zero_cst (TREE_TYPE (arg0)));
12484 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12485 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12486 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12487 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12488 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12489 build_zero_cst (TREE_TYPE (arg0)));
12490
12491 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12492 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12493 && TREE_CODE (arg1) == INTEGER_CST
12494 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12495 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12496 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12497 TREE_OPERAND (arg0, 1), arg1));
12498
12499 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12500 (X & C) == 0 when C is a single bit. */
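      /* With a single-bit C, ~X has that bit clear exactly when X has
         it set, so the two tests are complementary.  */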
12501 if (TREE_CODE (arg0) == BIT_AND_EXPR
12502 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12503 && integer_zerop (arg1)
12504 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12505 {
12506 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12507 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12508 TREE_OPERAND (arg0, 1));
12509 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12510 type, tem,
12511 fold_convert_loc (loc, TREE_TYPE (arg0),
12512 arg1));
12513 }
12514
12515 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12516 constant C is a power of two, i.e. a single bit. */
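      /* XOR with the single-bit C flips that bit, exchanging the zero
         and nonzero outcomes of the test.  */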
12517 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12518 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12519 && integer_zerop (arg1)
12520 && integer_pow2p (TREE_OPERAND (arg0, 1))
12521 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12522 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12523 {
12524 tree arg00 = TREE_OPERAND (arg0, 0);
12525 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12526 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12527 }
12528
12529 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12530 when C is a power of two, i.e. a single bit. */
12531 if (TREE_CODE (arg0) == BIT_AND_EXPR
12532 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12533 && integer_zerop (arg1)
12534 && integer_pow2p (TREE_OPERAND (arg0, 1))
12535 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12536 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12537 {
12538 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12539 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12540 arg000, TREE_OPERAND (arg0, 1));
12541 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12542 tem, build_int_cst (TREE_TYPE (tem), 0));
12543 }
12544
12545 if (integer_zerop (arg1)
12546 && tree_expr_nonzero_p (arg0))
12547 {
12548 tree res = constant_boolean_node (code == NE_EXPR, type);
12549 return omit_one_operand_loc (loc, type, res, arg0);
12550 }
12551
12552 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12553 if (TREE_CODE (arg0) == NEGATE_EXPR
12554 && TREE_CODE (arg1) == NEGATE_EXPR)
12555 return fold_build2_loc (loc, code, type,
12556 TREE_OPERAND (arg0, 0),
12557 fold_convert_loc (loc, TREE_TYPE (arg0),
12558 TREE_OPERAND (arg1, 0)));
12559
12560 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
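      /* X & C and Y & C are equal exactly when X and Y agree on every
         bit of C, i.e. when (X ^ Y) & C is zero.  */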
12561 if (TREE_CODE (arg0) == BIT_AND_EXPR
12562 && TREE_CODE (arg1) == BIT_AND_EXPR)
12563 {
12564 tree arg00 = TREE_OPERAND (arg0, 0);
12565 tree arg01 = TREE_OPERAND (arg0, 1);
12566 tree arg10 = TREE_OPERAND (arg1, 0);
12567 tree arg11 = TREE_OPERAND (arg1, 1);
12568 tree itype = TREE_TYPE (arg0);
12569
12570 if (operand_equal_p (arg01, arg11, 0))
12571 return fold_build2_loc (loc, code, type,
12572 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12573 fold_build2_loc (loc,
12574 BIT_XOR_EXPR, itype,
12575 arg00, arg10),
12576 arg01),
12577 build_zero_cst (itype));
12578
12579 if (operand_equal_p (arg01, arg10, 0))
12580 return fold_build2_loc (loc, code, type,
12581 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12582 fold_build2_loc (loc,
12583 BIT_XOR_EXPR, itype,
12584 arg00, arg11),
12585 arg01),
12586 build_zero_cst (itype));
12587
12588 if (operand_equal_p (arg00, arg11, 0))
12589 return fold_build2_loc (loc, code, type,
12590 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12591 fold_build2_loc (loc,
12592 BIT_XOR_EXPR, itype,
12593 arg01, arg10),
12594 arg00),
12595 build_zero_cst (itype));
12596
12597 if (operand_equal_p (arg00, arg10, 0))
12598 return fold_build2_loc (loc, code, type,
12599 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12600 fold_build2_loc (loc,
12601 BIT_XOR_EXPR, itype,
12602 arg01, arg11),
12603 arg00),
12604 build_zero_cst (itype));
12605 }
12606
12607 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12608 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12609 {
12610 tree arg00 = TREE_OPERAND (arg0, 0);
12611 tree arg01 = TREE_OPERAND (arg0, 1);
12612 tree arg10 = TREE_OPERAND (arg1, 0);
12613 tree arg11 = TREE_OPERAND (arg1, 1);
12614 tree itype = TREE_TYPE (arg0);
12615
12616 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12617 operand_equal_p guarantees no side-effects so we don't need
12618 to use omit_one_operand on Z. */
12619 if (operand_equal_p (arg01, arg11, 0))
12620 return fold_build2_loc (loc, code, type, arg00,
12621 fold_convert_loc (loc, TREE_TYPE (arg00),
12622 arg10));
12623 if (operand_equal_p (arg01, arg10, 0))
12624 return fold_build2_loc (loc, code, type, arg00,
12625 fold_convert_loc (loc, TREE_TYPE (arg00),
12626 arg11));
12627 if (operand_equal_p (arg00, arg11, 0))
12628 return fold_build2_loc (loc, code, type, arg01,
12629 fold_convert_loc (loc, TREE_TYPE (arg01),
12630 arg10));
12631 if (operand_equal_p (arg00, arg10, 0))
12632 return fold_build2_loc (loc, code, type, arg01,
12633 fold_convert_loc (loc, TREE_TYPE (arg01),
12634 arg11));
12635
12636 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
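          /* For example, (x ^ 3) == (y ^ 5) becomes (x ^ 6) == y.  */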
12637 if (TREE_CODE (arg01) == INTEGER_CST
12638 && TREE_CODE (arg11) == INTEGER_CST)
12639 {
12640 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12641 fold_convert_loc (loc, itype, arg11));
12642 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12643 return fold_build2_loc (loc, code, type, tem,
12644 fold_convert_loc (loc, itype, arg10));
12645 }
12646 }
12647
12648 /* Attempt to simplify equality/inequality comparisons of complex
12649 values. Only lower the comparison if the result is known or
12650 can be simplified to a single scalar comparison. */
12651 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12652 || TREE_CODE (arg0) == COMPLEX_CST)
12653 && (TREE_CODE (arg1) == COMPLEX_EXPR
12654 || TREE_CODE (arg1) == COMPLEX_CST))
12655 {
12656 tree real0, imag0, real1, imag1;
12657 tree rcond, icond;
12658
12659 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12660 {
12661 real0 = TREE_OPERAND (arg0, 0);
12662 imag0 = TREE_OPERAND (arg0, 1);
12663 }
12664 else
12665 {
12666 real0 = TREE_REALPART (arg0);
12667 imag0 = TREE_IMAGPART (arg0);
12668 }
12669
12670 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12671 {
12672 real1 = TREE_OPERAND (arg1, 0);
12673 imag1 = TREE_OPERAND (arg1, 1);
12674 }
12675 else
12676 {
12677 real1 = TREE_REALPART (arg1);
12678 imag1 = TREE_IMAGPART (arg1);
12679 }
12680
12681 rcond = fold_binary_loc (loc, code, type, real0, real1);
12682 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12683 {
12684 if (integer_zerop (rcond))
12685 {
12686 if (code == EQ_EXPR)
12687 return omit_two_operands_loc (loc, type, boolean_false_node,
12688 imag0, imag1);
12689 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12690 }
12691 else
12692 {
12693 if (code == NE_EXPR)
12694 return omit_two_operands_loc (loc, type, boolean_true_node,
12695 imag0, imag1);
12696 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12697 }
12698 }
12699
12700 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12701 if (icond && TREE_CODE (icond) == INTEGER_CST)
12702 {
12703 if (integer_zerop (icond))
12704 {
12705 if (code == EQ_EXPR)
12706 return omit_two_operands_loc (loc, type, boolean_false_node,
12707 real0, real1);
12708 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12709 }
12710 else
12711 {
12712 if (code == NE_EXPR)
12713 return omit_two_operands_loc (loc, type, boolean_true_node,
12714 real0, real1);
12715 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12716 }
12717 }
12718 }
12719
12720 return NULL_TREE;
12721
12722 case LT_EXPR:
12723 case GT_EXPR:
12724 case LE_EXPR:
12725 case GE_EXPR:
12726 tem = fold_comparison (loc, code, type, op0, op1);
12727 if (tem != NULL_TREE)
12728 return tem;
12729
12730 /* Transform comparisons of the form X +- C CMP X. */
12731 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12732 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12733 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12734 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12735 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12736 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12737 {
12738 tree arg01 = TREE_OPERAND (arg0, 1);
12739 enum tree_code code0 = TREE_CODE (arg0);
12740 int is_positive;
12741
12742 if (TREE_CODE (arg01) == REAL_CST)
12743 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12744 else
12745 is_positive = tree_int_cst_sgn (arg01);
12746
12747 /* (X - c) > X becomes false. */
12748 if (code == GT_EXPR
12749 && ((code0 == MINUS_EXPR && is_positive >= 0)
12750 || (code0 == PLUS_EXPR && is_positive <= 0)))
12751 {
12752 if (TREE_CODE (arg01) == INTEGER_CST
12753 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12754 fold_overflow_warning (("assuming signed overflow does not "
12755 "occur when assuming that (X - c) > X "
12756 "is always false"),
12757 WARN_STRICT_OVERFLOW_ALL);
12758 return constant_boolean_node (0, type);
12759 }
12760
12761 /* Likewise (X + c) < X becomes false. */
12762 if (code == LT_EXPR
12763 && ((code0 == PLUS_EXPR && is_positive >= 0)
12764 || (code0 == MINUS_EXPR && is_positive <= 0)))
12765 {
12766 if (TREE_CODE (arg01) == INTEGER_CST
12767 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12768 fold_overflow_warning (("assuming signed overflow does not "
12769 "occur when assuming that "
12770 "(X + c) < X is always false"),
12771 WARN_STRICT_OVERFLOW_ALL);
12772 return constant_boolean_node (0, type);
12773 }
12774
12775 /* Convert (X - c) <= X to true. */
12776 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12777 && code == LE_EXPR
12778 && ((code0 == MINUS_EXPR && is_positive >= 0)
12779 || (code0 == PLUS_EXPR && is_positive <= 0)))
12780 {
12781 if (TREE_CODE (arg01) == INTEGER_CST
12782 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12783 fold_overflow_warning (("assuming signed overflow does not "
12784 "occur when assuming that "
12785 "(X - c) <= X is always true"),
12786 WARN_STRICT_OVERFLOW_ALL);
12787 return constant_boolean_node (1, type);
12788 }
12789
12790 /* Convert (X + c) >= X to true. */
12791 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12792 && code == GE_EXPR
12793 && ((code0 == PLUS_EXPR && is_positive >= 0)
12794 || (code0 == MINUS_EXPR && is_positive <= 0)))
12795 {
12796 if (TREE_CODE (arg01) == INTEGER_CST
12797 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12798 fold_overflow_warning (("assuming signed overflow does not "
12799 "occur when assuming that "
12800 "(X + c) >= X is always true"),
12801 WARN_STRICT_OVERFLOW_ALL);
12802 return constant_boolean_node (1, type);
12803 }
12804
12805 if (TREE_CODE (arg01) == INTEGER_CST)
12806 {
12807 /* Convert X + c > X and X - c < X to true for integers. */
12808 if (code == GT_EXPR
12809 && ((code0 == PLUS_EXPR && is_positive > 0)
12810 || (code0 == MINUS_EXPR && is_positive < 0)))
12811 {
12812 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12813 fold_overflow_warning (("assuming signed overflow does "
12814 "not occur when assuming that "
12815 "(X + c) > X is always true"),
12816 WARN_STRICT_OVERFLOW_ALL);
12817 return constant_boolean_node (1, type);
12818 }
12819
12820 if (code == LT_EXPR
12821 && ((code0 == MINUS_EXPR && is_positive > 0)
12822 || (code0 == PLUS_EXPR && is_positive < 0)))
12823 {
12824 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12825 fold_overflow_warning (("assuming signed overflow does "
12826 "not occur when assuming that "
12827 "(X - c) < X is always true"),
12828 WARN_STRICT_OVERFLOW_ALL);
12829 return constant_boolean_node (1, type);
12830 }
12831
12832 /* Convert X + c <= X and X - c >= X to false for integers. */
12833 if (code == LE_EXPR
12834 && ((code0 == PLUS_EXPR && is_positive > 0)
12835 || (code0 == MINUS_EXPR && is_positive < 0)))
12836 {
12837 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12838 fold_overflow_warning (("assuming signed overflow does "
12839 "not occur when assuming that "
12840 "(X + c) <= X is always false"),
12841 WARN_STRICT_OVERFLOW_ALL);
12842 return constant_boolean_node (0, type);
12843 }
12844
12845 if (code == GE_EXPR
12846 && ((code0 == MINUS_EXPR && is_positive > 0)
12847 || (code0 == PLUS_EXPR && is_positive < 0)))
12848 {
12849 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12850 fold_overflow_warning (("assuming signed overflow does "
12851 "not occur when assuming that "
12852 "(X - c) >= X is always false"),
12853 WARN_STRICT_OVERFLOW_ALL);
12854 return constant_boolean_node (0, type);
12855 }
12856 }
12857 }
12858
12859 /* Comparisons with the highest or lowest possible integer of
12860 the specified precision will have known values. */
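      /* For example, if arg1 has unsigned char type, x > 255 folds to
         false, x <= 255 folds to true, and x > 254 becomes x == 255.  */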
12861 {
12862 tree arg1_type = TREE_TYPE (arg1);
12863 unsigned int prec = TYPE_PRECISION (arg1_type);
12864
12865 if (TREE_CODE (arg1) == INTEGER_CST
12866 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12867 {
12868 wide_int max = wi::max_value (arg1_type);
12869 wide_int signed_max = wi::max_value (prec, SIGNED);
12870 wide_int min = wi::min_value (arg1_type);
12871
12872 if (wi::eq_p (arg1, max))
12873 switch (code)
12874 {
12875 case GT_EXPR:
12876 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12877
12878 case GE_EXPR:
12879 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12880
12881 case LE_EXPR:
12882 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12883
12884 case LT_EXPR:
12885 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12886
12887 /* The GE_EXPR and LT_EXPR cases above are not normally
12888 reached because of previous transformations. */
12889
12890 default:
12891 break;
12892 }
12893 else if (wi::eq_p (arg1, max - 1))
12894 switch (code)
12895 {
12896 case GT_EXPR:
12897 arg1 = const_binop (PLUS_EXPR, arg1,
12898 build_int_cst (TREE_TYPE (arg1), 1));
12899 return fold_build2_loc (loc, EQ_EXPR, type,
12900 fold_convert_loc (loc,
12901 TREE_TYPE (arg1), arg0),
12902 arg1);
12903 case LE_EXPR:
12904 arg1 = const_binop (PLUS_EXPR, arg1,
12905 build_int_cst (TREE_TYPE (arg1), 1));
12906 return fold_build2_loc (loc, NE_EXPR, type,
12907 fold_convert_loc (loc, TREE_TYPE (arg1),
12908 arg0),
12909 arg1);
12910 default:
12911 break;
12912 }
12913 else if (wi::eq_p (arg1, min))
12914 switch (code)
12915 {
12916 case LT_EXPR:
12917 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12918
12919 case LE_EXPR:
12920 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12921
12922 case GE_EXPR:
12923 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12924
12925 case GT_EXPR:
12926 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12927
12928 default:
12929 break;
12930 }
12931 else if (wi::eq_p (arg1, min + 1))
12932 switch (code)
12933 {
12934 case GE_EXPR:
12935 arg1 = const_binop (MINUS_EXPR, arg1,
12936 build_int_cst (TREE_TYPE (arg1), 1));
12937 return fold_build2_loc (loc, NE_EXPR, type,
12938 fold_convert_loc (loc,
12939 TREE_TYPE (arg1), arg0),
12940 arg1);
12941 case LT_EXPR:
12942 arg1 = const_binop (MINUS_EXPR, arg1,
12943 build_int_cst (TREE_TYPE (arg1), 1));
12944 return fold_build2_loc (loc, EQ_EXPR, type,
12945 fold_convert_loc (loc, TREE_TYPE (arg1),
12946 arg0),
12947 arg1);
12948 default:
12949 break;
12950 }
12951
12952 else if (wi::eq_p (arg1, signed_max)
12953 && TYPE_UNSIGNED (arg1_type)
12954 /* We will flip the signedness of the comparison operator
12955 associated with the mode of arg1, so the sign bit is
12956 specified by this mode. Check that arg1 is the signed
12957 max associated with this sign bit. */
12958 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
12959 /* signed_type does not work on pointer types. */
12960 && INTEGRAL_TYPE_P (arg1_type))
12961 {
12962 /* The following case also applies to X < signed_max+1
12963 and X >= signed_max+1 because of previous transformations. */
12964 if (code == LE_EXPR || code == GT_EXPR)
12965 {
12966 tree st = signed_type_for (arg1_type);
12967 return fold_build2_loc (loc,
12968 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12969 type, fold_convert_loc (loc, st, arg0),
12970 build_int_cst (st, 0));
12971 }
12972 }
12973 }
12974 }
12975
12976 /* If we are comparing an ABS_EXPR with a constant, we can
12977 convert all the cases into explicit comparisons, but they may
12978 well not be faster than doing the ABS and one comparison.
12979 But ABS (X) <= C is a range comparison, which becomes a subtraction
12980 and a comparison, and is probably faster. */
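      /* For example, abs (x) <= 5 becomes x >= -5 && x <= 5.  */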
12981 if (code == LE_EXPR
12982 && TREE_CODE (arg1) == INTEGER_CST
12983 && TREE_CODE (arg0) == ABS_EXPR
12984 && ! TREE_SIDE_EFFECTS (arg0)
12985 && (0 != (tem = negate_expr (arg1)))
12986 && TREE_CODE (tem) == INTEGER_CST
12987 && !TREE_OVERFLOW (tem))
12988 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12989 build2 (GE_EXPR, type,
12990 TREE_OPERAND (arg0, 0), tem),
12991 build2 (LE_EXPR, type,
12992 TREE_OPERAND (arg0, 0), arg1));
12993
12994 /* Convert ABS_EXPR<x> >= 0 to true. */
12995 strict_overflow_p = false;
12996 if (code == GE_EXPR
12997 && (integer_zerop (arg1)
12998 || (! HONOR_NANS (element_mode (arg0))
12999 && real_zerop (arg1)))
13000 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13001 {
13002 if (strict_overflow_p)
13003 fold_overflow_warning (("assuming signed overflow does not occur "
13004 "when simplifying comparison of "
13005 "absolute value and zero"),
13006 WARN_STRICT_OVERFLOW_CONDITIONAL);
13007 return omit_one_operand_loc (loc, type,
13008 constant_boolean_node (true, type),
13009 arg0);
13010 }
13011
13012 /* Convert ABS_EXPR<x> < 0 to false. */
13013 strict_overflow_p = false;
13014 if (code == LT_EXPR
13015 && (integer_zerop (arg1) || real_zerop (arg1))
13016 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13017 {
13018 if (strict_overflow_p)
13019 fold_overflow_warning (("assuming signed overflow does not occur "
13020 "when simplifying comparison of "
13021 "absolute value and zero"),
13022 WARN_STRICT_OVERFLOW_CONDITIONAL);
13023 return omit_one_operand_loc (loc, type,
13024 constant_boolean_node (false, type),
13025 arg0);
13026 }
13027
13028 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13029 and similarly for >= into !=. */
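      /* An unsigned X is below 1 << Y exactly when no bit at position
         Y or above is set, i.e. when X >> Y is zero.  */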
13030 if ((code == LT_EXPR || code == GE_EXPR)
13031 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13032 && TREE_CODE (arg1) == LSHIFT_EXPR
13033 && integer_onep (TREE_OPERAND (arg1, 0)))
13034 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13035 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13036 TREE_OPERAND (arg1, 1)),
13037 build_zero_cst (TREE_TYPE (arg0)));
13038
13039 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13040 otherwise Y might be >= # of bits in X's type and thus e.g.
13041 (unsigned char) (1 << Y) for Y 15 might be 0.
13042 If the cast is widening, then 1 << Y should have unsigned type,
13043 otherwise if Y is number of bits in the signed shift type minus 1,
13044 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13045 31 might be 0xffffffff80000000. */
13046 if ((code == LT_EXPR || code == GE_EXPR)
13047 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13048 && CONVERT_EXPR_P (arg1)
13049 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13050 && (element_precision (TREE_TYPE (arg1))
13051 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13052 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13053 || (element_precision (TREE_TYPE (arg1))
13054 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13055 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13056 {
13057 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13058 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13059 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13060 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13061 build_zero_cst (TREE_TYPE (arg0)));
13062 }
13063
13064 return NULL_TREE;
13065
13066 case UNORDERED_EXPR:
13067 case ORDERED_EXPR:
13068 case UNLT_EXPR:
13069 case UNLE_EXPR:
13070 case UNGT_EXPR:
13071 case UNGE_EXPR:
13072 case UNEQ_EXPR:
13073 case LTGT_EXPR:
13074 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13075 {
13076 t1 = fold_relational_const (code, type, arg0, arg1);
13077 if (t1 != NULL_TREE)
13078 return t1;
13079 }
13080
13081 /* If the first operand is NaN, the result is constant. */
13082 if (TREE_CODE (arg0) == REAL_CST
13083 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13084 && (code != LTGT_EXPR || ! flag_trapping_math))
13085 {
13086 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13087 ? integer_zero_node
13088 : integer_one_node;
13089 return omit_one_operand_loc (loc, type, t1, arg1);
13090 }
13091
13092 /* If the second operand is NaN, the result is constant. */
13093 if (TREE_CODE (arg1) == REAL_CST
13094 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13095 && (code != LTGT_EXPR || ! flag_trapping_math))
13096 {
13097 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13098 ? integer_zero_node
13099 : integer_one_node;
13100 return omit_one_operand_loc (loc, type, t1, arg0);
13101 }
13102
13103 /* Simplify unordered comparison of something with itself. */
13104 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13105 && operand_equal_p (arg0, arg1, 0))
13106 return constant_boolean_node (1, type);
13107
13108 if (code == LTGT_EXPR
13109 && !flag_trapping_math
13110 && operand_equal_p (arg0, arg1, 0))
13111 return constant_boolean_node (0, type);
13112
13113 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13114 {
13115 tree targ0 = strip_float_extensions (arg0);
13116 tree targ1 = strip_float_extensions (arg1);
13117 tree newtype = TREE_TYPE (targ0);
13118
13119 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13120 newtype = TREE_TYPE (targ1);
13121
13122 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13123 return fold_build2_loc (loc, code, type,
13124 fold_convert_loc (loc, newtype, targ0),
13125 fold_convert_loc (loc, newtype, targ1));
13126 }
13127
13128 return NULL_TREE;
13129
13130 case COMPOUND_EXPR:
13131 /* When pedantic, a compound expression can be neither an lvalue
13132 nor an integer constant expression. */
13133 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13134 return NULL_TREE;
13135 /* Don't let (0, 0) be a null pointer constant. */
13136 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13137 : fold_convert_loc (loc, type, arg1);
13138 return pedantic_non_lvalue_loc (loc, tem);
13139
13140 case COMPLEX_EXPR:
13141 if ((TREE_CODE (arg0) == REAL_CST
13142 && TREE_CODE (arg1) == REAL_CST)
13143 || (TREE_CODE (arg0) == INTEGER_CST
13144 && TREE_CODE (arg1) == INTEGER_CST))
13145 return build_complex (type, arg0, arg1);
13146 return NULL_TREE;
13147
13148 case ASSERT_EXPR:
13149 /* An ASSERT_EXPR should never be passed to fold_binary. */
13150 gcc_unreachable ();
13151
13152 case VEC_PACK_TRUNC_EXPR:
13153 case VEC_PACK_FIX_TRUNC_EXPR:
13154 {
13155 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13156 tree *elts;
13157
13158 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13159 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13160 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13161 return NULL_TREE;
13162
13163 elts = XALLOCAVEC (tree, nelts);
13164 if (!vec_cst_ctor_to_array (arg0, elts)
13165 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13166 return NULL_TREE;
13167
13168 for (i = 0; i < nelts; i++)
13169 {
13170 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13171 ? NOP_EXPR : FIX_TRUNC_EXPR,
13172 TREE_TYPE (type), elts[i]);
13173 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13174 return NULL_TREE;
13175 }
13176
13177 return build_vector (type, elts);
13178 }
13179
13180 case VEC_WIDEN_MULT_LO_EXPR:
13181 case VEC_WIDEN_MULT_HI_EXPR:
13182 case VEC_WIDEN_MULT_EVEN_EXPR:
13183 case VEC_WIDEN_MULT_ODD_EXPR:
13184 {
13185 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13186 unsigned int out, ofs, scale;
13187 tree *elts;
13188
13189 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13190 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13191 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13192 return NULL_TREE;
13193
13194 elts = XALLOCAVEC (tree, nelts * 4);
13195 if (!vec_cst_ctor_to_array (arg0, elts)
13196 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13197 return NULL_TREE;
13198
13199 if (code == VEC_WIDEN_MULT_LO_EXPR)
13200 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13201 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13202 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13203 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13204 scale = 1, ofs = 0;
13205 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13206 scale = 1, ofs = 1;
13207
13208 for (out = 0; out < nelts; out++)
13209 {
13210 unsigned int in1 = (out << scale) + ofs;
13211 unsigned int in2 = in1 + nelts * 2;
13212 tree t1, t2;
13213
13214 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13215 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13216
13217 if (t1 == NULL_TREE || t2 == NULL_TREE)
13218 return NULL_TREE;
13219 elts[out] = const_binop (MULT_EXPR, t1, t2);
13220 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13221 return NULL_TREE;
13222 }
13223
13224 return build_vector (type, elts);
13225 }
13226
13227 default:
13228 return NULL_TREE;
13229 } /* switch (code) */
13230 }
13231
13232 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13233 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13234 of GOTO_EXPR. */
13235
13236 static tree
13237 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13238 {
13239 switch (TREE_CODE (*tp))
13240 {
13241 case LABEL_EXPR:
13242 return *tp;
13243
13244 case GOTO_EXPR:
13245 *walk_subtrees = 0;
13246
13247 /* ... fall through ... */
13248
13249 default:
13250 return NULL_TREE;
13251 }
13252 }
13253
13254 /* Return whether the sub-tree ST contains a label which is accessible from
13255 outside the sub-tree. */
13256
13257 static bool
13258 contains_label_p (tree st)
13259 {
13260 return
13261 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13262 }
13263
13264 /* Fold a ternary expression of code CODE and type TYPE with operands
13265 OP0, OP1, and OP2. Return the folded expression if folding is
13266 successful. Otherwise, return NULL_TREE. */
13267
13268 tree
13269 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13270 tree op0, tree op1, tree op2)
13271 {
13272 tree tem;
13273 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13274 enum tree_code_class kind = TREE_CODE_CLASS (code);
13275
13276 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13277 && TREE_CODE_LENGTH (code) == 3);
13278
13279 /* If this is a commutative operation, and OP0 is a constant, move it
13280 to OP1 to reduce the number of tests below. */
13281 if (commutative_ternary_tree_code (code)
13282 && tree_swap_operands_p (op0, op1, true))
13283 return fold_build3_loc (loc, code, type, op1, op0, op2);
13284
13285 tem = generic_simplify (loc, code, type, op0, op1, op2);
13286 if (tem)
13287 return tem;
13288
13289 /* Strip any conversions that don't change the mode. This is safe
13290 for every expression, except for a comparison expression because
13291 its signedness is derived from its operands. So, in the latter
13292 case, only strip conversions that don't change the signedness.
13293
13294 Note that this is done as an internal manipulation within the
13295 constant folder, in order to find the simplest representation of
13296 the arguments so that their form can be studied. In any case,
13297 the appropriate type conversions should be put back in the tree
13298 that will get out of the constant folder. */
13299 if (op0)
13300 {
13301 arg0 = op0;
13302 STRIP_NOPS (arg0);
13303 }
13304
13305 if (op1)
13306 {
13307 arg1 = op1;
13308 STRIP_NOPS (arg1);
13309 }
13310
13311 if (op2)
13312 {
13313 arg2 = op2;
13314 STRIP_NOPS (arg2);
13315 }
13316
13317 switch (code)
13318 {
13319 case COMPONENT_REF:
13320 if (TREE_CODE (arg0) == CONSTRUCTOR
13321 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13322 {
13323 unsigned HOST_WIDE_INT idx;
13324 tree field, value;
13325 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13326 if (field == arg1)
13327 return value;
13328 }
13329 return NULL_TREE;
13330
13331 case COND_EXPR:
13332 case VEC_COND_EXPR:
13333 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13334 so all simple results must be passed through pedantic_non_lvalue. */
13335 if (TREE_CODE (arg0) == INTEGER_CST)
13336 {
13337 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13338 tem = integer_zerop (arg0) ? op2 : op1;
13339 /* Only optimize constant conditions when the selected branch
13340 has the same type as the COND_EXPR. This avoids optimizing
13341 away "c ? x : throw", where the throw has a void type.
13342 Avoid throwing away an operand which contains a label. */
13343 if ((!TREE_SIDE_EFFECTS (unused_op)
13344 || !contains_label_p (unused_op))
13345 && (! VOID_TYPE_P (TREE_TYPE (tem))
13346 || VOID_TYPE_P (type)))
13347 return pedantic_non_lvalue_loc (loc, tem);
13348 return NULL_TREE;
13349 }
13350 else if (TREE_CODE (arg0) == VECTOR_CST)
13351 {
13352 if ((TREE_CODE (arg1) == VECTOR_CST
13353 || TREE_CODE (arg1) == CONSTRUCTOR)
13354 && (TREE_CODE (arg2) == VECTOR_CST
13355 || TREE_CODE (arg2) == CONSTRUCTOR))
13356 {
13357 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13358 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13359 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13360 for (i = 0; i < nelts; i++)
13361 {
13362 tree val = VECTOR_CST_ELT (arg0, i);
13363 if (integer_all_onesp (val))
13364 sel[i] = i;
13365 else if (integer_zerop (val))
13366 sel[i] = nelts + i;
13367 else /* Currently unreachable. */
13368 return NULL_TREE;
13369 }
13370 tree t = fold_vec_perm (type, arg1, arg2, sel);
13371 if (t != NULL_TREE)
13372 return t;
13373 }
13374 }
13375
13376 /* If we have A op B ? A : C, we may be able to convert this to a
13377 simpler expression, depending on the operation and the values
13378 of B and C. Signed zeros prevent all of these transformations,
13379 for reasons given above each one.
13380
13381 Also try swapping the arguments and inverting the conditional. */
13382 if (COMPARISON_CLASS_P (arg0)
13383 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13384 arg1, TREE_OPERAND (arg0, 1))
13385 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13386 {
13387 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13388 if (tem)
13389 return tem;
13390 }
13391
13392 if (COMPARISON_CLASS_P (arg0)
13393 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13394 op2,
13395 TREE_OPERAND (arg0, 1))
13396 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13397 {
13398 location_t loc0 = expr_location_or (arg0, loc);
13399 tem = fold_invert_truthvalue (loc0, arg0);
13400 if (tem && COMPARISON_CLASS_P (tem))
13401 {
13402 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13403 if (tem)
13404 return tem;
13405 }
13406 }
13407
13408 /* If the second operand is simpler than the third, swap them
13409 since that produces better jump optimization results. */
13410 if (truth_value_p (TREE_CODE (arg0))
13411 && tree_swap_operands_p (op1, op2, false))
13412 {
13413 location_t loc0 = expr_location_or (arg0, loc);
13414 /* See if this can be inverted. If it can't, possibly because
13415 it was a floating-point inequality comparison, don't do
13416 anything. */
13417 tem = fold_invert_truthvalue (loc0, arg0);
13418 if (tem)
13419 return fold_build3_loc (loc, code, type, tem, op2, op1);
13420 }
13421
13422 /* Convert A ? 1 : 0 to simply A. */
13423 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13424 : (integer_onep (op1)
13425 && !VECTOR_TYPE_P (type)))
13426 && integer_zerop (op2)
13427 /* If we try to convert OP0 to our type, the
13428 call to fold will try to move the conversion inside
13429 a COND, which will recurse. In that case, the COND_EXPR
13430 is probably the best choice, so leave it alone. */
13431 && type == TREE_TYPE (arg0))
13432 return pedantic_non_lvalue_loc (loc, arg0);
13433
13434 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13435 over COND_EXPR in cases such as floating point comparisons. */
13436 if (integer_zerop (op1)
13437 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13438 : (integer_onep (op2)
13439 && !VECTOR_TYPE_P (type)))
13440 && truth_value_p (TREE_CODE (arg0)))
13441 return pedantic_non_lvalue_loc (loc,
13442 fold_convert_loc (loc, type,
13443 invert_truthvalue_loc (loc,
13444 arg0)));
13445
13446 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13447 if (TREE_CODE (arg0) == LT_EXPR
13448 && integer_zerop (TREE_OPERAND (arg0, 1))
13449 && integer_zerop (op2)
13450 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13451 {
13452 /* sign_bit_p looks through both zero and sign extensions,
13453 but for this optimization only sign extensions are
13454 usable. */
13455 tree tem2 = TREE_OPERAND (arg0, 0);
13456 while (tem != tem2)
13457 {
13458 if (TREE_CODE (tem2) != NOP_EXPR
13459 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13460 {
13461 tem = NULL_TREE;
13462 break;
13463 }
13464 tem2 = TREE_OPERAND (tem2, 0);
13465 }
13466 /* sign_bit_p only checks ARG1 bits within A's precision.
13467 If <sign bit of A> has wider type than A, bits outside
13468 of A's precision in <sign bit of A> need to be checked.
13469 If they are all 0, this optimization needs to be done
13470 in unsigned A's type, if they are all 1 in signed A's type,
13471 otherwise this can't be done. */
13472 if (tem
13473 && TYPE_PRECISION (TREE_TYPE (tem))
13474 < TYPE_PRECISION (TREE_TYPE (arg1))
13475 && TYPE_PRECISION (TREE_TYPE (tem))
13476 < TYPE_PRECISION (type))
13477 {
13478 int inner_width, outer_width;
13479 tree tem_type;
13480
13481 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13482 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13483 if (outer_width > TYPE_PRECISION (type))
13484 outer_width = TYPE_PRECISION (type);
13485
13486 wide_int mask = wi::shifted_mask
13487 (inner_width, outer_width - inner_width, false,
13488 TYPE_PRECISION (TREE_TYPE (arg1)));
13489
13490 wide_int common = mask & arg1;
13491 if (common == mask)
13492 {
13493 tem_type = signed_type_for (TREE_TYPE (tem));
13494 tem = fold_convert_loc (loc, tem_type, tem);
13495 }
13496 else if (common == 0)
13497 {
13498 tem_type = unsigned_type_for (TREE_TYPE (tem));
13499 tem = fold_convert_loc (loc, tem_type, tem);
13500 }
13501 else
13502 tem = NULL;
13503 }
13504
13505 if (tem)
13506 return
13507 fold_convert_loc (loc, type,
13508 fold_build2_loc (loc, BIT_AND_EXPR,
13509 TREE_TYPE (tem), tem,
13510 fold_convert_loc (loc,
13511 TREE_TYPE (tem),
13512 arg1)));
13513 }
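/* A worked example of the sign-bit fold above (illustrative): for
   a 32-bit int A, "A < 0 ? 0x80000000 : 0" satisfies sign_bit_p,
   so the conditional folds to the branch-free "A & 0x80000000";
   the sign bit of A is set exactly when A < 0. */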
13514
13515 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13516 already handled above. */
13517 if (TREE_CODE (arg0) == BIT_AND_EXPR
13518 && integer_onep (TREE_OPERAND (arg0, 1))
13519 && integer_zerop (op2)
13520 && integer_pow2p (arg1))
13521 {
13522 tree tem = TREE_OPERAND (arg0, 0);
13523 STRIP_NOPS (tem);
13524 if (TREE_CODE (tem) == RSHIFT_EXPR
13525 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13526 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13527 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13528 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13529 TREE_OPERAND (tem, 0), arg1);
13530 }
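/* Illustrative example of the fold above: in "(a >> 3) & 1 ? 8 : 0"
   ARG1 is 8 and tree_log2 (8) == 3 matches the shift count, so the
   whole conditional folds to "a & 8". */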
13531
13532 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13533 is probably obsolete because the first operand should be a
13534 truth value (that's why we have the two cases above), but let's
13535 leave it in until we can confirm this for all front-ends. */
13536 if (integer_zerop (op2)
13537 && TREE_CODE (arg0) == NE_EXPR
13538 && integer_zerop (TREE_OPERAND (arg0, 1))
13539 && integer_pow2p (arg1)
13540 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13541 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13542 arg1, OEP_ONLY_CONST))
13543 return pedantic_non_lvalue_loc (loc,
13544 fold_convert_loc (loc, type,
13545 TREE_OPERAND (arg0, 0)));
13546
13547 /* Disable the transformations below for vectors, since
13548 fold_binary_op_with_conditional_arg may undo them immediately,
13549 yielding an infinite loop. */
13550 if (code == VEC_COND_EXPR)
13551 return NULL_TREE;
13552
13553 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13554 if (integer_zerop (op2)
13555 && truth_value_p (TREE_CODE (arg0))
13556 && truth_value_p (TREE_CODE (arg1))
13557 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13558 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13559 : TRUTH_ANDIF_EXPR,
13560 type, fold_convert_loc (loc, type, arg0), arg1);
13561
13562 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13563 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13564 && truth_value_p (TREE_CODE (arg0))
13565 && truth_value_p (TREE_CODE (arg1))
13566 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13567 {
13568 location_t loc0 = expr_location_or (arg0, loc);
13569 /* Only perform transformation if ARG0 is easily inverted. */
13570 tem = fold_invert_truthvalue (loc0, arg0);
13571 if (tem)
13572 return fold_build2_loc (loc, code == VEC_COND_EXPR
13573 ? BIT_IOR_EXPR
13574 : TRUTH_ORIF_EXPR,
13575 type, fold_convert_loc (loc, type, tem),
13576 arg1);
13577 }
13578
13579 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13580 if (integer_zerop (arg1)
13581 && truth_value_p (TREE_CODE (arg0))
13582 && truth_value_p (TREE_CODE (op2))
13583 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13584 {
13585 location_t loc0 = expr_location_or (arg0, loc);
13586 /* Only perform transformation if ARG0 is easily inverted. */
13587 tem = fold_invert_truthvalue (loc0, arg0);
13588 if (tem)
13589 return fold_build2_loc (loc, code == VEC_COND_EXPR
13590 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13591 type, fold_convert_loc (loc, type, tem),
13592 op2);
13593 }
13594
13595 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13596 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13597 && truth_value_p (TREE_CODE (arg0))
13598 && truth_value_p (TREE_CODE (op2))
13599 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13600 return fold_build2_loc (loc, code == VEC_COND_EXPR
13601 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13602 type, fold_convert_loc (loc, type, arg0), op2);
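/* Concrete instances of the four conversions above (illustrative):
   with truth values "a != 0" and "b > 2",
     a != 0 ? b > 2 : 0  becomes  a != 0 && b > 2
     a != 0 ? 1 : b > 2  becomes  a != 0 || b > 2
   using TRUTH_ANDIF_EXPR / TRUTH_ORIF_EXPR.  (After the vector
   early-out above, CODE here is always COND_EXPR, so the
   BIT_AND_EXPR / BIT_IOR_EXPR arms are vestigial.) */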
13603
13604 return NULL_TREE;
13605
13606 case CALL_EXPR:
13607 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13608 of fold_ternary on them. */
13609 gcc_unreachable ();
13610
13611 case BIT_FIELD_REF:
13612 if ((TREE_CODE (arg0) == VECTOR_CST
13613 || (TREE_CODE (arg0) == CONSTRUCTOR
13614 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13615 && (type == TREE_TYPE (TREE_TYPE (arg0))
13616 || (TREE_CODE (type) == VECTOR_TYPE
13617 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13618 {
13619 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13620 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13621 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13622 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13623
13624 if (n != 0
13625 && (idx % width) == 0
13626 && (n % width) == 0
13627 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13628 {
13629 idx = idx / width;
13630 n = n / width;
13631
13632 if (TREE_CODE (arg0) == VECTOR_CST)
13633 {
13634 if (n == 1)
13635 return VECTOR_CST_ELT (arg0, idx);
13636
13637 tree *vals = XALLOCAVEC (tree, n);
13638 for (unsigned i = 0; i < n; ++i)
13639 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13640 return build_vector (type, vals);
13641 }
13642
13643 /* Constructor elements can be subvectors. */
13644 unsigned HOST_WIDE_INT k = 1;
13645 if (CONSTRUCTOR_NELTS (arg0) != 0)
13646 {
13647 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13648 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13649 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13650 }
13651
13652 /* We keep an exact subset of the constructor elements. */
13653 if ((idx % k) == 0 && (n % k) == 0)
13654 {
13655 if (CONSTRUCTOR_NELTS (arg0) == 0)
13656 return build_constructor (type, NULL);
13657 idx /= k;
13658 n /= k;
13659 if (n == 1)
13660 {
13661 if (idx < CONSTRUCTOR_NELTS (arg0))
13662 return CONSTRUCTOR_ELT (arg0, idx)->value;
13663 return build_zero_cst (type);
13664 }
13665
13666 vec<constructor_elt, va_gc> *vals;
13667 vec_alloc (vals, n);
13668 for (unsigned i = 0;
13669 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13670 ++i)
13671 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13672 CONSTRUCTOR_ELT
13673 (arg0, idx + i)->value);
13674 return build_constructor (type, vals);
13675 }
13676 /* The bitfield references a single constructor element. */
13677 else if (idx + n <= (idx / k + 1) * k)
13678 {
13679 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13680 return build_zero_cst (type);
13681 else if (n == k)
13682 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13683 else
13684 return fold_build3_loc (loc, code, type,
13685 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13686 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13687 }
13688 }
13689 }
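/* Illustrative example of the extraction above: for a V4SI constant
   { 1, 2, 3, 4 } with 32-bit elements, BIT_FIELD_REF <v, 64, 64>
   gives idx = 64/32 = 2 and n = 64/32 = 2, yielding the V2SI
   constant { 3, 4 }; with n == 1 the scalar element is returned
   instead. */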
13690
13691 /* A bit-field-ref that referenced the full argument can be stripped. */
13692 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13693 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13694 && integer_zerop (op2))
13695 return fold_convert_loc (loc, type, arg0);
13696
13697 /* On constants we can use native encode/interpret to constant
13698 fold (nearly) all BIT_FIELD_REFs. */
13699 if (CONSTANT_CLASS_P (arg0)
13700 && can_native_interpret_type_p (type)
13701 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13702 /* This limitation should not be necessary; we just need to
13703 round this up to mode size. */
13704 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13705 /* Need bit-shifting of the buffer to relax the following. */
13706 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13707 {
13708 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13709 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13710 unsigned HOST_WIDE_INT clen;
13711 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13712 /* ??? We cannot tell native_encode_expr to start at
13713 some random byte only, so limit ourselves to a reasonable amount
13714 of work. */
13715 if (clen <= 4096)
13716 {
13717 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13718 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13719 if (len > 0
13720 && len * BITS_PER_UNIT >= bitpos + bitsize)
13721 {
13722 tree v = native_interpret_expr (type,
13723 b + bitpos / BITS_PER_UNIT,
13724 bitsize / BITS_PER_UNIT);
13725 if (v)
13726 return v;
13727 }
13728 }
13729 }
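/* For example (illustrative): a BIT_FIELD_REF of 32 bits at bit
   offset 32 of a 64-bit integer constant is folded by encoding the
   constant into the byte buffer B above and re-interpreting bytes
   4..7 in the target byte order via native_interpret_expr. */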
13730
13731 return NULL_TREE;
13732
13733 case FMA_EXPR:
13734 /* For integers we can decompose the FMA if possible. */
13735 if (TREE_CODE (arg0) == INTEGER_CST
13736 && TREE_CODE (arg1) == INTEGER_CST)
13737 return fold_build2_loc (loc, PLUS_EXPR, type,
13738 const_binop (MULT_EXPR, arg0, arg1), arg2);
13739 if (integer_zerop (arg2))
13740 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13741
13742 return fold_fma (loc, type, arg0, arg1, arg2);
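/* Illustrative instances of the FMA_EXPR folds above:
     FMA_EXPR <5, 7, x>  ->  35 + x   (both multiplicands constant)
     FMA_EXPR <a, b, 0>  ->  a * b    (zero addend)
   anything else is handed to fold_fma. */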
13743
13744 case VEC_PERM_EXPR:
13745 if (TREE_CODE (arg2) == VECTOR_CST)
13746 {
13747 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13748 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13749 unsigned char *sel2 = sel + nelts;
13750 bool need_mask_canon = false;
13751 bool need_mask_canon2 = false;
13752 bool all_in_vec0 = true;
13753 bool all_in_vec1 = true;
13754 bool maybe_identity = true;
13755 bool single_arg = (op0 == op1);
13756 bool changed = false;
13757
13758 mask2 = 2 * nelts - 1;
13759 mask = single_arg ? (nelts - 1) : mask2;
13760 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13761 for (i = 0; i < nelts; i++)
13762 {
13763 tree val = VECTOR_CST_ELT (arg2, i);
13764 if (TREE_CODE (val) != INTEGER_CST)
13765 return NULL_TREE;
13766
13767 /* Make sure that the perm value is in an acceptable
13768 range. */
13769 wide_int t = val;
13770 need_mask_canon |= wi::gtu_p (t, mask);
13771 need_mask_canon2 |= wi::gtu_p (t, mask2);
13772 sel[i] = t.to_uhwi () & mask;
13773 sel2[i] = t.to_uhwi () & mask2;
13774
13775 if (sel[i] < nelts)
13776 all_in_vec1 = false;
13777 else
13778 all_in_vec0 = false;
13779
13780 if ((sel[i] & (nelts-1)) != i)
13781 maybe_identity = false;
13782 }
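/* Illustrative selector analysis (not from the original source):
   on V4SI, the mask { 0, 1, 2, 3 } is the identity on OP0 and the
   result is OP0 itself; { 4, 5, 6, 7 } selects OP1 verbatim; a
   mixed mask such as { 5, 1, 6, 2 } is left for fold_vec_perm on
   constant operands or for the canonicalization below. */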
13783
13784 if (maybe_identity)
13785 {
13786 if (all_in_vec0)
13787 return op0;
13788 if (all_in_vec1)
13789 return op1;
13790 }
13791
13792 if (all_in_vec0)
13793 op1 = op0;
13794 else if (all_in_vec1)
13795 {
13796 op0 = op1;
13797 for (i = 0; i < nelts; i++)
13798 sel[i] -= nelts;
13799 need_mask_canon = true;
13800 }
13801
13802 if ((TREE_CODE (op0) == VECTOR_CST
13803 || TREE_CODE (op0) == CONSTRUCTOR)
13804 && (TREE_CODE (op1) == VECTOR_CST
13805 || TREE_CODE (op1) == CONSTRUCTOR))
13806 {
13807 tree t = fold_vec_perm (type, op0, op1, sel);
13808 if (t != NULL_TREE)
13809 return t;
13810 }
13811
13812 if (op0 == op1 && !single_arg)
13813 changed = true;
13814
13815 /* Some targets are deficient and fail to expand a single
13816 argument permutation while still allowing an equivalent
13817 2-argument version. */
13818 if (need_mask_canon && arg2 == op2
13819 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13820 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13821 {
13822 need_mask_canon = need_mask_canon2;
13823 sel = sel2;
13824 }
13825
13826 if (need_mask_canon && arg2 == op2)
13827 {
13828 tree *tsel = XALLOCAVEC (tree, nelts);
13829 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13830 for (i = 0; i < nelts; i++)
13831 tsel[i] = build_int_cst (eltype, sel[i]);
13832 op2 = build_vector (TREE_TYPE (arg2), tsel);
13833 changed = true;
13834 }
13835
13836 if (changed)
13837 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13838 }
13839 return NULL_TREE;
13840
13841 default:
13842 return NULL_TREE;
13843 } /* switch (code) */
13844 }
13845
13846 /* Perform constant folding and related simplification of EXPR.
13847 The related simplifications include x*1 => x, x*0 => 0, etc.,
13848 and application of the associative law.
13849 NOP_EXPR conversions may be removed freely (as long as we
13850 are careful not to change the type of the overall expression).
13851 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13852 but we can constant-fold them if they have constant operands. */
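/* A minimal illustration of the entry point below (hypothetical
   caller, not part of this file):

     tree sum = build2 (PLUS_EXPR, integer_type_node,
			build_int_cst (integer_type_node, 1),
			build_int_cst (integer_type_node, 2));
     tree folded = fold (sum);	-- yields the INTEGER_CST 3

   fold returns either a simplified tree or EXPR itself, never
   NULL_TREE. */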
13853
13854 #ifdef ENABLE_FOLD_CHECKING
13855 # define fold(x) fold_1 (x)
13856 static tree fold_1 (tree);
13857 static
13858 #endif
13859 tree
13860 fold (tree expr)
13861 {
13862 const tree t = expr;
13863 enum tree_code code = TREE_CODE (t);
13864 enum tree_code_class kind = TREE_CODE_CLASS (code);
13865 tree tem;
13866 location_t loc = EXPR_LOCATION (expr);
13867
13868 /* Return right away if a constant. */
13869 if (kind == tcc_constant)
13870 return t;
13871
13872 /* CALL_EXPR-like objects with variable numbers of operands are
13873 treated specially. */
13874 if (kind == tcc_vl_exp)
13875 {
13876 if (code == CALL_EXPR)
13877 {
13878 tem = fold_call_expr (loc, expr, false);
13879 return tem ? tem : expr;
13880 }
13881 return expr;
13882 }
13883
13884 if (IS_EXPR_CODE_CLASS (kind))
13885 {
13886 tree type = TREE_TYPE (t);
13887 tree op0, op1, op2;
13888
13889 switch (TREE_CODE_LENGTH (code))
13890 {
13891 case 1:
13892 op0 = TREE_OPERAND (t, 0);
13893 tem = fold_unary_loc (loc, code, type, op0);
13894 return tem ? tem : expr;
13895 case 2:
13896 op0 = TREE_OPERAND (t, 0);
13897 op1 = TREE_OPERAND (t, 1);
13898 tem = fold_binary_loc (loc, code, type, op0, op1);
13899 return tem ? tem : expr;
13900 case 3:
13901 op0 = TREE_OPERAND (t, 0);
13902 op1 = TREE_OPERAND (t, 1);
13903 op2 = TREE_OPERAND (t, 2);
13904 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13905 return tem ? tem : expr;
13906 default:
13907 break;
13908 }
13909 }
13910
13911 switch (code)
13912 {
13913 case ARRAY_REF:
13914 {
13915 tree op0 = TREE_OPERAND (t, 0);
13916 tree op1 = TREE_OPERAND (t, 1);
13917
13918 if (TREE_CODE (op1) == INTEGER_CST
13919 && TREE_CODE (op0) == CONSTRUCTOR
13920 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13921 {
13922 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13923 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13924 unsigned HOST_WIDE_INT begin = 0;
13925
13926 /* Find a matching index by means of a binary search. */
13927 while (begin != end)
13928 {
13929 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13930 tree index = (*elts)[middle].index;
13931
13932 if (TREE_CODE (index) == INTEGER_CST
13933 && tree_int_cst_lt (index, op1))
13934 begin = middle + 1;
13935 else if (TREE_CODE (index) == INTEGER_CST
13936 && tree_int_cst_lt (op1, index))
13937 end = middle;
13938 else if (TREE_CODE (index) == RANGE_EXPR
13939 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13940 begin = middle + 1;
13941 else if (TREE_CODE (index) == RANGE_EXPR
13942 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13943 end = middle;
13944 else
13945 return (*elts)[middle].value;
13946 }
13947 }
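/* Illustrative example of the binary search above: in a
   CONSTRUCTOR { [0 ... 7] = 1, [8] = 2 } the first index is a
   RANGE_EXPR, so a lookup of index 5 compares against the range
   bounds and returns the value 1 without a linear scan. */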
13948
13949 return t;
13950 }
13951
13952 /* Return a VECTOR_CST if possible. */
13953 case CONSTRUCTOR:
13954 {
13955 tree type = TREE_TYPE (t);
13956 if (TREE_CODE (type) != VECTOR_TYPE)
13957 return t;
13958
13959 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13960 unsigned HOST_WIDE_INT idx, pos = 0;
13961 tree value;
13962
13963 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13964 {
13965 if (!CONSTANT_CLASS_P (value))
13966 return t;
13967 if (TREE_CODE (value) == VECTOR_CST)
13968 {
13969 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13970 vec[pos++] = VECTOR_CST_ELT (value, i);
13971 }
13972 else
13973 vec[pos++] = value;
13974 }
13975 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13976 vec[pos] = build_zero_cst (TREE_TYPE (type));
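/* For example (illustrative): a V4SI CONSTRUCTOR that lists only
   the constants 1 and 2 becomes the VECTOR_CST { 1, 2, 0, 0 };
   the loop above zero-fills whatever elements the CONSTRUCTOR
   omits. */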
13977
13978 return build_vector (type, vec);
13979 }
13980
13981 case CONST_DECL:
13982 return fold (DECL_INITIAL (t));
13983
13984 default:
13985 return t;
13986 } /* switch (code) */
13987 }
13988
13989 #ifdef ENABLE_FOLD_CHECKING
13990 #undef fold
13991
13992 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13993 hash_table<pointer_hash<const tree_node> > *);
13994 static void fold_check_failed (const_tree, const_tree);
13995 void print_fold_checksum (const_tree);
13996
13997 /* When --enable-checking=fold, compute a digest of expr before
13998 and after actual fold call to see if fold did not accidentally
13999 change original expr. */
14000
14001 tree
14002 fold (tree expr)
14003 {
14004 tree ret;
14005 struct md5_ctx ctx;
14006 unsigned char checksum_before[16], checksum_after[16];
14007 hash_table<pointer_hash<const tree_node> > ht (32);
14008
14009 md5_init_ctx (&ctx);
14010 fold_checksum_tree (expr, &ctx, &ht);
14011 md5_finish_ctx (&ctx, checksum_before);
14012 ht.empty ();
14013
14014 ret = fold_1 (expr);
14015
14016 md5_init_ctx (&ctx);
14017 fold_checksum_tree (expr, &ctx, &ht);
14018 md5_finish_ctx (&ctx, checksum_after);
14019
14020 if (memcmp (checksum_before, checksum_after, 16))
14021 fold_check_failed (expr, ret);
14022
14023 return ret;
14024 }
14025
14026 void
14027 print_fold_checksum (const_tree expr)
14028 {
14029 struct md5_ctx ctx;
14030 unsigned char checksum[16], cnt;
14031 hash_table<pointer_hash<const tree_node> > ht (32);
14032
14033 md5_init_ctx (&ctx);
14034 fold_checksum_tree (expr, &ctx, &ht);
14035 md5_finish_ctx (&ctx, checksum);
14036 for (cnt = 0; cnt < 16; ++cnt)
14037 fprintf (stderr, "%02x", checksum[cnt]);
14038 putc ('\n', stderr);
14039 }
14040
14041 static void
14042 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14043 {
14044 internal_error ("fold check: original tree changed by fold");
14045 }
14046
14047 static void
14048 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14049 hash_table<pointer_hash <const tree_node> > *ht)
14050 {
14051 const tree_node **slot;
14052 enum tree_code code;
14053 union tree_node buf;
14054 int i, len;
14055
14056 recursive_label:
14057 if (expr == NULL)
14058 return;
14059 slot = ht->find_slot (expr, INSERT);
14060 if (*slot != NULL)
14061 return;
14062 *slot = expr;
14063 code = TREE_CODE (expr);
14064 if (TREE_CODE_CLASS (code) == tcc_declaration
14065 && DECL_ASSEMBLER_NAME_SET_P (expr))
14066 {
14067 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14068 memcpy ((char *) &buf, expr, tree_size (expr));
14069 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14070 expr = (tree) &buf;
14071 }
14072 else if (TREE_CODE_CLASS (code) == tcc_type
14073 && (TYPE_POINTER_TO (expr)
14074 || TYPE_REFERENCE_TO (expr)
14075 || TYPE_CACHED_VALUES_P (expr)
14076 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14077 || TYPE_NEXT_VARIANT (expr)))
14078 {
14079 /* Allow these fields to be modified. */
14080 tree tmp;
14081 memcpy ((char *) &buf, expr, tree_size (expr));
14082 expr = tmp = (tree) &buf;
14083 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14084 TYPE_POINTER_TO (tmp) = NULL;
14085 TYPE_REFERENCE_TO (tmp) = NULL;
14086 TYPE_NEXT_VARIANT (tmp) = NULL;
14087 if (TYPE_CACHED_VALUES_P (tmp))
14088 {
14089 TYPE_CACHED_VALUES_P (tmp) = 0;
14090 TYPE_CACHED_VALUES (tmp) = NULL;
14091 }
14092 }
14093 md5_process_bytes (expr, tree_size (expr), ctx);
14094 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14095 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14096 if (TREE_CODE_CLASS (code) != tcc_type
14097 && TREE_CODE_CLASS (code) != tcc_declaration
14098 && code != TREE_LIST
14099 && code != SSA_NAME
14100 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14101 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14102 switch (TREE_CODE_CLASS (code))
14103 {
14104 case tcc_constant:
14105 switch (code)
14106 {
14107 case STRING_CST:
14108 md5_process_bytes (TREE_STRING_POINTER (expr),
14109 TREE_STRING_LENGTH (expr), ctx);
14110 break;
14111 case COMPLEX_CST:
14112 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14113 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14114 break;
14115 case VECTOR_CST:
14116 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14117 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14118 break;
14119 default:
14120 break;
14121 }
14122 break;
14123 case tcc_exceptional:
14124 switch (code)
14125 {
14126 case TREE_LIST:
14127 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14128 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14129 expr = TREE_CHAIN (expr);
14130 goto recursive_label;
14131 break;
14132 case TREE_VEC:
14133 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14134 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14135 break;
14136 default:
14137 break;
14138 }
14139 break;
14140 case tcc_expression:
14141 case tcc_reference:
14142 case tcc_comparison:
14143 case tcc_unary:
14144 case tcc_binary:
14145 case tcc_statement:
14146 case tcc_vl_exp:
14147 len = TREE_OPERAND_LENGTH (expr);
14148 for (i = 0; i < len; ++i)
14149 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14150 break;
14151 case tcc_declaration:
14152 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14153 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14154 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14155 {
14156 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14157 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14158 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14159 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14160 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14161 }
14162
14163 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14164 {
14165 if (TREE_CODE (expr) == FUNCTION_DECL)
14166 {
14167 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14168 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14169 }
14170 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14171 }
14172 break;
14173 case tcc_type:
14174 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14175 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14176 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14177 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14178 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14179 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14180 if (INTEGRAL_TYPE_P (expr)
14181 || SCALAR_FLOAT_TYPE_P (expr))
14182 {
14183 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14184 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14185 }
14186 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14187 if (TREE_CODE (expr) == RECORD_TYPE
14188 || TREE_CODE (expr) == UNION_TYPE
14189 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14190 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14191 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14192 break;
14193 default:
14194 break;
14195 }
14196 }
14197
14198 /* Helper function for outputting the checksum of a tree T. When
14199 debugging with gdb, you can "define mynext" to be "next" followed
14200 by "call debug_fold_checksum (op0)", then just trace down till the
14201 outputs differ. */
14202
14203 DEBUG_FUNCTION void
14204 debug_fold_checksum (const_tree t)
14205 {
14206 int i;
14207 unsigned char checksum[16];
14208 struct md5_ctx ctx;
14209 hash_table<pointer_hash<const tree_node> > ht (32);
14210
14211 md5_init_ctx (&ctx);
14212 fold_checksum_tree (t, &ctx, &ht);
14213 md5_finish_ctx (&ctx, checksum);
14214 ht.empty ();
14215
14216 for (i = 0; i < 16; i++)
14217 fprintf (stderr, "%d ", checksum[i]);
14218
14219 fprintf (stderr, "\n");
14220 }
14221
14222 #endif
14223
14224 /* Fold a unary tree expression with code CODE of type TYPE with an
14225 operand OP0. LOC is the location of the resulting expression.
14226 Return a folded expression if successful. Otherwise, return a tree
14227 expression with code CODE of type TYPE with an operand OP0. */
14228
14229 tree
14230 fold_build1_stat_loc (location_t loc,
14231 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14232 {
14233 tree tem;
14234 #ifdef ENABLE_FOLD_CHECKING
14235 unsigned char checksum_before[16], checksum_after[16];
14236 struct md5_ctx ctx;
14237 hash_table<pointer_hash<const tree_node> > ht (32);
14238
14239 md5_init_ctx (&ctx);
14240 fold_checksum_tree (op0, &ctx, &ht);
14241 md5_finish_ctx (&ctx, checksum_before);
14242 ht.empty ();
14243 #endif
14244
14245 tem = fold_unary_loc (loc, code, type, op0);
14246 if (!tem)
14247 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14248
14249 #ifdef ENABLE_FOLD_CHECKING
14250 md5_init_ctx (&ctx);
14251 fold_checksum_tree (op0, &ctx, &ht);
14252 md5_finish_ctx (&ctx, checksum_after);
14253
14254 if (memcmp (checksum_before, checksum_after, 16))
14255 fold_check_failed (op0, tem);
14256 #endif
14257 return tem;
14258 }
14259
14260 /* Fold a binary tree expression with code CODE of type TYPE with
14261 operands OP0 and OP1. LOC is the location of the resulting
14262 expression. Return a folded expression if successful. Otherwise,
14263 return a tree expression with code CODE of type TYPE with operands
14264 OP0 and OP1. */
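/* A minimal usage sketch (hypothetical caller, not part of this
   file):

     tree t = fold_build2_loc (loc, PLUS_EXPR, integer_type_node, x,
			       build_int_cst (integer_type_node, 0));

   Unlike fold_binary_loc, this never returns NULL_TREE: if no
   simplification applies it builds the expression as-is, while
   "x + 0" as above folds straight back to X. */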
14265
14266 tree
14267 fold_build2_stat_loc (location_t loc,
14268 enum tree_code code, tree type, tree op0, tree op1
14269 MEM_STAT_DECL)
14270 {
14271 tree tem;
14272 #ifdef ENABLE_FOLD_CHECKING
14273 unsigned char checksum_before_op0[16],
14274 checksum_before_op1[16],
14275 checksum_after_op0[16],
14276 checksum_after_op1[16];
14277 struct md5_ctx ctx;
14278 hash_table<pointer_hash<const tree_node> > ht (32);
14279
14280 md5_init_ctx (&ctx);
14281 fold_checksum_tree (op0, &ctx, &ht);
14282 md5_finish_ctx (&ctx, checksum_before_op0);
14283 ht.empty ();
14284
14285 md5_init_ctx (&ctx);
14286 fold_checksum_tree (op1, &ctx, &ht);
14287 md5_finish_ctx (&ctx, checksum_before_op1);
14288 ht.empty ();
14289 #endif
14290
14291 tem = fold_binary_loc (loc, code, type, op0, op1);
14292 if (!tem)
14293 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14294
14295 #ifdef ENABLE_FOLD_CHECKING
14296 md5_init_ctx (&ctx);
14297 fold_checksum_tree (op0, &ctx, &ht);
14298 md5_finish_ctx (&ctx, checksum_after_op0);
14299 ht.empty ();
14300
14301 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14302 fold_check_failed (op0, tem);
14303
14304 md5_init_ctx (&ctx);
14305 fold_checksum_tree (op1, &ctx, &ht);
14306 md5_finish_ctx (&ctx, checksum_after_op1);
14307
14308 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14309 fold_check_failed (op1, tem);
14310 #endif
14311 return tem;
14312 }
14313
14314 /* Fold a ternary tree expression with code CODE of type TYPE with
14315 operands OP0, OP1, and OP2. Return a folded expression if
14316 successful. Otherwise, return a tree expression with code CODE of
14317 type TYPE with operands OP0, OP1, and OP2. */
14318
14319 tree
14320 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14321 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14322 {
14323 tree tem;
14324 #ifdef ENABLE_FOLD_CHECKING
14325 unsigned char checksum_before_op0[16],
14326 checksum_before_op1[16],
14327 checksum_before_op2[16],
14328 checksum_after_op0[16],
14329 checksum_after_op1[16],
14330 checksum_after_op2[16];
14331 struct md5_ctx ctx;
14332 hash_table<pointer_hash<const tree_node> > ht (32);
14333
14334 md5_init_ctx (&ctx);
14335 fold_checksum_tree (op0, &ctx, &ht);
14336 md5_finish_ctx (&ctx, checksum_before_op0);
14337 ht.empty ();
14338
14339 md5_init_ctx (&ctx);
14340 fold_checksum_tree (op1, &ctx, &ht);
14341 md5_finish_ctx (&ctx, checksum_before_op1);
14342 ht.empty ();
14343
14344 md5_init_ctx (&ctx);
14345 fold_checksum_tree (op2, &ctx, &ht);
14346 md5_finish_ctx (&ctx, checksum_before_op2);
14347 ht.empty ();
14348 #endif
14349
14350 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14351 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14352 if (!tem)
14353 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14354
14355 #ifdef ENABLE_FOLD_CHECKING
14356 md5_init_ctx (&ctx);
14357 fold_checksum_tree (op0, &ctx, &ht);
14358 md5_finish_ctx (&ctx, checksum_after_op0);
14359 ht.empty ();
14360
14361 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14362 fold_check_failed (op0, tem);
14363
14364 md5_init_ctx (&ctx);
14365 fold_checksum_tree (op1, &ctx, &ht);
14366 md5_finish_ctx (&ctx, checksum_after_op1);
14367 ht.empty ();
14368
14369 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14370 fold_check_failed (op1, tem);
14371
14372 md5_init_ctx (&ctx);
14373 fold_checksum_tree (op2, &ctx, &ht);
14374 md5_finish_ctx (&ctx, checksum_after_op2);
14375
14376 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14377 fold_check_failed (op2, tem);
14378 #endif
14379 return tem;
14380 }
14381
14382 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14383 arguments in ARGARRAY, and a null static chain.
14384 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14385 of type TYPE from the given operands as constructed by build_call_array. */
14386
14387 tree
14388 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14389 int nargs, tree *argarray)
14390 {
14391 tree tem;
14392 #ifdef ENABLE_FOLD_CHECKING
14393 unsigned char checksum_before_fn[16],
14394 checksum_before_arglist[16],
14395 checksum_after_fn[16],
14396 checksum_after_arglist[16];
14397 struct md5_ctx ctx;
14398 hash_table<pointer_hash<const tree_node> > ht (32);
14399 int i;
14400
14401 md5_init_ctx (&ctx);
14402 fold_checksum_tree (fn, &ctx, &ht);
14403 md5_finish_ctx (&ctx, checksum_before_fn);
14404 ht.empty ();
14405
14406 md5_init_ctx (&ctx);
14407 for (i = 0; i < nargs; i++)
14408 fold_checksum_tree (argarray[i], &ctx, &ht);
14409 md5_finish_ctx (&ctx, checksum_before_arglist);
14410 ht.empty ();
14411 #endif
14412
14413 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14414
14415 #ifdef ENABLE_FOLD_CHECKING
14416 md5_init_ctx (&ctx);
14417 fold_checksum_tree (fn, &ctx, &ht);
14418 md5_finish_ctx (&ctx, checksum_after_fn);
14419 ht.empty ();
14420
14421 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14422 fold_check_failed (fn, tem);
14423
14424 md5_init_ctx (&ctx);
14425 for (i = 0; i < nargs; i++)
14426 fold_checksum_tree (argarray[i], &ctx, &ht);
14427 md5_finish_ctx (&ctx, checksum_after_arglist);
14428
14429 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14430 fold_check_failed (NULL_TREE, tem);
14431 #endif
14432 return tem;
14433 }
14434
14435 /* Perform constant folding and related simplification of initializer
14436 expression EXPR. These behave identically to "fold_buildN" but ignore
14437 potential run-time traps and exceptions that fold must preserve. */
14438
14439 #define START_FOLD_INIT \
14440 int saved_signaling_nans = flag_signaling_nans;\
14441 int saved_trapping_math = flag_trapping_math;\
14442 int saved_rounding_math = flag_rounding_math;\
14443 int saved_trapv = flag_trapv;\
14444 int saved_folding_initializer = folding_initializer;\
14445 flag_signaling_nans = 0;\
14446 flag_trapping_math = 0;\
14447 flag_rounding_math = 0;\
14448 flag_trapv = 0;\
14449 folding_initializer = 1;
14450
14451 #define END_FOLD_INIT \
14452 flag_signaling_nans = saved_signaling_nans;\
14453 flag_trapping_math = saved_trapping_math;\
14454 flag_rounding_math = saved_rounding_math;\
14455 flag_trapv = saved_trapv;\
14456 folding_initializer = saved_folding_initializer;
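/* Illustration (an assumption about typical use, not from the
   original source): an initializer whose folding would otherwise
   be blocked because it could trap at run time -- say a signed
   addition with -ftrapv, or a floating-point operation under
   -ftrapping-math -- is still folded by the wrappers below, since
   the macros clear those flags around the fold_buildN call. */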
14457
14458 tree
14459 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14460 tree type, tree op)
14461 {
14462 tree result;
14463 START_FOLD_INIT;
14464
14465 result = fold_build1_loc (loc, code, type, op);
14466
14467 END_FOLD_INIT;
14468 return result;
14469 }
14470
14471 tree
14472 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14473 tree type, tree op0, tree op1)
14474 {
14475 tree result;
14476 START_FOLD_INIT;
14477
14478 result = fold_build2_loc (loc, code, type, op0, op1);
14479
14480 END_FOLD_INIT;
14481 return result;
14482 }
14483
14484 tree
14485 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14486 int nargs, tree *argarray)
14487 {
14488 tree result;
14489 START_FOLD_INIT;
14490
14491 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14492
14493 END_FOLD_INIT;
14494 return result;
14495 }
14496
14497 #undef START_FOLD_INIT
14498 #undef END_FOLD_INIT
14499
14500 /* Determine whether the first argument is a multiple of the second. Return 0 if
14501 it is not, or if we cannot easily determine it to be.
14502
14503 An example of the sort of thing we care about (at this point; this routine
14504 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14505 fold cases do now) is discovering that
14506
14507 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14508
14509 is a multiple of
14510
14511 SAVE_EXPR (J * 8)
14512
14513 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14514
14515 This code also handles discovering that
14516
14517 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14518
14519 is a multiple of 8 so we don't have to worry about dealing with a
14520 possible remainder.
14521
14522 Note that we *look* inside a SAVE_EXPR only to determine how it was
14523 calculated; it is not safe for fold to do much of anything else with the
14524 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14525 at run time. For example, the latter example above *cannot* be implemented
14526 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14527 evaluation time of the original SAVE_EXPR is not necessarily the same at
14528 the time the new expression is evaluated. The only optimization of this
14529 sort that would be valid is changing
14530
14531 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14532
14533 divided by 8 to
14534
14535 SAVE_EXPR (I) * SAVE_EXPR (J)
14536
14537 (where the same SAVE_EXPR (J) is used in the original and the
14538 transformed version). */
14539
14540 int
14541 multiple_of_p (tree type, const_tree top, const_tree bottom)
14542 {
14543 if (operand_equal_p (top, bottom, 0))
14544 return 1;
14545
14546 if (TREE_CODE (type) != INTEGER_TYPE)
14547 return 0;
14548
14549 switch (TREE_CODE (top))
14550 {
14551 case BIT_AND_EXPR:
14552 /* Bitwise and provides a power of two multiple. If the mask is
14553 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14554 if (!integer_pow2p (bottom))
14555 return 0;
14556 /* FALLTHRU */
14557
14558 case MULT_EXPR:
14559 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14560 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14561
14562 case PLUS_EXPR:
14563 case MINUS_EXPR:
14564 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14565 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14566
14567 case LSHIFT_EXPR:
14568 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14569 {
14570 tree op1, t1;
14571
14572 op1 = TREE_OPERAND (top, 1);
14573 /* const_binop may not detect overflow correctly,
14574 so check for it explicitly here. */
14575 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14576 && 0 != (t1 = fold_convert (type,
14577 const_binop (LSHIFT_EXPR,
14578 size_one_node,
14579 op1)))
14580 && !TREE_OVERFLOW (t1))
14581 return multiple_of_p (type, t1, bottom);
14582 }
14583 return 0;
14584
14585 case NOP_EXPR:
14586 /* Can't handle conversions from a non-integral or wider integral type.  */
14587 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14588 || (TYPE_PRECISION (type)
14589 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14590 return 0;
14591
14592 /* FALLTHRU */
14593
14594 case SAVE_EXPR:
14595 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14596
14597 case COND_EXPR:
14598 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14599 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14600
14601 case INTEGER_CST:
14602 if (TREE_CODE (bottom) != INTEGER_CST
14603 || integer_zerop (bottom)
14604 || (TYPE_UNSIGNED (type)
14605 && (tree_int_cst_sgn (top) < 0
14606 || tree_int_cst_sgn (bottom) < 0)))
14607 return 0;
14608 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14609 SIGNED);
14610
14611 default:
14612 return 0;
14613 }
14614 }
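/* Illustrative uses (hypothetical trees, not from the original
   source): with TOP the tree for "i * 8 + 16" and BOTTOM
   size_int (8), the MULT_EXPR and PLUS_EXPR cases above combine
   to return 1; with TOP "i * 8 + 4" the result is 0, since the
   constant 4 is not a multiple of 8. */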
14615
14616 /* Return true if an expression with code CODE and type TYPE is known to be non-negative from CODE and TYPE alone.  */
14617
14618 static bool
14619 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14620 {
14621 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14622 && truth_value_p (code))
14623 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14624 have a signed:1 type (where the value is -1 and 0). */
14625 return true;
14626 return false;
14627 }
14628
14629 /* Return true if (CODE OP0) is known to be non-negative. If the return
14630 value is based on the assumption that signed overflow is undefined,
14631 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14632 *STRICT_OVERFLOW_P. */
14633
14634 bool
14635 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14636 bool *strict_overflow_p)
14637 {
14638 if (TYPE_UNSIGNED (type))
14639 return true;
14640
14641 switch (code)
14642 {
14643 case ABS_EXPR:
14644 /* We can't return 1 if flag_wrapv is set because
14645 ABS_EXPR<INT_MIN> = INT_MIN. */
14646 if (!INTEGRAL_TYPE_P (type))
14647 return true;
14648 if (TYPE_OVERFLOW_UNDEFINED (type))
14649 {
14650 *strict_overflow_p = true;
14651 return true;
14652 }
14653 break;
14654
14655 case NON_LVALUE_EXPR:
14656 case FLOAT_EXPR:
14657 case FIX_TRUNC_EXPR:
14658 return tree_expr_nonnegative_warnv_p (op0,
14659 strict_overflow_p);
14660
14661 CASE_CONVERT:
14662 {
14663 tree inner_type = TREE_TYPE (op0);
14664 tree outer_type = type;
14665
14666 if (TREE_CODE (outer_type) == REAL_TYPE)
14667 {
14668 if (TREE_CODE (inner_type) == REAL_TYPE)
14669 return tree_expr_nonnegative_warnv_p (op0,
14670 strict_overflow_p);
14671 if (INTEGRAL_TYPE_P (inner_type))
14672 {
14673 if (TYPE_UNSIGNED (inner_type))
14674 return true;
14675 return tree_expr_nonnegative_warnv_p (op0,
14676 strict_overflow_p);
14677 }
14678 }
14679 else if (INTEGRAL_TYPE_P (outer_type))
14680 {
14681 if (TREE_CODE (inner_type) == REAL_TYPE)
14682 return tree_expr_nonnegative_warnv_p (op0,
14683 strict_overflow_p);
14684 if (INTEGRAL_TYPE_P (inner_type))
14685 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14686 && TYPE_UNSIGNED (inner_type);
14687 }
14688 }
14689 break;
14690
14691 default:
14692 return tree_simple_nonnegative_warnv_p (code, type);
14693 }
14694
14695 /* We don't know the sign of `t', so be conservative and return false.  */
14696 return false;
14697 }
14698
14699 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14700 value is based on the assumption that signed overflow is undefined,
14701 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14702 *STRICT_OVERFLOW_P. */
14703
14704 bool
14705 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14706 tree op1, bool *strict_overflow_p)
14707 {
14708 if (TYPE_UNSIGNED (type))
14709 return true;
14710
14711 switch (code)
14712 {
14713 case POINTER_PLUS_EXPR:
14714 case PLUS_EXPR:
14715 if (FLOAT_TYPE_P (type))
14716 return (tree_expr_nonnegative_warnv_p (op0,
14717 strict_overflow_p)
14718 && tree_expr_nonnegative_warnv_p (op1,
14719 strict_overflow_p));
14720
14721 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14722 both unsigned and at least 2 bits shorter than the result. */
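/* For example (illustrative): adding two values zero-extended from
   8 bits into a 32-bit int needs at most MAX (8, 8) + 1 = 9 bits,
   which is less than 32, so the sum can never set the sign bit. */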
14723 if (TREE_CODE (type) == INTEGER_TYPE
14724 && TREE_CODE (op0) == NOP_EXPR
14725 && TREE_CODE (op1) == NOP_EXPR)
14726 {
14727 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14728 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14729 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14730 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14731 {
14732 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14733 TYPE_PRECISION (inner2)) + 1;
14734 return prec < TYPE_PRECISION (type);
14735 }
14736 }
14737 break;
14738
14739 case MULT_EXPR:
14740 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14741 {
14742 /* x * x is always non-negative for floating point x
14743 or when signed overflow is undefined.  */
14744 if (operand_equal_p (op0, op1, 0)
14745 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14746 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14747 {
14748 if (TYPE_OVERFLOW_UNDEFINED (type))
14749 *strict_overflow_p = true;
14750 return true;
14751 }
14752 }
14753
14754 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14755 both unsigned and their combined precision is less than that of the result.  */
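/* Illustrative instance: a product of two zero-extended 8-bit
   values fits in 16 bits, less than the 32 bits of int, so it
   cannot become negative; a constant operand contributes only its
   minimum precision (e.g. 3 bits for the constant 7). */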
14756 if (TREE_CODE (type) == INTEGER_TYPE
14757 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14758 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14759 {
14760 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14761 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14762 : TREE_TYPE (op0);
14763 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14764 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14765 : TREE_TYPE (op1);
14766
14767 bool unsigned0 = TYPE_UNSIGNED (inner0);
14768 bool unsigned1 = TYPE_UNSIGNED (inner1);
14769
14770 if (TREE_CODE (op0) == INTEGER_CST)
14771 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14772
14773 if (TREE_CODE (op1) == INTEGER_CST)
14774 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14775
14776 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14777 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14778 {
14779 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14780 ? tree_int_cst_min_precision (op0, UNSIGNED)
14781 : TYPE_PRECISION (inner0);
14782
14783 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14784 ? tree_int_cst_min_precision (op1, UNSIGNED)
14785 : TYPE_PRECISION (inner1);
14786
14787 return precision0 + precision1 < TYPE_PRECISION (type);
14788 }
14789 }
14790 return false;
14791
14792 case BIT_AND_EXPR:
14793 case MAX_EXPR:
14794 return (tree_expr_nonnegative_warnv_p (op0,
14795 strict_overflow_p)
14796 || tree_expr_nonnegative_warnv_p (op1,
14797 strict_overflow_p));
14798
14799 case BIT_IOR_EXPR:
14800 case BIT_XOR_EXPR:
14801 case MIN_EXPR:
14802 case RDIV_EXPR:
14803 case TRUNC_DIV_EXPR:
14804 case CEIL_DIV_EXPR:
14805 case FLOOR_DIV_EXPR:
14806 case ROUND_DIV_EXPR:
14807 return (tree_expr_nonnegative_warnv_p (op0,
14808 strict_overflow_p)
14809 && tree_expr_nonnegative_warnv_p (op1,
14810 strict_overflow_p));
14811
14812 case TRUNC_MOD_EXPR:
14813 case CEIL_MOD_EXPR:
14814 case FLOOR_MOD_EXPR:
14815 case ROUND_MOD_EXPR:
14816 return tree_expr_nonnegative_warnv_p (op0,
14817 strict_overflow_p);
14818 default:
14819 return tree_simple_nonnegative_warnv_p (code, type);
14820 }
14821
14822 /* We don't know the sign of `t', so be conservative and return false.  */
14823 return false;
14824 }
14825
14826 /* Return true if T is known to be non-negative. If the return
14827 value is based on the assumption that signed overflow is undefined,
14828 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14829 *STRICT_OVERFLOW_P. */
14830
14831 bool
14832 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14833 {
14834 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14835 return true;
14836
14837 switch (TREE_CODE (t))
14838 {
14839 case INTEGER_CST:
14840 return tree_int_cst_sgn (t) >= 0;
14841
14842 case REAL_CST:
14843 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14844
14845 case FIXED_CST:
14846 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14847
14848 case COND_EXPR:
14849 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14850 strict_overflow_p)
14851 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14852 strict_overflow_p));
14853 default:
14854 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14855 TREE_TYPE (t));
14856 }
14857 /* We don't know the sign of `t', so be conservative and return false.  */
14858 return false;
14859 }
14860
14861 /* Return true if T is known to be non-negative. If the return
14862 value is based on the assumption that signed overflow is undefined,
14863 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14864 *STRICT_OVERFLOW_P. */
14865
14866 bool
14867 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14868 tree arg0, tree arg1, bool *strict_overflow_p)
14869 {
14870 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14871 switch (DECL_FUNCTION_CODE (fndecl))
14872 {
14873 CASE_FLT_FN (BUILT_IN_ACOS):
14874 CASE_FLT_FN (BUILT_IN_ACOSH):
14875 CASE_FLT_FN (BUILT_IN_CABS):
14876 CASE_FLT_FN (BUILT_IN_COSH):
14877 CASE_FLT_FN (BUILT_IN_ERFC):
14878 CASE_FLT_FN (BUILT_IN_EXP):
14879 CASE_FLT_FN (BUILT_IN_EXP10):
14880 CASE_FLT_FN (BUILT_IN_EXP2):
14881 CASE_FLT_FN (BUILT_IN_FABS):
14882 CASE_FLT_FN (BUILT_IN_FDIM):
14883 CASE_FLT_FN (BUILT_IN_HYPOT):
14884 CASE_FLT_FN (BUILT_IN_POW10):
14885 CASE_INT_FN (BUILT_IN_FFS):
14886 CASE_INT_FN (BUILT_IN_PARITY):
14887 CASE_INT_FN (BUILT_IN_POPCOUNT):
14888 CASE_INT_FN (BUILT_IN_CLZ):
14889 CASE_INT_FN (BUILT_IN_CLRSB):
14890 case BUILT_IN_BSWAP32:
14891 case BUILT_IN_BSWAP64:
14892 /* Always true. */
14893 return true;
14894
14895 CASE_FLT_FN (BUILT_IN_SQRT):
14896 /* sqrt(-0.0) is -0.0. */
14897 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14898 return true;
14899 return tree_expr_nonnegative_warnv_p (arg0,
14900 strict_overflow_p);
14901
14902 CASE_FLT_FN (BUILT_IN_ASINH):
14903 CASE_FLT_FN (BUILT_IN_ATAN):
14904 CASE_FLT_FN (BUILT_IN_ATANH):
14905 CASE_FLT_FN (BUILT_IN_CBRT):
14906 CASE_FLT_FN (BUILT_IN_CEIL):
14907 CASE_FLT_FN (BUILT_IN_ERF):
14908 CASE_FLT_FN (BUILT_IN_EXPM1):
14909 CASE_FLT_FN (BUILT_IN_FLOOR):
14910 CASE_FLT_FN (BUILT_IN_FMOD):
14911 CASE_FLT_FN (BUILT_IN_FREXP):
14912 CASE_FLT_FN (BUILT_IN_ICEIL):
14913 CASE_FLT_FN (BUILT_IN_IFLOOR):
14914 CASE_FLT_FN (BUILT_IN_IRINT):
14915 CASE_FLT_FN (BUILT_IN_IROUND):
14916 CASE_FLT_FN (BUILT_IN_LCEIL):
14917 CASE_FLT_FN (BUILT_IN_LDEXP):
14918 CASE_FLT_FN (BUILT_IN_LFLOOR):
14919 CASE_FLT_FN (BUILT_IN_LLCEIL):
14920 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14921 CASE_FLT_FN (BUILT_IN_LLRINT):
14922 CASE_FLT_FN (BUILT_IN_LLROUND):
14923 CASE_FLT_FN (BUILT_IN_LRINT):
14924 CASE_FLT_FN (BUILT_IN_LROUND):
14925 CASE_FLT_FN (BUILT_IN_MODF):
14926 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14927 CASE_FLT_FN (BUILT_IN_RINT):
14928 CASE_FLT_FN (BUILT_IN_ROUND):
14929 CASE_FLT_FN (BUILT_IN_SCALB):
14930 CASE_FLT_FN (BUILT_IN_SCALBLN):
14931 CASE_FLT_FN (BUILT_IN_SCALBN):
14932 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14933 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14934 CASE_FLT_FN (BUILT_IN_SINH):
14935 CASE_FLT_FN (BUILT_IN_TANH):
14936 CASE_FLT_FN (BUILT_IN_TRUNC):
14937 /* True if the 1st argument is nonnegative. */
14938 return tree_expr_nonnegative_warnv_p (arg0,
14939 strict_overflow_p);
14940
14941 CASE_FLT_FN (BUILT_IN_FMAX):
14942 /* True if the 1st OR 2nd arguments are nonnegative. */
14943 return (tree_expr_nonnegative_warnv_p (arg0,
14944 strict_overflow_p)
14945 || (tree_expr_nonnegative_warnv_p (arg1,
14946 strict_overflow_p)));
14947
14948 CASE_FLT_FN (BUILT_IN_FMIN):
14949 /* True if the 1st AND 2nd arguments are nonnegative. */
14950 return (tree_expr_nonnegative_warnv_p (arg0,
14951 strict_overflow_p)
14952 && (tree_expr_nonnegative_warnv_p (arg1,
14953 strict_overflow_p)));
14954
14955 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14956 /* True if the 2nd argument is nonnegative. */
14957 return tree_expr_nonnegative_warnv_p (arg1,
14958 strict_overflow_p);
14959
14960 CASE_FLT_FN (BUILT_IN_POWI):
14961 /* True if the 1st argument is nonnegative or the second
14962 argument is an even integer. */
14963 if (TREE_CODE (arg1) == INTEGER_CST
14964 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14965 return true;
14966 return tree_expr_nonnegative_warnv_p (arg0,
14967 strict_overflow_p);
14968
14969 CASE_FLT_FN (BUILT_IN_POW):
14970 /* True if the 1st argument is nonnegative or the second
14971 argument is an even integer valued real. */
14972 if (TREE_CODE (arg1) == REAL_CST)
14973 {
14974 REAL_VALUE_TYPE c;
14975 HOST_WIDE_INT n;
14976
14977 c = TREE_REAL_CST (arg1);
14978 n = real_to_integer (&c);
14979 if ((n & 1) == 0)
14980 {
14981 REAL_VALUE_TYPE cint;
14982 real_from_integer (&cint, VOIDmode, n, SIGNED);
14983 if (real_identical (&c, &cint))
14984 return true;
14985 }
14986 }
14987 return tree_expr_nonnegative_warnv_p (arg0,
14988 strict_overflow_p);
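/* Illustrative instance: pow (x, 2.0) is known non-negative for
   any x because 2.0 is an even integer-valued real, while
   pow (x, 3.0) falls back to asking whether x itself is
   non-negative. */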
14989
14990 default:
14991 break;
14992 }
14993 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14994 type);
14995 }
14996
14997 /* Return true if T is known to be non-negative. If the return
14998 value is based on the assumption that signed overflow is undefined,
14999 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15000 *STRICT_OVERFLOW_P. */
15001
15002 static bool
15003 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15004 {
15005 enum tree_code code = TREE_CODE (t);
15006 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15007 return true;
15008
15009 switch (code)
15010 {
15011 case TARGET_EXPR:
15012 {
15013 tree temp = TARGET_EXPR_SLOT (t);
15014 t = TARGET_EXPR_INITIAL (t);
15015
15016 /* If the initializer is non-void, then it's a normal expression
15017 that will be assigned to the slot. */
15018 if (!VOID_TYPE_P (t))
15019 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15020
15021 /* Otherwise, the initializer sets the slot in some way. One common
15022 way is an assignment statement at the end of the initializer. */
15023 while (1)
15024 {
15025 if (TREE_CODE (t) == BIND_EXPR)
15026 t = expr_last (BIND_EXPR_BODY (t));
15027 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15028 || TREE_CODE (t) == TRY_CATCH_EXPR)
15029 t = expr_last (TREE_OPERAND (t, 0));
15030 else if (TREE_CODE (t) == STATEMENT_LIST)
15031 t = expr_last (t);
15032 else
15033 break;
15034 }
15035 if (TREE_CODE (t) == MODIFY_EXPR
15036 && TREE_OPERAND (t, 0) == temp)
15037 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15038 strict_overflow_p);
15039
15040 return false;
15041 }
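/* Illustrative walk (hypothetical tree, not from the original
   source): for TARGET_EXPR <D.123, { ...; D.123 = b * b; }> the
   loop above steps to the final statement, sees the MODIFY_EXPR
   into the slot D.123, and then asks whether "b * b" is
   non-negative. */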
15042
15043 case CALL_EXPR:
15044 {
15045 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15046 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15047
15048 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15049 get_callee_fndecl (t),
15050 arg0,
15051 arg1,
15052 strict_overflow_p);
15053 }
15054 case COMPOUND_EXPR:
15055 case MODIFY_EXPR:
15056 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15057 strict_overflow_p);
15058 case BIND_EXPR:
15059 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15060 strict_overflow_p);
15061 case SAVE_EXPR:
15062 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15063 strict_overflow_p);
15064
15065 default:
15066 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15067 TREE_TYPE (t));
15068 }
15069
15070 /* We don't know the sign of `t', so be conservative and return false.  */
15071 return false;
15072 }
15073
15074 /* Return true if T is known to be non-negative. If the return
15075 value is based on the assumption that signed overflow is undefined,
15076 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15077 *STRICT_OVERFLOW_P. */
15078
15079 bool
15080 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15081 {
15082 enum tree_code code;
15083 if (t == error_mark_node)
15084 return false;
15085
15086 code = TREE_CODE (t);
15087 switch (TREE_CODE_CLASS (code))
15088 {
15089 case tcc_binary:
15090 case tcc_comparison:
15091 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15092 TREE_TYPE (t),
15093 TREE_OPERAND (t, 0),
15094 TREE_OPERAND (t, 1),
15095 strict_overflow_p);
15096
15097 case tcc_unary:
15098 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15099 TREE_TYPE (t),
15100 TREE_OPERAND (t, 0),
15101 strict_overflow_p);
15102
15103 case tcc_constant:
15104 case tcc_declaration:
15105 case tcc_reference:
15106 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15107
15108 default:
15109 break;
15110 }
15111
15112 switch (code)
15113 {
15114 case TRUTH_AND_EXPR:
15115 case TRUTH_OR_EXPR:
15116 case TRUTH_XOR_EXPR:
15117 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15118 TREE_TYPE (t),
15119 TREE_OPERAND (t, 0),
15120 TREE_OPERAND (t, 1),
15121 strict_overflow_p);
15122 case TRUTH_NOT_EXPR:
15123 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15124 TREE_TYPE (t),
15125 TREE_OPERAND (t, 0),
15126 strict_overflow_p);
15127
15128 case COND_EXPR:
15129 case CONSTRUCTOR:
15130 case OBJ_TYPE_REF:
15131 case ASSERT_EXPR:
15132 case ADDR_EXPR:
15133 case WITH_SIZE_EXPR:
15134 case SSA_NAME:
15135 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15136
15137 default:
15138 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15139 }
15140 }
15141
15142 /* Return true if `t' is known to be non-negative. Handle warnings
15143 about undefined signed overflow. */
15144
15145 bool
15146 tree_expr_nonnegative_p (tree t)
15147 {
15148 bool ret, strict_overflow_p;
15149
15150 strict_overflow_p = false;
15151 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15152 if (strict_overflow_p)
15153 fold_overflow_warning (("assuming signed overflow does not occur when "
15154 "determining that expression is always "
15155 "non-negative"),
15156 WARN_STRICT_OVERFLOW_MISC);
15157 return ret;
15158 }
15159
15160
15161 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15162 For floating point we further ensure that T is not denormal.
15163 Similar logic is present in nonzero_address_p in rtlanal.c.
15164
15165 If the return value is based on the assumption that signed overflow
15166 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15167 change *STRICT_OVERFLOW_P. */
15168
15169 bool
15170 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15171 bool *strict_overflow_p)
15172 {
15173 switch (code)
15174 {
15175 case ABS_EXPR:
15176 return tree_expr_nonzero_warnv_p (op0,
15177 strict_overflow_p);
15178
15179 case NOP_EXPR:
15180 {
15181 tree inner_type = TREE_TYPE (op0);
15182 tree outer_type = type;
15183
15184 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15185 && tree_expr_nonzero_warnv_p (op0,
15186 strict_overflow_p));
15187 }
15188 break;
15189
15190 case NON_LVALUE_EXPR:
15191 return tree_expr_nonzero_warnv_p (op0,
15192 strict_overflow_p);
15193
15194 default:
15195 break;
15196 }
15197
15198 return false;
15199 }
15200
15201 /* Return true when the binary expression (CODE OP0 OP1) is known
15202 to be nonzero.  Similar logic is present in nonzero_address in
15203 rtlanal.c.
15204
15205 If the return value is based on the assumption that signed overflow
15206 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15207 change *STRICT_OVERFLOW_P. */
15208
15209 bool
15210 tree_binary_nonzero_warnv_p (enum tree_code code,
15211 tree type,
15212 tree op0,
15213 tree op1, bool *strict_overflow_p)
15214 {
15215 bool sub_strict_overflow_p;
15216 switch (code)
15217 {
15218 case POINTER_PLUS_EXPR:
15219 case PLUS_EXPR:
15220 if (TYPE_OVERFLOW_UNDEFINED (type))
15221 {
15222 /* In the presence of negative values it is hard
15223 to say anything definite. */
15224 sub_strict_overflow_p = false;
15225 if (!tree_expr_nonnegative_warnv_p (op0,
15226 &sub_strict_overflow_p)
15227 || !tree_expr_nonnegative_warnv_p (op1,
15228 &sub_strict_overflow_p))
15229 return false;
15230 /* Both operands are non-negative, so the sum is nonzero iff at least one operand is positive. */
15231 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15232 overflows, on a twos-complement machine the sum of two
15233 nonnegative numbers can never be zero. */
15234 return (tree_expr_nonzero_warnv_p (op0,
15235 strict_overflow_p)
15236 || tree_expr_nonzero_warnv_p (op1,
15237 strict_overflow_p));
15238 }
15239 break;
15240
15241 case MULT_EXPR:
15242 if (TYPE_OVERFLOW_UNDEFINED (type))
15243 {
15244 if (tree_expr_nonzero_warnv_p (op0,
15245 strict_overflow_p)
15246 && tree_expr_nonzero_warnv_p (op1,
15247 strict_overflow_p))
15248 {
15249 *strict_overflow_p = true;
15250 return true;
15251 }
15252 }
15253 break;
15254
15255 case MIN_EXPR:
15256 sub_strict_overflow_p = false;
15257 if (tree_expr_nonzero_warnv_p (op0,
15258 &sub_strict_overflow_p)
15259 && tree_expr_nonzero_warnv_p (op1,
15260 &sub_strict_overflow_p))
15261 {
15262 if (sub_strict_overflow_p)
15263 *strict_overflow_p = true;
15264 }
15265 break;
15266
15267 case MAX_EXPR:
15268 sub_strict_overflow_p = false;
15269 if (tree_expr_nonzero_warnv_p (op0,
15270 &sub_strict_overflow_p))
15271 {
15272 if (sub_strict_overflow_p)
15273 *strict_overflow_p = true;
15274
15275 /* When both operands are nonzero, then MAX must be too. */
15276 if (tree_expr_nonzero_warnv_p (op1,
15277 strict_overflow_p))
15278 return true;
15279
15280 /* MAX where operand 0 is positive is positive. */
15281 return tree_expr_nonnegative_warnv_p (op0,
15282 strict_overflow_p);
15283 }
15284 /* MAX where operand 1 is positive is positive. */
15285 else if (tree_expr_nonzero_warnv_p (op1,
15286 &sub_strict_overflow_p)
15287 && tree_expr_nonnegative_warnv_p (op1,
15288 &sub_strict_overflow_p))
15289 {
15290 if (sub_strict_overflow_p)
15291 *strict_overflow_p = true;
15292 return true;
15293 }
15294 break;
15295
15296 case BIT_IOR_EXPR:
15297 return (tree_expr_nonzero_warnv_p (op1,
15298 strict_overflow_p)
15299 || tree_expr_nonzero_warnv_p (op0,
15300 strict_overflow_p));
15301
15302 default:
15303 break;
15304 }
15305
15306 return false;
15307 }
15308
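/* A worked example of the PLUS_EXPR reasoning above, assuming a
   signed 8-bit type for concreteness: each non-negative operand lies
   in [0, 127], so the true sum lies in [0, 254] and is zero only when
   both operands are zero.  Even if the addition wraps (127 + 127 ==
   254 == -2 in two's complement), the wrapped value still cannot be
   zero, which is why *STRICT_OVERFLOW_P is left untouched there.  */
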
15309 /* Return true when T is known to be nonzero; in particular, handle
15310 the case where T is an address that can be proven distinct from NULL.
15311 Similar logic is present in nonzero_address in rtlanal.c.
15312
15313 If the return value is based on the assumption that signed overflow
15314 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15315 change *STRICT_OVERFLOW_P. */
15316
15317 bool
15318 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15319 {
15320 bool sub_strict_overflow_p;
15321 switch (TREE_CODE (t))
15322 {
15323 case INTEGER_CST:
15324 return !integer_zerop (t);
15325
15326 case ADDR_EXPR:
15327 {
15328 tree base = TREE_OPERAND (t, 0);
15329
15330 if (!DECL_P (base))
15331 base = get_base_address (base);
15332
15333 if (!base)
15334 return false;
15335
15336 /* For objects in the symbol table, check whether we know they are nonzero.
15337 Don't do anything for variables and functions before the symtab is built;
15338 it is quite possible that they will be declared weak later. */
15339 if (DECL_P (base) && decl_in_symtab_p (base))
15340 {
15341 struct symtab_node *symbol;
15342
15343 symbol = symtab_node::get_create (base);
15344 if (symbol)
15345 return symbol->nonzero_address ();
15346 else
15347 return false;
15348 }
15349
15350 /* Function-local objects are never NULL. */
15351 if (DECL_P (base)
15352 && (DECL_CONTEXT (base)
15353 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15354 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15355 return true;
15356
15357 /* Constants are never weak. */
15358 if (CONSTANT_CLASS_P (base))
15359 return true;
15360
15361 return false;
15362 }
15363
15364 case COND_EXPR:
15365 sub_strict_overflow_p = false;
15366 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15367 &sub_strict_overflow_p)
15368 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15369 &sub_strict_overflow_p))
15370 {
15371 if (sub_strict_overflow_p)
15372 *strict_overflow_p = true;
15373 return true;
15374 }
15375 break;
15376
15377 default:
15378 break;
15379 }
15380 return false;
15381 }
15382
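/* Illustrative cases for the function above (hypothetical
   declarations, not taken from any particular test): given
   "static int x;", the address &x is nonzero unless the symbol is
   later made weak, which is why the decision is deferred to
   symtab_node::nonzero_address.  An automatic variable declared
   inside a function always has a nonzero address.  For
   "extern int w __attribute__ ((weak));", &w may legitimately
   compare equal to NULL, so the function returns false.  */
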
15383 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15384 attempt to fold the expression to a constant without modifying TYPE,
15385 OP0 or OP1.
15386
15387 If the expression can be simplified to a constant, then return
15388 the constant.  If the expression cannot be simplified to a
15389 constant, then return NULL_TREE. */
15390
15391 tree
15392 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15393 {
15394 tree tem = fold_binary (code, type, op0, op1);
15395 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15396 }
15397
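/* For example (a minimal sketch; the operand trees are assumed to be
   built elsewhere):

     tree a = build_int_cst (integer_type_node, 2);
     tree b = build_int_cst (integer_type_node, 3);
     tree c = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                       a, b);

   leaves C as the INTEGER_CST 5, while passing a non-constant operand
   such as a VAR_DECL makes the helper return NULL_TREE even when
   fold_binary would have produced a simplified expression.  */
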
15398 /* Given the components of a unary expression CODE, TYPE and OP0,
15399 attempt to fold the expression to a constant without modifying
15400 TYPE or OP0.
15401
15402 If the expression can be simplified to a constant, then return
15403 the constant.  If the expression cannot be simplified to a
15404 constant, then return NULL_TREE. */
15405
15406 tree
15407 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15408 {
15409 tree tem = fold_unary (code, type, op0);
15410 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15411 }
15412
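/* Similarly, a sketch for the unary helper:

     tree m = build_int_cst (integer_type_node, 5);
     tree n = fold_unary_to_constant (NEGATE_EXPR, integer_type_node, m);

   leaves N as the INTEGER_CST -5; any operand that does not fold to a
   constant yields NULL_TREE.  */
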
15413 /* If EXP represents referencing an element in a constant string
15414 (either via pointer arithmetic or array indexing), return the
15415 tree representing the value accessed, otherwise return NULL. */
15416
15417 tree
15418 fold_read_from_constant_string (tree exp)
15419 {
15420 if ((TREE_CODE (exp) == INDIRECT_REF
15421 || TREE_CODE (exp) == ARRAY_REF)
15422 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15423 {
15424 tree exp1 = TREE_OPERAND (exp, 0);
15425 tree index;
15426 tree string;
15427 location_t loc = EXPR_LOCATION (exp);
15428
15429 if (TREE_CODE (exp) == INDIRECT_REF)
15430 string = string_constant (exp1, &index);
15431 else
15432 {
15433 tree low_bound = array_ref_low_bound (exp);
15434 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15435
15436 /* Optimize the special case of a zero lower bound.
15437
15438 We convert the low_bound to sizetype to avoid some problems
15439 with constant folding. (E.g. suppose the lower bound is 1,
15440 and its mode is QI.  Without the conversion, (ARRAY
15441 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15442 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15443 if (! integer_zerop (low_bound))
15444 index = size_diffop_loc (loc, index,
15445 fold_convert_loc (loc, sizetype, low_bound));
15446
15447 string = exp1;
15448 }
15449
15450 if (string
15451 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15452 && TREE_CODE (string) == STRING_CST
15453 && TREE_CODE (index) == INTEGER_CST
15454 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15455 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15456 == MODE_INT)
15457 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15458 return build_int_cst_type (TREE_TYPE (exp),
15459 (TREE_STRING_POINTER (string)
15460 [TREE_INT_CST_LOW (index)]));
15461 }
15462 return NULL;
15463 }
15464
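/* For example, if EXP represents the C expression "abc"[1] (an
   ARRAY_REF whose base is a STRING_CST), the function returns an
   INTEGER_CST with value 'b' (98 in ASCII); an out-of-bounds or
   non-constant index makes it return NULL instead.  */
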
15465 /* Return the tree for neg (ARG0) when ARG0 is known to be an
15466 integer, real, or fixed-point constant.
15467
15468 TYPE is the type of the result. */
15469
15470 static tree
15471 fold_negate_const (tree arg0, tree type)
15472 {
15473 tree t = NULL_TREE;
15474
15475 switch (TREE_CODE (arg0))
15476 {
15477 case INTEGER_CST:
15478 {
15479 bool overflow;
15480 wide_int val = wi::neg (arg0, &overflow);
15481 t = force_fit_type (type, val, 1,
15482 (overflow | TREE_OVERFLOW (arg0))
15483 && !TYPE_UNSIGNED (type));
15484 break;
15485 }
15486
15487 case REAL_CST:
15488 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15489 break;
15490
15491 case FIXED_CST:
15492 {
15493 FIXED_VALUE_TYPE f;
15494 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15495 &(TREE_FIXED_CST (arg0)), NULL,
15496 TYPE_SATURATING (type));
15497 t = build_fixed (type, f);
15498 /* Propagate overflow flags. */
15499 if (overflow_p | TREE_OVERFLOW (arg0))
15500 TREE_OVERFLOW (t) = 1;
15501 break;
15502 }
15503
15504 default:
15505 gcc_unreachable ();
15506 }
15507
15508 return t;
15509 }
15510
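/* For example, negating the INTEGER_CST 5 in a signed 32-bit type
   yields -5, while negating INT_MIN (-2147483648) wraps back to
   itself; in that case wi::neg reports overflow and force_fit_type
   marks the result with TREE_OVERFLOW, since the type is signed.  */
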
15511 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15512 an integer constant or real constant.
15513
15514 TYPE is the type of the result. */
15515
15516 tree
15517 fold_abs_const (tree arg0, tree type)
15518 {
15519 tree t = NULL_TREE;
15520
15521 switch (TREE_CODE (arg0))
15522 {
15523 case INTEGER_CST:
15524 {
15525 /* If the value is unsigned or non-negative, then the absolute value
15526 is the same as the ordinary value. */
15527 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15528 t = arg0;
15529
15530 /* If the value is negative, then the absolute value is
15531 its negation. */
15532 else
15533 {
15534 bool overflow;
15535 wide_int val = wi::neg (arg0, &overflow);
15536 t = force_fit_type (type, val, -1,
15537 overflow | TREE_OVERFLOW (arg0));
15538 }
15539 }
15540 break;
15541
15542 case REAL_CST:
15543 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15544 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15545 else
15546 t = arg0;
15547 break;
15548
15549 default:
15550 gcc_unreachable ();
15551 }
15552
15553 return t;
15554 }
15555
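/* Likewise, fold_abs_const of INT_MIN in a signed 32-bit type cannot
   be represented, so the negated result carries TREE_OVERFLOW; for
   REAL_CST arguments the sign is simply cleared by negating negative
   values, with no overflow possible.  */
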
15556 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15557 constant. TYPE is the type of the result. */
15558
15559 static tree
15560 fold_not_const (const_tree arg0, tree type)
15561 {
15562 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15563
15564 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15565 }
15566
15567 /* Given CODE, a relational operator, the target type TYPE, and two
15568 constant operands OP0 and OP1, return the result of the
15569 relational operation.  If the result is not a compile-time
15570 constant, then return NULL_TREE. */
15571
15572 static tree
15573 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15574 {
15575 int result, invert;
15576
15577 /* From here on, we handle only the cases where the result is
15578 known to be a constant. */
15579
15580 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15581 {
15582 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15583 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15584
15585 /* Handle the cases where either operand is a NaN. */
15586 if (real_isnan (c0) || real_isnan (c1))
15587 {
15588 switch (code)
15589 {
15590 case EQ_EXPR:
15591 case ORDERED_EXPR:
15592 result = 0;
15593 break;
15594
15595 case NE_EXPR:
15596 case UNORDERED_EXPR:
15597 case UNLT_EXPR:
15598 case UNLE_EXPR:
15599 case UNGT_EXPR:
15600 case UNGE_EXPR:
15601 case UNEQ_EXPR:
15602 result = 1;
15603 break;
15604
15605 case LT_EXPR:
15606 case LE_EXPR:
15607 case GT_EXPR:
15608 case GE_EXPR:
15609 case LTGT_EXPR:
15610 if (flag_trapping_math)
15611 return NULL_TREE;
15612 result = 0;
15613 break;
15614
15615 default:
15616 gcc_unreachable ();
15617 }
15618
15619 return constant_boolean_node (result, type);
15620 }
15621
15622 return constant_boolean_node (real_compare (code, c0, c1), type);
15623 }
15624
15625 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15626 {
15627 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15628 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15629 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15630 }
15631
15632 /* Handle equality/inequality of complex constants. */
15633 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15634 {
15635 tree rcond = fold_relational_const (code, type,
15636 TREE_REALPART (op0),
15637 TREE_REALPART (op1));
15638 tree icond = fold_relational_const (code, type,
15639 TREE_IMAGPART (op0),
15640 TREE_IMAGPART (op1));
15641 if (code == EQ_EXPR)
15642 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15643 else if (code == NE_EXPR)
15644 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15645 else
15646 return NULL_TREE;
15647 }
15648
15649 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15650 {
15651 unsigned count = VECTOR_CST_NELTS (op0);
15652 tree *elts = XALLOCAVEC (tree, count);
15653 gcc_assert (VECTOR_CST_NELTS (op1) == count
15654 && TYPE_VECTOR_SUBPARTS (type) == count);
15655
15656 for (unsigned i = 0; i < count; i++)
15657 {
15658 tree elem_type = TREE_TYPE (type);
15659 tree elem0 = VECTOR_CST_ELT (op0, i);
15660 tree elem1 = VECTOR_CST_ELT (op1, i);
15661
15662 tree tem = fold_relational_const (code, elem_type,
15663 elem0, elem1);
15664
15665 if (tem == NULL_TREE)
15666 return NULL_TREE;
15667
15668 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15669 }
15670
15671 return build_vector (type, elts);
15672 }
15673
15674 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15675
15676 To compute GT, swap the arguments and do LT.
15677 To compute GE, do LT and invert the result.
15678 To compute LE, swap the arguments, do LT and invert the result.
15679 To compute NE, do EQ and invert the result.
15680
15681 Therefore, the code below must handle only EQ and LT. */
15682
15683 if (code == LE_EXPR || code == GT_EXPR)
15684 {
15685 tree tem = op0;
15686 op0 = op1;
15687 op1 = tem;
15688 code = swap_tree_comparison (code);
15689 }
15690
15691 /* Note that it is safe to invert for real values here because we
15692 have already handled the one case where it matters. */
15693
15694 invert = 0;
15695 if (code == NE_EXPR || code == GE_EXPR)
15696 {
15697 invert = 1;
15698 code = invert_tree_comparison (code, false);
15699 }
15700
15701 /* Compute a result for LT or EQ if the arguments permit;
15702 otherwise return NULL_TREE. */
15703 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15704 {
15705 if (code == EQ_EXPR)
15706 result = tree_int_cst_equal (op0, op1);
15707 else
15708 result = tree_int_cst_lt (op0, op1);
15709 }
15710 else
15711 return NULL_TREE;
15712
15713 if (invert)
15714 result ^= 1;
15715 return constant_boolean_node (result, type);
15716 }
15717
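/* A few concrete cases of the NaN handling above: with C0 a NaN,
   EQ_EXPR and ORDERED_EXPR fold to false and NE_EXPR and the UN*
   comparisons fold to true, while LT_EXPR folds to false only when
   -fno-trapping-math is in effect; under flag_trapping_math the fold
   is declined (NULL_TREE) because the comparison may raise an
   invalid operation exception at run time.  */
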
15718 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15719 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15720 itself. */
15721
15722 tree
15723 fold_build_cleanup_point_expr (tree type, tree expr)
15724 {
15725 /* If the expression does not have side effects then we don't have to wrap
15726 it with a cleanup point expression. */
15727 if (!TREE_SIDE_EFFECTS (expr))
15728 return expr;
15729
15730 /* If the expression is a return, check whether the operand of the
15731 return, or the right-hand side of the modify expression inside the
15732 return, has side effects.  If either has none, we don't need to wrap
15733 the expression in a cleanup point expression.  Note we don't check the
15734 left-hand side of the modify because it should always be a return decl. */
15735 if (TREE_CODE (expr) == RETURN_EXPR)
15736 {
15737 tree op = TREE_OPERAND (expr, 0);
15738 if (!op || !TREE_SIDE_EFFECTS (op))
15739 return expr;
15740 op = TREE_OPERAND (op, 1);
15741 if (!TREE_SIDE_EFFECTS (op))
15742 return expr;
15743 }
15744
15745 return build1 (CLEANUP_POINT_EXPR, type, expr);
15746 }
15747
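/* For instance, given a RETURN_EXPR for "return x = foo ();", the
   right-hand side foo () has side effects, so the whole expression
   is wrapped in a CLEANUP_POINT_EXPR; for "return x = 42;" the
   right-hand side is side-effect free and EXPR is returned as is.  */
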
15748 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15749 of an indirection through OP0, or NULL_TREE if no simplification is
15750 possible. */
15751
15752 tree
15753 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15754 {
15755 tree sub = op0;
15756 tree subtype;
15757
15758 STRIP_NOPS (sub);
15759 subtype = TREE_TYPE (sub);
15760 if (!POINTER_TYPE_P (subtype))
15761 return NULL_TREE;
15762
15763 if (TREE_CODE (sub) == ADDR_EXPR)
15764 {
15765 tree op = TREE_OPERAND (sub, 0);
15766 tree optype = TREE_TYPE (op);
15767 /* *&CONST_DECL -> the value of the const decl. */
15768 if (TREE_CODE (op) == CONST_DECL)
15769 return DECL_INITIAL (op);
15770 /* *&p => p; make sure to handle *&"str"[cst] here. */
15771 if (type == optype)
15772 {
15773 tree fop = fold_read_from_constant_string (op);
15774 if (fop)
15775 return fop;
15776 else
15777 return op;
15778 }
15779 /* *(foo *)&fooarray => fooarray[0] */
15780 else if (TREE_CODE (optype) == ARRAY_TYPE
15781 && type == TREE_TYPE (optype)
15782 && (!in_gimple_form
15783 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15784 {
15785 tree type_domain = TYPE_DOMAIN (optype);
15786 tree min_val = size_zero_node;
15787 if (type_domain && TYPE_MIN_VALUE (type_domain))
15788 min_val = TYPE_MIN_VALUE (type_domain);
15789 if (in_gimple_form
15790 && TREE_CODE (min_val) != INTEGER_CST)
15791 return NULL_TREE;
15792 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15793 NULL_TREE, NULL_TREE);
15794 }
15795 /* *(foo *)&complexfoo => __real__ complexfoo */
15796 else if (TREE_CODE (optype) == COMPLEX_TYPE
15797 && type == TREE_TYPE (optype))
15798 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15799 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15800 else if (TREE_CODE (optype) == VECTOR_TYPE
15801 && type == TREE_TYPE (optype))
15802 {
15803 tree part_width = TYPE_SIZE (type);
15804 tree index = bitsize_int (0);
15805 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15806 }
15807 }
15808
15809 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15810 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15811 {
15812 tree op00 = TREE_OPERAND (sub, 0);
15813 tree op01 = TREE_OPERAND (sub, 1);
15814
15815 STRIP_NOPS (op00);
15816 if (TREE_CODE (op00) == ADDR_EXPR)
15817 {
15818 tree op00type;
15819 op00 = TREE_OPERAND (op00, 0);
15820 op00type = TREE_TYPE (op00);
15821
15822 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15823 if (TREE_CODE (op00type) == VECTOR_TYPE
15824 && type == TREE_TYPE (op00type))
15825 {
15826 HOST_WIDE_INT offset = tree_to_shwi (op01);
15827 tree part_width = TYPE_SIZE (type);
15828 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
15829 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15830 tree index = bitsize_int (indexi);
15831
15832 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15833 return fold_build3_loc (loc,
15834 BIT_FIELD_REF, type, op00,
15835 part_width, index);
15836
15837 }
15838 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15839 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15840 && type == TREE_TYPE (op00type))
15841 {
15842 tree size = TYPE_SIZE_UNIT (type);
15843 if (tree_int_cst_equal (size, op01))
15844 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15845 }
15846 /* ((foo *)&fooarray)[1] => fooarray[1] */
15847 else if (TREE_CODE (op00type) == ARRAY_TYPE
15848 && type == TREE_TYPE (op00type))
15849 {
15850 tree type_domain = TYPE_DOMAIN (op00type);
15851 tree min_val = size_zero_node;
15852 if (type_domain && TYPE_MIN_VALUE (type_domain))
15853 min_val = TYPE_MIN_VALUE (type_domain);
15854 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15855 TYPE_SIZE_UNIT (type));
15856 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15857 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15858 NULL_TREE, NULL_TREE);
15859 }
15860 }
15861 }
15862
15863 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15864 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15865 && type == TREE_TYPE (TREE_TYPE (subtype))
15866 && (!in_gimple_form
15867 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15868 {
15869 tree type_domain;
15870 tree min_val = size_zero_node;
15871 sub = build_fold_indirect_ref_loc (loc, sub);
15872 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15873 if (type_domain && TYPE_MIN_VALUE (type_domain))
15874 min_val = TYPE_MIN_VALUE (type_domain);
15875 if (in_gimple_form
15876 && TREE_CODE (min_val) != INTEGER_CST)
15877 return NULL_TREE;
15878 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15879 NULL_TREE);
15880 }
15881
15882 return NULL_TREE;
15883 }
15884
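/* A worked instance of the POINTER_PLUS_EXPR handling below: for
   "_Complex double c;", the expression *((double *) &c + 1) has an
   offset equal to TYPE_SIZE_UNIT (double), i.e. 8 bytes on typical
   targets, so it folds to __imag__ c; an offset of 0 is instead
   caught by the ADDR_EXPR case and folds to __real__ c.  */
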
15885 /* Builds an expression for an indirection through T, simplifying some
15886 cases. */
15887
15888 tree
15889 build_fold_indirect_ref_loc (location_t loc, tree t)
15890 {
15891 tree type = TREE_TYPE (TREE_TYPE (t));
15892 tree sub = fold_indirect_ref_1 (loc, type, t);
15893
15894 if (sub)
15895 return sub;
15896
15897 return build1_loc (loc, INDIRECT_REF, type, t);
15898 }
15899
15900 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15901
15902 tree
15903 fold_indirect_ref_loc (location_t loc, tree t)
15904 {
15905 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15906
15907 if (sub)
15908 return sub;
15909 else
15910 return t;
15911 }
15912
15913 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15914 whose result is ignored. The type of the returned tree need not be
15915 the same as that of the original expression. */
15916
15917 tree
15918 fold_ignored_result (tree t)
15919 {
15920 if (!TREE_SIDE_EFFECTS (t))
15921 return integer_zero_node;
15922
15923 for (;;)
15924 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15925 {
15926 case tcc_unary:
15927 t = TREE_OPERAND (t, 0);
15928 break;
15929
15930 case tcc_binary:
15931 case tcc_comparison:
15932 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15933 t = TREE_OPERAND (t, 0);
15934 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15935 t = TREE_OPERAND (t, 1);
15936 else
15937 return t;
15938 break;
15939
15940 case tcc_expression:
15941 switch (TREE_CODE (t))
15942 {
15943 case COMPOUND_EXPR:
15944 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15945 return t;
15946 t = TREE_OPERAND (t, 0);
15947 break;
15948
15949 case COND_EXPR:
15950 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15951 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15952 return t;
15953 t = TREE_OPERAND (t, 0);
15954 break;
15955
15956 default:
15957 return t;
15958 }
15959 break;
15960
15961 default:
15962 return t;
15963 }
15964 }
15965
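/* For example, if T is the COMPOUND_EXPR "(x = 1, y + 2)" whose value
   is unused, the side-effect-free second operand is dropped and the
   MODIFY_EXPR "x = 1" is returned; a COND_EXPR keeps only its
   condition when both arms are free of side effects.  */
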
15966 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15967
15968 tree
15969 round_up_loc (location_t loc, tree value, unsigned int divisor)
15970 {
15971 tree div = NULL_TREE;
15972
15973 if (divisor == 1)
15974 return value;
15975
15976 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15977 have to do anything.  Only do this check when VALUE is not a
15978 constant, because for a constant the check is more expensive
15979 than simply performing the rounding. */
15980 if (TREE_CODE (value) != INTEGER_CST)
15981 {
15982 div = build_int_cst (TREE_TYPE (value), divisor);
15983
15984 if (multiple_of_p (TREE_TYPE (value), value, div))
15985 return value;
15986 }
15987
15988 /* If divisor is a power of two, simplify this to bit manipulation. */
15989 if (divisor == (divisor & -divisor))
15990 {
15991 if (TREE_CODE (value) == INTEGER_CST)
15992 {
15993 wide_int val = value;
15994 bool overflow_p;
15995
15996 if ((val & (divisor - 1)) == 0)
15997 return value;
15998
15999 overflow_p = TREE_OVERFLOW (value);
16000 val &= ~(divisor - 1);
16001 val += divisor;
16002 if (val == 0)
16003 overflow_p = true;
16004
16005 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16006 }
16007 else
16008 {
16009 tree t;
16010
16011 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16012 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16013 t = build_int_cst (TREE_TYPE (value), -divisor);
16014 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16015 }
16016 }
16017 else
16018 {
16019 if (!div)
16020 div = build_int_cst (TREE_TYPE (value), divisor);
16021 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16022 value = size_binop_loc (loc, MULT_EXPR, value, div);
16023 }
16024
16025 return value;
16026 }
16027
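/* A worked instance of the power-of-two path above, with DIVISOR == 8:
   rounding the constant 13 computes (13 & ~7) + 8 == 16, while a
   non-constant VALUE becomes the expression (VALUE + 7) & -8, which
   yields the same result without a division.  */
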
16028 /* Likewise, but round down. */
16029
16030 tree
16031 round_down_loc (location_t loc, tree value, int divisor)
16032 {
16033 tree div = NULL_TREE;
16034
16035 gcc_assert (divisor > 0);
16036 if (divisor == 1)
16037 return value;
16038
16039 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16040 have to do anything.  Only do this check when VALUE is not a
16041 constant, because for a constant the check is more expensive
16042 than simply performing the rounding. */
16043 if (TREE_CODE (value) != INTEGER_CST)
16044 {
16045 div = build_int_cst (TREE_TYPE (value), divisor);
16046
16047 if (multiple_of_p (TREE_TYPE (value), value, div))
16048 return value;
16049 }
16050
16051 /* If divisor is a power of two, simplify this to bit manipulation. */
16052 if (divisor == (divisor & -divisor))
16053 {
16054 tree t;
16055
16056 t = build_int_cst (TREE_TYPE (value), -divisor);
16057 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16058 }
16059 else
16060 {
16061 if (!div)
16062 div = build_int_cst (TREE_TYPE (value), divisor);
16063 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16064 value = size_binop_loc (loc, MULT_EXPR, value, div);
16065 }
16066
16067 return value;
16068 }
16069
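/* Likewise for rounding down with DIVISOR == 8: the expression
   becomes VALUE & -8, so 13 rounds down to 8.  */
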
16070 /* Return a pointer to the base of the object addressed by EXP and
16071 extract information about the offset of the access, storing it
16072 in *PBITPOS and *POFFSET. */
16073
16074 static tree
16075 split_address_to_core_and_offset (tree exp,
16076 HOST_WIDE_INT *pbitpos, tree *poffset)
16077 {
16078 tree core;
16079 machine_mode mode;
16080 int unsignedp, volatilep;
16081 HOST_WIDE_INT bitsize;
16082 location_t loc = EXPR_LOCATION (exp);
16083
16084 if (TREE_CODE (exp) == ADDR_EXPR)
16085 {
16086 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16087 poffset, &mode, &unsignedp, &volatilep,
16088 false);
16089 core = build_fold_addr_expr_loc (loc, core);
16090 }
16091 else
16092 {
16093 core = exp;
16094 *pbitpos = 0;
16095 *poffset = NULL_TREE;
16096 }
16097
16098 return core;
16099 }
16100
16101 /* Return true if the addresses of E1 and E2 differ by a constant,
16102 false otherwise.  If they do, E1 - E2 is stored in *DIFF. */
16103
16104 bool
16105 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16106 {
16107 tree core1, core2;
16108 HOST_WIDE_INT bitpos1, bitpos2;
16109 tree toffset1, toffset2, tdiff, type;
16110
16111 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16112 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16113
16114 if (bitpos1 % BITS_PER_UNIT != 0
16115 || bitpos2 % BITS_PER_UNIT != 0
16116 || !operand_equal_p (core1, core2, 0))
16117 return false;
16118
16119 if (toffset1 && toffset2)
16120 {
16121 type = TREE_TYPE (toffset1);
16122 if (type != TREE_TYPE (toffset2))
16123 toffset2 = fold_convert (type, toffset2);
16124
16125 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16126 if (!cst_and_fits_in_hwi (tdiff))
16127 return false;
16128
16129 *diff = int_cst_value (tdiff);
16130 }
16131 else if (toffset1 || toffset2)
16132 {
16133 /* If only one of the offsets is non-constant, the difference cannot
16134 be a constant. */
16135 return false;
16136 }
16137 else
16138 *diff = 0;
16139
16140 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16141 return true;
16142 }
16143
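/* For example (a sketch, with A an array of 32-bit ints declared
   elsewhere), comparing the addresses &A[3] and &A[1] finds the same
   core A with bit positions 96 and 32 and no variable offsets, so the
   function stores (96 - 32) / 8 == 8 bytes in *DIFF and returns true;
   addresses with different cores, or with a variable offset on only
   one side, make it return false.  */
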
16144 /* Simplify the floating point expression EXP when the sign of the
16145 result is not significant. Return NULL_TREE if no simplification
16146 is possible. */
16147
16148 tree
16149 fold_strip_sign_ops (tree exp)
16150 {
16151 tree arg0, arg1;
16152 location_t loc = EXPR_LOCATION (exp);
16153
16154 switch (TREE_CODE (exp))
16155 {
16156 case ABS_EXPR:
16157 case NEGATE_EXPR:
16158 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16159 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16160
16161 case MULT_EXPR:
16162 case RDIV_EXPR:
16163 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
16164 return NULL_TREE;
16165 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16166 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16167 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16168 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16169 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16170 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16171 break;
16172
16173 case COMPOUND_EXPR:
16174 arg0 = TREE_OPERAND (exp, 0);
16175 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16176 if (arg1)
16177 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16178 break;
16179
16180 case COND_EXPR:
16181 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16182 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16183 if (arg0 || arg1)
16184 return fold_build3_loc (loc,
16185 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16186 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16187 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16188 break;
16189
16190 case CALL_EXPR:
16191 {
16192 const enum built_in_function fcode = builtin_mathfn_code (exp);
16193 switch (fcode)
16194 {
16195 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16196 /* Strip the copysign function call and return the first argument. */
16197 arg0 = CALL_EXPR_ARG (exp, 0);
16198 arg1 = CALL_EXPR_ARG (exp, 1);
16199 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16200
16201 default:
16202 /* Strip sign ops from the argument of "odd" math functions. */
16203 if (negate_mathfn_p (fcode))
16204 {
16205 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16206 if (arg0)
16207 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16208 }
16209 break;
16210 }
16211 }
16212 break;
16213
16214 default:
16215 break;
16216 }
16217 return NULL_TREE;
16218 }
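
/* For example, when the caller only needs the magnitude of the result
   (say, the argument of a further fabs call), fold_strip_sign_ops
   rewrites -x * y to x * y and strips an outer ABS_EXPR or
   NEGATE_EXPR entirely; it declines (NULL_TREE) when the target
   honors sign-dependent rounding, since dropping a negation could
   then change the rounded value.  */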