re PR middle-end/56917 (-ftrapv detects a overflow wrongly.)
[gcc.git] / gcc / fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
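/* Illustrative sketch (an editorial aside, never compiled): how a caller
   might use the entry points described above to fold 2 + 3 into a single
   sizetype constant.  */
#if 0
  tree a = size_int (2);
  tree b = size_int (3);
  tree sum = size_binop (PLUS_EXPR, a, b);  /* INTEGER_CST 5 of sizetype.  */
#endif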

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
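/* The bits are LT, EQ, GT and UNORD, so, for example,
   COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
   COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD).
   The union of two comparisons is therefore the bitwise OR of their
   codes and their intersection is the bitwise AND.  */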

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */
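/* For example, ARG1 == 12 and ARG2 == 4 gives 3, while ARG1 == 13 and
   ARG2 == 4 gives NULL_TREE because the remainder is nonzero.  */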

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
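/* Illustrative sketch (an editorial aside, never compiled; the "used"
   condition is an assumption): a caller folding speculatively, then
   issuing any deferred warning only when the folded result is usable.  */
#if 0
  fold_defer_overflow_warnings ();
  tree folded = fold (expr);
  bool used = TREE_CODE (folded) == INTEGER_CST;
  fold_undefer_overflow_warnings (used, /*stmt=*/NULL, /*code=*/0);
#endif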

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
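/* For example, in a signed 32-bit type the only value that cannot be
   negated is -2147483648, which is exactly the value with only the
   sign bit set.  */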

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (!TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
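/* For example, splitting IN = a - 5 with CODE == PLUS_EXPR yields
   VAR == a, *CONP == 0, *LITP == 0 and *MINUS_LITP == 5: the subtracted
   literal is reported through *MINUS_LITP rather than being negated.  */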

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
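/* Illustrative sketch (an editorial aside, never compiled): signed
   overflow in constant arithmetic is not lost but recorded on the
   result via TREE_OVERFLOW.  */
#if 0
  tree t = int_const_binop (PLUS_EXPR,
                            TYPE_MAX_VALUE (integer_type_node),
                            build_int_cst (integer_type_node, 1));
  /* t is INT_MIN with TREE_OVERFLOW (t) set.  */
#endif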

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument, put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));
  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
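/* For example, size_diffop of the sizetype constants 3 and 5 is the
   ssizetype value -2, computed as - (5 - 3) so that neither subtraction
   overflows in the signed type.  */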
1821 \f
1822 /* A subroutine of fold_convert_const handling conversions of an
1823 INTEGER_CST to another integer type. */
1824
1825 static tree
1826 fold_convert_const_int_from_int (tree type, const_tree arg1)
1827 {
1828 /* Given an integer constant, make new constant with new type,
1829 appropriately sign-extended or truncated. Use widest_int
1830 so that any extension is done according ARG1's type. */
1831 return force_fit_type (type, wi::to_widest (arg1),
1832 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1833 TREE_OVERFLOW (arg1));
1834 }
1835
1836 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1837 to an integer type. */
1838
1839 static tree
1840 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1841 {
1842 bool overflow = false;
1843 tree t;
1844
1845 /* The following code implements the floating point to integer
1846 conversion rules required by the Java Language Specification,
1847 that IEEE NaNs are mapped to zero and values that overflow
1848 the target precision saturate, i.e. values greater than
1849 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1850 are mapped to INT_MIN. These semantics are allowed by the
1851 C and C++ standards that simply state that the behavior of
1852 FP-to-integer conversion is unspecified upon overflow. */
1853
1854 wide_int val;
1855 REAL_VALUE_TYPE r;
1856 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1857
1858 switch (code)
1859 {
1860 case FIX_TRUNC_EXPR:
1861 real_trunc (&r, VOIDmode, &x);
1862 break;
1863
1864 default:
1865 gcc_unreachable ();
1866 }
1867
1868 /* If R is NaN, return zero and show we have an overflow. */
1869 if (REAL_VALUE_ISNAN (r))
1870 {
1871 overflow = true;
1872 val = wi::zero (TYPE_PRECISION (type));
1873 }
1874
1875 /* See if R is less than the lower bound or greater than the
1876 upper bound. */
1877
1878 if (! overflow)
1879 {
1880 tree lt = TYPE_MIN_VALUE (type);
1881 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1882 if (REAL_VALUES_LESS (r, l))
1883 {
1884 overflow = true;
1885 val = lt;
1886 }
1887 }
1888
1889 if (! overflow)
1890 {
1891 tree ut = TYPE_MAX_VALUE (type);
1892 if (ut)
1893 {
1894 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1895 if (REAL_VALUES_LESS (u, r))
1896 {
1897 overflow = true;
1898 val = ut;
1899 }
1900 }
1901 }
1902
1903 if (! overflow)
1904 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1905
1906 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1907 return t;
1908 }
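/* For example (hypothetical values, 32-bit int target type): truncating the
REAL_CST 3.9 yields 3 and -3.9 yields -3; a NaN folds to 0 and 1.0e30
saturates to INT_MAX, and in both of the latter cases TREE_OVERFLOW is set
on the result so callers can diagnose the conversion.  */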
1909
1910 /* A subroutine of fold_convert_const handling conversions of a
1911 FIXED_CST to an integer type. */
1912
1913 static tree
1914 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1915 {
1916 tree t;
1917 double_int temp, temp_trunc;
1918 unsigned int mode;
1919
1920 /* Right-shift the FIXED_CST value into TEMP by FBIT bits. */
1921 temp = TREE_FIXED_CST (arg1).data;
1922 mode = TREE_FIXED_CST (arg1).mode;
1923 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1924 {
1925 temp = temp.rshift (GET_MODE_FBIT (mode),
1926 HOST_BITS_PER_DOUBLE_INT,
1927 SIGNED_FIXED_POINT_MODE_P (mode));
1928
1929 /* Left shift temp to temp_trunc by fbit. */
1930 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1931 HOST_BITS_PER_DOUBLE_INT,
1932 SIGNED_FIXED_POINT_MODE_P (mode));
1933 }
1934 else
1935 {
1936 temp = double_int_zero;
1937 temp_trunc = double_int_zero;
1938 }
1939
1940 /* If FIXED_CST is negative, we need to round the value toward 0:
1941 if any of the discarded fractional bits are nonzero, add 1 to TEMP. */
1942 if (SIGNED_FIXED_POINT_MODE_P (mode)
1943 && temp_trunc.is_negative ()
1944 && TREE_FIXED_CST (arg1).data != temp_trunc)
1945 temp += double_int_one;
1946
1947 /* Given a fixed-point constant, make a new constant with the new type,
1948 appropriately sign-extended or truncated. */
1949 t = force_fit_type (type, temp, -1,
1950 (temp.is_negative ()
1951 && (TYPE_UNSIGNED (type)
1952 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1953 | TREE_OVERFLOW (arg1));
1954
1955 return t;
1956 }
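/* A worked example, assuming a signed fixed-point mode with 8 fractional
bits: the value -0.5 is stored as -128, so TEMP becomes -128 >> 8 == -1 and
TEMP_TRUNC becomes -1 << 8 == -256.  The original data (-128) differs from
TEMP_TRUNC, so the discarded fraction was nonzero and 1 is added, giving 0,
i.e. rounding toward zero.  */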
1957
1958 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1959 to another floating point type. */
1960
1961 static tree
1962 fold_convert_const_real_from_real (tree type, const_tree arg1)
1963 {
1964 REAL_VALUE_TYPE value;
1965 tree t;
1966
1967 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1968 t = build_real (type, value);
1969
1970 /* If converting an infinity or NAN to a representation that doesn't
1971 have one, set the overflow bit so that we can produce some kind of
1972 error message at the appropriate point if necessary. It's not the
1973 most user-friendly message, but it's better than nothing. */
1974 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1975 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1976 TREE_OVERFLOW (t) = 1;
1977 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1978 && !MODE_HAS_NANS (TYPE_MODE (type)))
1979 TREE_OVERFLOW (t) = 1;
1980 /* Regular overflow, conversion produced an infinity in a mode that
1981 can't represent them. */
1982 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1983 && REAL_VALUE_ISINF (value)
1984 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1985 TREE_OVERFLOW (t) = 1;
1986 else
1987 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1988 return t;
1989 }
1990
1991 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1992 to a floating point type. */
1993
1994 static tree
1995 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1996 {
1997 REAL_VALUE_TYPE value;
1998 tree t;
1999
2000 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2001 t = build_real (type, value);
2002
2003 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2004 return t;
2005 }
2006
2007 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2008 to another fixed-point type. */
2009
2010 static tree
2011 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2012 {
2013 FIXED_VALUE_TYPE value;
2014 tree t;
2015 bool overflow_p;
2016
2017 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2018 TYPE_SATURATING (type));
2019 t = build_fixed (type, value);
2020
2021 /* Propagate overflow flags. */
2022 if (overflow_p | TREE_OVERFLOW (arg1))
2023 TREE_OVERFLOW (t) = 1;
2024 return t;
2025 }
2026
2027 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2028 to a fixed-point type. */
2029
2030 static tree
2031 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2032 {
2033 FIXED_VALUE_TYPE value;
2034 tree t;
2035 bool overflow_p;
2036 double_int di;
2037
2038 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2039
2040 di.low = TREE_INT_CST_ELT (arg1, 0);
2041 if (TREE_INT_CST_NUNITS (arg1) == 1)
2042 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2043 else
2044 di.high = TREE_INT_CST_ELT (arg1, 1);
2045
2046 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2047 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2048 TYPE_SATURATING (type));
2049 t = build_fixed (type, value);
2050
2051 /* Propagate overflow flags. */
2052 if (overflow_p | TREE_OVERFLOW (arg1))
2053 TREE_OVERFLOW (t) = 1;
2054 return t;
2055 }
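/* For illustration: a one-element INTEGER_CST holding -1 has its single
HOST_WIDE_INT element copied into DI.LOW, and because that element is
negative DI.HIGH is filled with -1 as well, reproducing the sign extension
that a two-element constant would store explicitly.  */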
2056
2057 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2058 to a fixed-point type. */
2059
2060 static tree
2061 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2062 {
2063 FIXED_VALUE_TYPE value;
2064 tree t;
2065 bool overflow_p;
2066
2067 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2068 &TREE_REAL_CST (arg1),
2069 TYPE_SATURATING (type));
2070 t = build_fixed (type, value);
2071
2072 /* Propagate overflow flags. */
2073 if (overflow_p | TREE_OVERFLOW (arg1))
2074 TREE_OVERFLOW (t) = 1;
2075 return t;
2076 }
2077
2078 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2079 type TYPE. If no simplification can be done return NULL_TREE. */
2080
2081 static tree
2082 fold_convert_const (enum tree_code code, tree type, tree arg1)
2083 {
2084 if (TREE_TYPE (arg1) == type)
2085 return arg1;
2086
2087 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2088 || TREE_CODE (type) == OFFSET_TYPE)
2089 {
2090 if (TREE_CODE (arg1) == INTEGER_CST)
2091 return fold_convert_const_int_from_int (type, arg1);
2092 else if (TREE_CODE (arg1) == REAL_CST)
2093 return fold_convert_const_int_from_real (code, type, arg1);
2094 else if (TREE_CODE (arg1) == FIXED_CST)
2095 return fold_convert_const_int_from_fixed (type, arg1);
2096 }
2097 else if (TREE_CODE (type) == REAL_TYPE)
2098 {
2099 if (TREE_CODE (arg1) == INTEGER_CST)
2100 return build_real_from_int_cst (type, arg1);
2101 else if (TREE_CODE (arg1) == REAL_CST)
2102 return fold_convert_const_real_from_real (type, arg1);
2103 else if (TREE_CODE (arg1) == FIXED_CST)
2104 return fold_convert_const_real_from_fixed (type, arg1);
2105 }
2106 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2107 {
2108 if (TREE_CODE (arg1) == FIXED_CST)
2109 return fold_convert_const_fixed_from_fixed (type, arg1);
2110 else if (TREE_CODE (arg1) == INTEGER_CST)
2111 return fold_convert_const_fixed_from_int (type, arg1);
2112 else if (TREE_CODE (arg1) == REAL_CST)
2113 return fold_convert_const_fixed_from_real (type, arg1);
2114 }
2115 return NULL_TREE;
2116 }
2117
2118 /* Construct a vector of zero elements of vector type TYPE. */
2119
2120 static tree
2121 build_zero_vector (tree type)
2122 {
2123 tree t;
2124
2125 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2126 return build_vector_from_val (type, t);
2127 }
2128
2129 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2130
2131 bool
2132 fold_convertible_p (const_tree type, const_tree arg)
2133 {
2134 tree orig = TREE_TYPE (arg);
2135
2136 if (type == orig)
2137 return true;
2138
2139 if (TREE_CODE (arg) == ERROR_MARK
2140 || TREE_CODE (type) == ERROR_MARK
2141 || TREE_CODE (orig) == ERROR_MARK)
2142 return false;
2143
2144 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2145 return true;
2146
2147 switch (TREE_CODE (type))
2148 {
2149 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2150 case POINTER_TYPE: case REFERENCE_TYPE:
2151 case OFFSET_TYPE:
2152 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2153 || TREE_CODE (orig) == OFFSET_TYPE)
2154 return true;
2155 return (TREE_CODE (orig) == VECTOR_TYPE
2156 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2157
2158 case REAL_TYPE:
2159 case FIXED_POINT_TYPE:
2160 case COMPLEX_TYPE:
2161 case VECTOR_TYPE:
2162 case VOID_TYPE:
2163 return TREE_CODE (type) == TREE_CODE (orig);
2164
2165 default:
2166 return false;
2167 }
2168 }
2169
2170 /* Convert expression ARG to type TYPE. Used by the middle-end for
2171 simple conversions in preference to calling the front-end's convert. */
2172
2173 tree
2174 fold_convert_loc (location_t loc, tree type, tree arg)
2175 {
2176 tree orig = TREE_TYPE (arg);
2177 tree tem;
2178
2179 if (type == orig)
2180 return arg;
2181
2182 if (TREE_CODE (arg) == ERROR_MARK
2183 || TREE_CODE (type) == ERROR_MARK
2184 || TREE_CODE (orig) == ERROR_MARK)
2185 return error_mark_node;
2186
2187 switch (TREE_CODE (type))
2188 {
2189 case POINTER_TYPE:
2190 case REFERENCE_TYPE:
2191 /* Handle conversions between pointers to different address spaces. */
2192 if (POINTER_TYPE_P (orig)
2193 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2194 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2195 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2196 /* fall through */
2197
2198 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2199 case OFFSET_TYPE:
2200 if (TREE_CODE (arg) == INTEGER_CST)
2201 {
2202 tem = fold_convert_const (NOP_EXPR, type, arg);
2203 if (tem != NULL_TREE)
2204 return tem;
2205 }
2206 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2207 || TREE_CODE (orig) == OFFSET_TYPE)
2208 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2209 if (TREE_CODE (orig) == COMPLEX_TYPE)
2210 return fold_convert_loc (loc, type,
2211 fold_build1_loc (loc, REALPART_EXPR,
2212 TREE_TYPE (orig), arg));
2213 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2214 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2215 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2216
2217 case REAL_TYPE:
2218 if (TREE_CODE (arg) == INTEGER_CST)
2219 {
2220 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2221 if (tem != NULL_TREE)
2222 return tem;
2223 }
2224 else if (TREE_CODE (arg) == REAL_CST)
2225 {
2226 tem = fold_convert_const (NOP_EXPR, type, arg);
2227 if (tem != NULL_TREE)
2228 return tem;
2229 }
2230 else if (TREE_CODE (arg) == FIXED_CST)
2231 {
2232 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2233 if (tem != NULL_TREE)
2234 return tem;
2235 }
2236
2237 switch (TREE_CODE (orig))
2238 {
2239 case INTEGER_TYPE:
2240 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2241 case POINTER_TYPE: case REFERENCE_TYPE:
2242 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2243
2244 case REAL_TYPE:
2245 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2246
2247 case FIXED_POINT_TYPE:
2248 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2249
2250 case COMPLEX_TYPE:
2251 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2252 return fold_convert_loc (loc, type, tem);
2253
2254 default:
2255 gcc_unreachable ();
2256 }
2257
2258 case FIXED_POINT_TYPE:
2259 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2260 || TREE_CODE (arg) == REAL_CST)
2261 {
2262 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2263 if (tem != NULL_TREE)
2264 goto fold_convert_exit;
2265 }
2266
2267 switch (TREE_CODE (orig))
2268 {
2269 case FIXED_POINT_TYPE:
2270 case INTEGER_TYPE:
2271 case ENUMERAL_TYPE:
2272 case BOOLEAN_TYPE:
2273 case REAL_TYPE:
2274 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2275
2276 case COMPLEX_TYPE:
2277 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2278 return fold_convert_loc (loc, type, tem);
2279
2280 default:
2281 gcc_unreachable ();
2282 }
2283
2284 case COMPLEX_TYPE:
2285 switch (TREE_CODE (orig))
2286 {
2287 case INTEGER_TYPE:
2288 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2289 case POINTER_TYPE: case REFERENCE_TYPE:
2290 case REAL_TYPE:
2291 case FIXED_POINT_TYPE:
2292 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2293 fold_convert_loc (loc, TREE_TYPE (type), arg),
2294 fold_convert_loc (loc, TREE_TYPE (type),
2295 integer_zero_node));
2296 case COMPLEX_TYPE:
2297 {
2298 tree rpart, ipart;
2299
2300 if (TREE_CODE (arg) == COMPLEX_EXPR)
2301 {
2302 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2303 TREE_OPERAND (arg, 0));
2304 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2305 TREE_OPERAND (arg, 1));
2306 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2307 }
2308
2309 arg = save_expr (arg);
2310 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2311 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2312 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2313 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2314 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2315 }
2316
2317 default:
2318 gcc_unreachable ();
2319 }
2320
2321 case VECTOR_TYPE:
2322 if (integer_zerop (arg))
2323 return build_zero_vector (type);
2324 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2325 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2326 || TREE_CODE (orig) == VECTOR_TYPE);
2327 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2328
2329 case VOID_TYPE:
2330 tem = fold_ignored_result (arg);
2331 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2332
2333 default:
2334 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2335 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2336 gcc_unreachable ();
2337 }
2338 fold_convert_exit:
2339 protected_set_expr_location_unshare (tem, loc);
2340 return tem;
2341 }
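/* An illustrative case (hypothetical argument): converting the integer
constant 3 to a complex floating type goes through the COMPLEX_TYPE arm
above and builds COMPLEX_EXPR <3.0, 0.0>, i.e. the real part is the
converted scalar and the imaginary part is a converted zero.  */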
2342 \f
2343 /* Return false if expr can be assumed not to be an lvalue, true
2344 otherwise. */
2345
2346 static bool
2347 maybe_lvalue_p (const_tree x)
2348 {
2349 /* We only need to wrap lvalue tree codes. */
2350 switch (TREE_CODE (x))
2351 {
2352 case VAR_DECL:
2353 case PARM_DECL:
2354 case RESULT_DECL:
2355 case LABEL_DECL:
2356 case FUNCTION_DECL:
2357 case SSA_NAME:
2358
2359 case COMPONENT_REF:
2360 case MEM_REF:
2361 case INDIRECT_REF:
2362 case ARRAY_REF:
2363 case ARRAY_RANGE_REF:
2364 case BIT_FIELD_REF:
2365 case OBJ_TYPE_REF:
2366
2367 case REALPART_EXPR:
2368 case IMAGPART_EXPR:
2369 case PREINCREMENT_EXPR:
2370 case PREDECREMENT_EXPR:
2371 case SAVE_EXPR:
2372 case TRY_CATCH_EXPR:
2373 case WITH_CLEANUP_EXPR:
2374 case COMPOUND_EXPR:
2375 case MODIFY_EXPR:
2376 case TARGET_EXPR:
2377 case COND_EXPR:
2378 case BIND_EXPR:
2379 break;
2380
2381 default:
2382 /* Assume the worst for front-end tree codes. */
2383 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2384 break;
2385 return false;
2386 }
2387
2388 return true;
2389 }
2390
2391 /* Return an expr equal to X but certainly not valid as an lvalue. */
2392
2393 tree
2394 non_lvalue_loc (location_t loc, tree x)
2395 {
2396 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2397 us. */
2398 if (in_gimple_form)
2399 return x;
2400
2401 if (! maybe_lvalue_p (x))
2402 return x;
2403 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2404 }
2405
2406 /* Historically, when pedantic, this returned an expr equal to X but not
2407 valid as a pedantic lvalue; it now only sets the location of X to LOC. */
2408
2409 static tree
2410 pedantic_non_lvalue_loc (location_t loc, tree x)
2411 {
2412 return protected_set_expr_location_unshare (x, loc);
2413 }
2414 \f
2415 /* Given a tree comparison code, return the code that is the logical inverse.
2416 It is generally not safe to do this for floating-point comparisons, except
2417 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2418 ERROR_MARK in this case. */
2419
2420 enum tree_code
2421 invert_tree_comparison (enum tree_code code, bool honor_nans)
2422 {
2423 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2424 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2425 return ERROR_MARK;
2426
2427 switch (code)
2428 {
2429 case EQ_EXPR:
2430 return NE_EXPR;
2431 case NE_EXPR:
2432 return EQ_EXPR;
2433 case GT_EXPR:
2434 return honor_nans ? UNLE_EXPR : LE_EXPR;
2435 case GE_EXPR:
2436 return honor_nans ? UNLT_EXPR : LT_EXPR;
2437 case LT_EXPR:
2438 return honor_nans ? UNGE_EXPR : GE_EXPR;
2439 case LE_EXPR:
2440 return honor_nans ? UNGT_EXPR : GT_EXPR;
2441 case LTGT_EXPR:
2442 return UNEQ_EXPR;
2443 case UNEQ_EXPR:
2444 return LTGT_EXPR;
2445 case UNGT_EXPR:
2446 return LE_EXPR;
2447 case UNGE_EXPR:
2448 return LT_EXPR;
2449 case UNLT_EXPR:
2450 return GE_EXPR;
2451 case UNLE_EXPR:
2452 return GT_EXPR;
2453 case ORDERED_EXPR:
2454 return UNORDERED_EXPR;
2455 case UNORDERED_EXPR:
2456 return ORDERED_EXPR;
2457 default:
2458 gcc_unreachable ();
2459 }
2460 }
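/* For example: inverting LT_EXPR with HONOR_NANS gives UNGE_EXPR, since
!(x < y) must also hold when either operand is a NaN; but when
flag_trapping_math is set the function returns ERROR_MARK for LT_EXPR,
because x < y traps on unordered operands while x UNGE y is quiet.  */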
2461
2462 /* Similar, but return the comparison that results if the operands are
2463 swapped. This is safe for floating-point. */
2464
2465 enum tree_code
2466 swap_tree_comparison (enum tree_code code)
2467 {
2468 switch (code)
2469 {
2470 case EQ_EXPR:
2471 case NE_EXPR:
2472 case ORDERED_EXPR:
2473 case UNORDERED_EXPR:
2474 case LTGT_EXPR:
2475 case UNEQ_EXPR:
2476 return code;
2477 case GT_EXPR:
2478 return LT_EXPR;
2479 case GE_EXPR:
2480 return LE_EXPR;
2481 case LT_EXPR:
2482 return GT_EXPR;
2483 case LE_EXPR:
2484 return GE_EXPR;
2485 case UNGT_EXPR:
2486 return UNLT_EXPR;
2487 case UNGE_EXPR:
2488 return UNLE_EXPR;
2489 case UNLT_EXPR:
2490 return UNGT_EXPR;
2491 case UNLE_EXPR:
2492 return UNGE_EXPR;
2493 default:
2494 gcc_unreachable ();
2495 }
2496 }
2497
2498
2499 /* Convert a comparison tree code from an enum tree_code representation
2500 into a compcode bit-based encoding. This function is the inverse of
2501 compcode_to_comparison. */
2502
2503 static enum comparison_code
2504 comparison_to_compcode (enum tree_code code)
2505 {
2506 switch (code)
2507 {
2508 case LT_EXPR:
2509 return COMPCODE_LT;
2510 case EQ_EXPR:
2511 return COMPCODE_EQ;
2512 case LE_EXPR:
2513 return COMPCODE_LE;
2514 case GT_EXPR:
2515 return COMPCODE_GT;
2516 case NE_EXPR:
2517 return COMPCODE_NE;
2518 case GE_EXPR:
2519 return COMPCODE_GE;
2520 case ORDERED_EXPR:
2521 return COMPCODE_ORD;
2522 case UNORDERED_EXPR:
2523 return COMPCODE_UNORD;
2524 case UNLT_EXPR:
2525 return COMPCODE_UNLT;
2526 case UNEQ_EXPR:
2527 return COMPCODE_UNEQ;
2528 case UNLE_EXPR:
2529 return COMPCODE_UNLE;
2530 case UNGT_EXPR:
2531 return COMPCODE_UNGT;
2532 case LTGT_EXPR:
2533 return COMPCODE_LTGT;
2534 case UNGE_EXPR:
2535 return COMPCODE_UNGE;
2536 default:
2537 gcc_unreachable ();
2538 }
2539 }
2540
2541 /* Convert a compcode bit-based encoding of a comparison operator back
2542 to GCC's enum tree_code representation. This function is the
2543 inverse of comparison_to_compcode. */
2544
2545 static enum tree_code
2546 compcode_to_comparison (enum comparison_code code)
2547 {
2548 switch (code)
2549 {
2550 case COMPCODE_LT:
2551 return LT_EXPR;
2552 case COMPCODE_EQ:
2553 return EQ_EXPR;
2554 case COMPCODE_LE:
2555 return LE_EXPR;
2556 case COMPCODE_GT:
2557 return GT_EXPR;
2558 case COMPCODE_NE:
2559 return NE_EXPR;
2560 case COMPCODE_GE:
2561 return GE_EXPR;
2562 case COMPCODE_ORD:
2563 return ORDERED_EXPR;
2564 case COMPCODE_UNORD:
2565 return UNORDERED_EXPR;
2566 case COMPCODE_UNLT:
2567 return UNLT_EXPR;
2568 case COMPCODE_UNEQ:
2569 return UNEQ_EXPR;
2570 case COMPCODE_UNLE:
2571 return UNLE_EXPR;
2572 case COMPCODE_UNGT:
2573 return UNGT_EXPR;
2574 case COMPCODE_LTGT:
2575 return LTGT_EXPR;
2576 case COMPCODE_UNGE:
2577 return UNGE_EXPR;
2578 default:
2579 gcc_unreachable ();
2580 }
2581 }
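/* Worked examples of the encoding: COMPCODE_LT (1) | COMPCODE_EQ (2)
== COMPCODE_LE (3), and COMPCODE_GE (6) & COMPCODE_LE (3)
== COMPCODE_EQ (2), so ORing or ANDing two predicates on the same operands
reduces to bit operations on their codes; the bit with value 8
(COMPCODE_UNORD) records whether a predicate is true on unordered
operands.  */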
2582
2583 /* Return a tree for the comparison which is the combination of
2584 doing the AND or OR (depending on CODE) of the two operations LCODE
2585 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2586 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2587 if this makes the transformation invalid. */
2588
2589 tree
2590 combine_comparisons (location_t loc,
2591 enum tree_code code, enum tree_code lcode,
2592 enum tree_code rcode, tree truth_type,
2593 tree ll_arg, tree lr_arg)
2594 {
2595 bool honor_nans = HONOR_NANS (element_mode (ll_arg));
2596 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2597 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2598 int compcode;
2599
2600 switch (code)
2601 {
2602 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2603 compcode = lcompcode & rcompcode;
2604 break;
2605
2606 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2607 compcode = lcompcode | rcompcode;
2608 break;
2609
2610 default:
2611 return NULL_TREE;
2612 }
2613
2614 if (!honor_nans)
2615 {
2616 /* Eliminate unordered comparisons, as well as LTGT and ORD
2617 which are not used unless the mode has NaNs. */
2618 compcode &= ~COMPCODE_UNORD;
2619 if (compcode == COMPCODE_LTGT)
2620 compcode = COMPCODE_NE;
2621 else if (compcode == COMPCODE_ORD)
2622 compcode = COMPCODE_TRUE;
2623 }
2624 else if (flag_trapping_math)
2625 {
2626 /* Check that the original operation and the optimized ones will trap
2627 under the same condition. */
2628 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2629 && (lcompcode != COMPCODE_EQ)
2630 && (lcompcode != COMPCODE_ORD);
2631 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2632 && (rcompcode != COMPCODE_EQ)
2633 && (rcompcode != COMPCODE_ORD);
2634 bool trap = (compcode & COMPCODE_UNORD) == 0
2635 && (compcode != COMPCODE_EQ)
2636 && (compcode != COMPCODE_ORD);
2637
2638 /* In a short-circuited boolean expression the LHS might be
2639 such that the RHS, if evaluated, will never trap. For
2640 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2641 if neither x nor y is NaN. (This is a mixed blessing: for
2642 example, the expression above will never trap, hence
2643 optimizing it to x < y would be invalid). */
2644 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2645 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2646 rtrap = false;
2647
2648 /* If the comparison was short-circuited, and only the RHS
2649 trapped, we may now generate a spurious trap. */
2650 if (rtrap && !ltrap
2651 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2652 return NULL_TREE;
2653
2654 /* If we changed the conditions that cause a trap, we lose. */
2655 if ((ltrap || rtrap) != trap)
2656 return NULL_TREE;
2657 }
2658
2659 if (compcode == COMPCODE_TRUE)
2660 return constant_boolean_node (true, truth_type);
2661 else if (compcode == COMPCODE_FALSE)
2662 return constant_boolean_node (false, truth_type);
2663 else
2664 {
2665 enum tree_code tcode;
2666
2667 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2668 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2669 }
2670 }
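/* Two illustrative combinations (hypothetical operands X and Y, NaNs not
honored): (X < Y) || (X == Y) has compcode LT | EQ == LE and folds to
X <= Y, while (X < Y) && (X == Y) has compcode LT & EQ == 0, i.e.
COMPCODE_FALSE, and folds to a constant false node.  */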
2671 \f
2672 /* Return nonzero if two operands (typically of the same tree node)
2673 are necessarily equal. If either argument has side-effects this
2674 function returns zero. FLAGS modifies behavior as follows:
2675
2676 If OEP_ONLY_CONST is set, only return nonzero for constants.
2677 This function tests whether the operands are indistinguishable;
2678 it does not test whether they are equal using C's == operation.
2679 The distinction is important for IEEE floating point, because
2680 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2681 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2682
2683 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2684 even though it may hold multiple values during a function.
2685 This is because a GCC tree node guarantees that nothing else is
2686 executed between the evaluation of its "operands" (which may often
2687 be evaluated in arbitrary order). Hence if the operands themselves
2688 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2689 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2690 unset means assuming isochronic (or instantaneous) tree equivalence.
2691 Unless comparing arbitrary expression trees, such as from different
2692 statements, this flag can usually be left unset.
2693
2694 If OEP_PURE_SAME is set, then pure functions with identical arguments
2695 are considered the same. It is used when the caller has other ways
2696 to ensure that global memory is unchanged in between. */
2697
2698 int
2699 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2700 {
2701 /* If either is ERROR_MARK, they aren't equal. */
2702 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2703 || TREE_TYPE (arg0) == error_mark_node
2704 || TREE_TYPE (arg1) == error_mark_node)
2705 return 0;
2706
2707 /* Similar, if either does not have a type (like a released SSA name),
2708 they aren't equal. */
2709 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2710 return 0;
2711
2712 /* Check equality of integer constants before bailing out due to
2713 precision differences. */
2714 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2715 return tree_int_cst_equal (arg0, arg1);
2716
2717 /* If the two types don't have the same signedness, then we can't consider
2718 them equal. We must check this before the STRIP_NOPS calls
2719 because they may change the signedness of the arguments. As pointers
2720 strictly don't have a signedness, require either two pointers or
2721 two non-pointers as well. */
2722 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2723 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2724 return 0;
2725
2726 /* We cannot consider pointers to different address space equal. */
2727 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2728 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2729 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2730 return 0;
2731
2732 /* If the two types don't have the same precision, then it is not safe
2733 to strip NOPs. */
2734 if (element_precision (TREE_TYPE (arg0))
2735 != element_precision (TREE_TYPE (arg1)))
2736 return 0;
2737
2738 STRIP_NOPS (arg0);
2739 STRIP_NOPS (arg1);
2740
2741 /* In case both args are comparisons but with different comparison
2742 code, try to swap the comparison operands of one arg to produce
2743 a match and compare that variant. */
2744 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2745 && COMPARISON_CLASS_P (arg0)
2746 && COMPARISON_CLASS_P (arg1))
2747 {
2748 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2749
2750 if (TREE_CODE (arg0) == swap_code)
2751 return operand_equal_p (TREE_OPERAND (arg0, 0),
2752 TREE_OPERAND (arg1, 1), flags)
2753 && operand_equal_p (TREE_OPERAND (arg0, 1),
2754 TREE_OPERAND (arg1, 0), flags);
2755 }
2756
2757 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2758 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2759 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2760 return 0;
2761
2762 /* This is needed for conversions and for COMPONENT_REF.
2763 Might as well play it safe and always test this. */
2764 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2765 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2766 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2767 return 0;
2768
2769 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2770 We don't care about side effects in that case because the SAVE_EXPR
2771 takes care of that for us. In all other cases, two expressions are
2772 equal if they have no side effects. If we have two identical
2773 expressions with side effects that should be treated the same due
2774 to the only side effects being identical SAVE_EXPR's, that will
2775 be detected in the recursive calls below.
2776 If we are taking an invariant address of two identical objects
2777 they are necessarily equal as well. */
2778 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2779 && (TREE_CODE (arg0) == SAVE_EXPR
2780 || (flags & OEP_CONSTANT_ADDRESS_OF)
2781 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2782 return 1;
2783
2784 /* Next handle constant cases, those for which we can return 1 even
2785 if ONLY_CONST is set. */
2786 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2787 switch (TREE_CODE (arg0))
2788 {
2789 case INTEGER_CST:
2790 return tree_int_cst_equal (arg0, arg1);
2791
2792 case FIXED_CST:
2793 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2794 TREE_FIXED_CST (arg1));
2795
2796 case REAL_CST:
2797 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2798 TREE_REAL_CST (arg1)))
2799 return 1;
2800
2801
2802 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2803 {
2804 /* If we do not distinguish between signed and unsigned zero,
2805 consider them equal. */
2806 if (real_zerop (arg0) && real_zerop (arg1))
2807 return 1;
2808 }
2809 return 0;
2810
2811 case VECTOR_CST:
2812 {
2813 unsigned i;
2814
2815 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2816 return 0;
2817
2818 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2819 {
2820 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2821 VECTOR_CST_ELT (arg1, i), flags))
2822 return 0;
2823 }
2824 return 1;
2825 }
2826
2827 case COMPLEX_CST:
2828 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2829 flags)
2830 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2831 flags));
2832
2833 case STRING_CST:
2834 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2835 && ! memcmp (TREE_STRING_POINTER (arg0),
2836 TREE_STRING_POINTER (arg1),
2837 TREE_STRING_LENGTH (arg0)));
2838
2839 case ADDR_EXPR:
2840 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2841 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2842 ? OEP_CONSTANT_ADDRESS_OF : 0);
2843 default:
2844 break;
2845 }
2846
2847 if (flags & OEP_ONLY_CONST)
2848 return 0;
2849
2850 /* Define macros to test an operand from arg0 and arg1 for equality and a
2851 variant that allows null and views null as being different from any
2852 non-null value. In the latter case, if either is null, then both
2853 must be; otherwise, do the normal comparison. */
2854 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2855 TREE_OPERAND (arg1, N), flags)
2856
2857 #define OP_SAME_WITH_NULL(N) \
2858 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2859 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2860
2861 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2862 {
2863 case tcc_unary:
2864 /* Two conversions are equal only if signedness and modes match. */
2865 switch (TREE_CODE (arg0))
2866 {
2867 CASE_CONVERT:
2868 case FIX_TRUNC_EXPR:
2869 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2870 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2871 return 0;
2872 break;
2873 default:
2874 break;
2875 }
2876
2877 return OP_SAME (0);
2878
2879
2880 case tcc_comparison:
2881 case tcc_binary:
2882 if (OP_SAME (0) && OP_SAME (1))
2883 return 1;
2884
2885 /* For commutative ops, allow the other order. */
2886 return (commutative_tree_code (TREE_CODE (arg0))
2887 && operand_equal_p (TREE_OPERAND (arg0, 0),
2888 TREE_OPERAND (arg1, 1), flags)
2889 && operand_equal_p (TREE_OPERAND (arg0, 1),
2890 TREE_OPERAND (arg1, 0), flags));
2891
2892 case tcc_reference:
2893 /* If either of the pointer (or reference) expressions we are
2894 dereferencing contain a side effect, these cannot be equal,
2895 but their addresses can be. */
2896 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2897 && (TREE_SIDE_EFFECTS (arg0)
2898 || TREE_SIDE_EFFECTS (arg1)))
2899 return 0;
2900
2901 switch (TREE_CODE (arg0))
2902 {
2903 case INDIRECT_REF:
2904 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2905 return OP_SAME (0);
2906
2907 case REALPART_EXPR:
2908 case IMAGPART_EXPR:
2909 return OP_SAME (0);
2910
2911 case TARGET_MEM_REF:
2912 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2913 /* Require equal extra operands and then fall through to MEM_REF
2914 handling of the two common operands. */
2915 if (!OP_SAME_WITH_NULL (2)
2916 || !OP_SAME_WITH_NULL (3)
2917 || !OP_SAME_WITH_NULL (4))
2918 return 0;
2919 /* Fallthru. */
2920 case MEM_REF:
2921 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2922 /* Require equal access sizes, and similar pointer types.
2923 We can have incomplete types for array references of
2924 variable-sized arrays from the Fortran frontend
2925 though. Also verify the types are compatible. */
2926 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2927 || (TYPE_SIZE (TREE_TYPE (arg0))
2928 && TYPE_SIZE (TREE_TYPE (arg1))
2929 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2930 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2931 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2932 && alias_ptr_types_compatible_p
2933 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2934 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2935 && OP_SAME (0) && OP_SAME (1));
2936
2937 case ARRAY_REF:
2938 case ARRAY_RANGE_REF:
2939 /* Operands 2 and 3 may be null.
2940 Compare the array index by value first if it is constant, as we
2941 may have different types but the same value here. */
2942 if (!OP_SAME (0))
2943 return 0;
2944 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2945 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2946 TREE_OPERAND (arg1, 1))
2947 || OP_SAME (1))
2948 && OP_SAME_WITH_NULL (2)
2949 && OP_SAME_WITH_NULL (3));
2950
2951 case COMPONENT_REF:
2952 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2953 may be NULL when we're called to compare MEM_EXPRs. */
2954 if (!OP_SAME_WITH_NULL (0)
2955 || !OP_SAME (1))
2956 return 0;
2957 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2958 return OP_SAME_WITH_NULL (2);
2959
2960 case BIT_FIELD_REF:
2961 if (!OP_SAME (0))
2962 return 0;
2963 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2964 return OP_SAME (1) && OP_SAME (2);
2965
2966 default:
2967 return 0;
2968 }
2969
2970 case tcc_expression:
2971 switch (TREE_CODE (arg0))
2972 {
2973 case ADDR_EXPR:
2974 case TRUTH_NOT_EXPR:
2975 return OP_SAME (0);
2976
2977 case TRUTH_ANDIF_EXPR:
2978 case TRUTH_ORIF_EXPR:
2979 return OP_SAME (0) && OP_SAME (1);
2980
2981 case FMA_EXPR:
2982 case WIDEN_MULT_PLUS_EXPR:
2983 case WIDEN_MULT_MINUS_EXPR:
2984 if (!OP_SAME (2))
2985 return 0;
2986 /* The multiplication operands are commutative. */
2987 /* FALLTHRU */
2988
2989 case TRUTH_AND_EXPR:
2990 case TRUTH_OR_EXPR:
2991 case TRUTH_XOR_EXPR:
2992 if (OP_SAME (0) && OP_SAME (1))
2993 return 1;
2994
2995 /* Otherwise take into account this is a commutative operation. */
2996 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2997 TREE_OPERAND (arg1, 1), flags)
2998 && operand_equal_p (TREE_OPERAND (arg0, 1),
2999 TREE_OPERAND (arg1, 0), flags));
3000
3001 case COND_EXPR:
3002 case VEC_COND_EXPR:
3003 case DOT_PROD_EXPR:
3004 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3005
3006 default:
3007 return 0;
3008 }
3009
3010 case tcc_vl_exp:
3011 switch (TREE_CODE (arg0))
3012 {
3013 case CALL_EXPR:
3014 /* If the CALL_EXPRs call different functions, then they
3015 clearly cannot be equal. */
3016 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3017 flags))
3018 return 0;
3019
3020 {
3021 unsigned int cef = call_expr_flags (arg0);
3022 if (flags & OEP_PURE_SAME)
3023 cef &= ECF_CONST | ECF_PURE;
3024 else
3025 cef &= ECF_CONST;
3026 if (!cef)
3027 return 0;
3028 }
3029
3030 /* Now see if all the arguments are the same. */
3031 {
3032 const_call_expr_arg_iterator iter0, iter1;
3033 const_tree a0, a1;
3034 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3035 a1 = first_const_call_expr_arg (arg1, &iter1);
3036 a0 && a1;
3037 a0 = next_const_call_expr_arg (&iter0),
3038 a1 = next_const_call_expr_arg (&iter1))
3039 if (! operand_equal_p (a0, a1, flags))
3040 return 0;
3041
3042 /* If we get here and both argument lists are exhausted
3043 then the CALL_EXPRs are equal. */
3044 return ! (a0 || a1);
3045 }
3046 default:
3047 return 0;
3048 }
3049
3050 case tcc_declaration:
3051 /* Consider __builtin_sqrt equal to sqrt. */
3052 return (TREE_CODE (arg0) == FUNCTION_DECL
3053 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3054 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3055 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3056
3057 default:
3058 return 0;
3059 }
3060
3061 #undef OP_SAME
3062 #undef OP_SAME_WITH_NULL
3063 }
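/* For example (hypothetical trees): A + B compares equal to B + A through
the commutative tcc_binary case, while a call F () never compares equal to
an identical call F () unless the callee is ECF_CONST (or ECF_PURE under
OEP_PURE_SAME), since otherwise the two calls may return different
values.  */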
3064 \f
3065 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3066 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3067
3068 When in doubt, return 0. */
3069
3070 static int
3071 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3072 {
3073 int unsignedp1, unsignedpo;
3074 tree primarg0, primarg1, primother;
3075 unsigned int correct_width;
3076
3077 if (operand_equal_p (arg0, arg1, 0))
3078 return 1;
3079
3080 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3081 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3082 return 0;
3083
3084 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3085 and see if the inner values are the same. This removes any
3086 signedness comparison, which doesn't matter here. */
3087 primarg0 = arg0, primarg1 = arg1;
3088 STRIP_NOPS (primarg0);
3089 STRIP_NOPS (primarg1);
3090 if (operand_equal_p (primarg0, primarg1, 0))
3091 return 1;
3092
3093 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3094 actual comparison operand, ARG0.
3095
3096 First throw away any conversions to wider types
3097 already present in the operands. */
3098
3099 primarg1 = get_narrower (arg1, &unsignedp1);
3100 primother = get_narrower (other, &unsignedpo);
3101
3102 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3103 if (unsignedp1 == unsignedpo
3104 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3105 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3106 {
3107 tree type = TREE_TYPE (arg0);
3108
3109 /* Make sure the shorter operand is extended the right way
3110 to match the longer operand. */
3111 primarg1 = fold_convert (signed_or_unsigned_type_for
3112 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3113
3114 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3115 return 1;
3116 }
3117
3118 return 0;
3119 }
3120 \f
3121 /* See if ARG is an expression that is either a comparison or is performing
3122 arithmetic on comparisons. The comparisons must only be comparing
3123 two different values, which will be stored in *CVAL1 and *CVAL2; if
3124 they are nonzero it means that some operands have already been found.
3125 No variables may be used anywhere else in the expression except in the
3126 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3127 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3128
3129 If this is true, return 1. Otherwise, return zero. */
3130
3131 static int
3132 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3133 {
3134 enum tree_code code = TREE_CODE (arg);
3135 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3136
3137 /* We can handle some of the tcc_expression cases here. */
3138 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3139 tclass = tcc_unary;
3140 else if (tclass == tcc_expression
3141 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3142 || code == COMPOUND_EXPR))
3143 tclass = tcc_binary;
3144
3145 else if (tclass == tcc_expression && code == SAVE_EXPR
3146 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3147 {
3148 /* If we've already found a CVAL1 or CVAL2, this expression is
3149 too complex to handle. */
3150 if (*cval1 || *cval2)
3151 return 0;
3152
3153 tclass = tcc_unary;
3154 *save_p = 1;
3155 }
3156
3157 switch (tclass)
3158 {
3159 case tcc_unary:
3160 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3161
3162 case tcc_binary:
3163 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3164 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3165 cval1, cval2, save_p));
3166
3167 case tcc_constant:
3168 return 1;
3169
3170 case tcc_expression:
3171 if (code == COND_EXPR)
3172 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3173 cval1, cval2, save_p)
3174 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3175 cval1, cval2, save_p)
3176 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3177 cval1, cval2, save_p));
3178 return 0;
3179
3180 case tcc_comparison:
3181 /* First see if we can handle the first operand, then the second. For
3182 the second operand, we know *CVAL1 can't be zero. It must be that
3183 one side of the comparison is each of the values; test for the
3184 case where this isn't true by failing if the two operands
3185 are the same. */
3186
3187 if (operand_equal_p (TREE_OPERAND (arg, 0),
3188 TREE_OPERAND (arg, 1), 0))
3189 return 0;
3190
3191 if (*cval1 == 0)
3192 *cval1 = TREE_OPERAND (arg, 0);
3193 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3194 ;
3195 else if (*cval2 == 0)
3196 *cval2 = TREE_OPERAND (arg, 0);
3197 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3198 ;
3199 else
3200 return 0;
3201
3202 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3203 ;
3204 else if (*cval2 == 0)
3205 *cval2 = TREE_OPERAND (arg, 1);
3206 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3207 ;
3208 else
3209 return 0;
3210
3211 return 1;
3212
3213 default:
3214 return 0;
3215 }
3216 }
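/* For illustration (hypothetical expression): for ARG == (A < B) || (A == B)
the first comparison records *CVAL1 = A and *CVAL2 = B, the second
comparison matches the same pair, and the function returns 1.  An
expression such as (A < B) || (C < D) fails because it would need more than
two distinct comparison operands.  */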
3217 \f
3218 /* ARG is a tree that is known to contain just arithmetic operations and
3219 comparisons. Evaluate the operations in the tree substituting NEW0 for
3220 any occurrence of OLD0 as an operand of a comparison and likewise for
3221 NEW1 and OLD1. */
3222
3223 static tree
3224 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3225 tree old1, tree new1)
3226 {
3227 tree type = TREE_TYPE (arg);
3228 enum tree_code code = TREE_CODE (arg);
3229 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3230
3231 /* We can handle some of the tcc_expression cases here. */
3232 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3233 tclass = tcc_unary;
3234 else if (tclass == tcc_expression
3235 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3236 tclass = tcc_binary;
3237
3238 switch (tclass)
3239 {
3240 case tcc_unary:
3241 return fold_build1_loc (loc, code, type,
3242 eval_subst (loc, TREE_OPERAND (arg, 0),
3243 old0, new0, old1, new1));
3244
3245 case tcc_binary:
3246 return fold_build2_loc (loc, code, type,
3247 eval_subst (loc, TREE_OPERAND (arg, 0),
3248 old0, new0, old1, new1),
3249 eval_subst (loc, TREE_OPERAND (arg, 1),
3250 old0, new0, old1, new1));
3251
3252 case tcc_expression:
3253 switch (code)
3254 {
3255 case SAVE_EXPR:
3256 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3257 old1, new1);
3258
3259 case COMPOUND_EXPR:
3260 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3261 old1, new1);
3262
3263 case COND_EXPR:
3264 return fold_build3_loc (loc, code, type,
3265 eval_subst (loc, TREE_OPERAND (arg, 0),
3266 old0, new0, old1, new1),
3267 eval_subst (loc, TREE_OPERAND (arg, 1),
3268 old0, new0, old1, new1),
3269 eval_subst (loc, TREE_OPERAND (arg, 2),
3270 old0, new0, old1, new1));
3271 default:
3272 break;
3273 }
3274 /* Fall through - ??? */
3275
3276 case tcc_comparison:
3277 {
3278 tree arg0 = TREE_OPERAND (arg, 0);
3279 tree arg1 = TREE_OPERAND (arg, 1);
3280
3281 /* We need to check both for exact equality and tree equality. The
3282 former will be true if the operand has a side-effect. In that
3283 case, we know the operand occurred exactly once. */
3284
3285 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3286 arg0 = new0;
3287 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3288 arg0 = new1;
3289
3290 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3291 arg1 = new0;
3292 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3293 arg1 = new1;
3294
3295 return fold_build2_loc (loc, code, type, arg0, arg1);
3296 }
3297
3298 default:
3299 return arg;
3300 }
3301 }
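/* A sketch of the substitution (hypothetical operands): applied to the tree
(A < B) || (A == B) with OLD0 = A, NEW0 = 0, OLD1 = B, NEW1 = 1, this
rebuilds the expression as (0 < 1) || (0 == 1), which the fold_build2_loc
calls immediately reduce to a constant.  */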
3302 \f
3303 /* Return a tree for the case when the result of an expression is RESULT
3304 converted to TYPE and OMITTED was previously an operand of the expression
3305 but is now not needed (e.g., we folded OMITTED * 0).
3306
3307 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3308 the conversion of RESULT to TYPE. */
3309
3310 tree
3311 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3312 {
3313 tree t = fold_convert_loc (loc, type, result);
3314
3315 /* If the resulting operand is an empty statement, just return the omitted
3316 statement cast to void. */
3317 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3318 return build1_loc (loc, NOP_EXPR, void_type_node,
3319 fold_ignored_result (omitted));
3320
3321 if (TREE_SIDE_EFFECTS (omitted))
3322 return build2_loc (loc, COMPOUND_EXPR, type,
3323 fold_ignored_result (omitted), t);
3324
3325 return non_lvalue_loc (loc, t);
3326 }
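/* For example (hypothetical source): when folding F () * 0 to 0, the call
F () may have side effects and cannot simply be dropped; the result is
built as COMPOUND_EXPR <F (), 0>, which evaluates the call and then yields
the constant.  */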
3327
3328 /* Return a tree for the case when the result of an expression is RESULT
3329 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3330 of the expression but are now not needed.
3331
3332 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3333 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3334 evaluated before OMITTED2. Otherwise, if neither has side effects,
3335 just do the conversion of RESULT to TYPE. */
3336
3337 tree
3338 omit_two_operands_loc (location_t loc, tree type, tree result,
3339 tree omitted1, tree omitted2)
3340 {
3341 tree t = fold_convert_loc (loc, type, result);
3342
3343 if (TREE_SIDE_EFFECTS (omitted2))
3344 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3345 if (TREE_SIDE_EFFECTS (omitted1))
3346 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3347
3348 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3349 }
3350
3351 \f
3352 /* Return a simplified tree node for the truth-negation of ARG. This
3353 never alters ARG itself. We assume that ARG is an operation that
3354 returns a truth value (0 or 1).
3355
3356 FIXME: one would think we would fold the result, but it causes
3357 problems with the dominator optimizer. */
3358
3359 static tree
3360 fold_truth_not_expr (location_t loc, tree arg)
3361 {
3362 tree type = TREE_TYPE (arg);
3363 enum tree_code code = TREE_CODE (arg);
3364 location_t loc1, loc2;
3365
3366 /* If this is a comparison, we can simply invert it, except for
3367 floating-point non-equality comparisons, in which case we just
3368 enclose a TRUTH_NOT_EXPR around what we have. */
3369
3370 if (TREE_CODE_CLASS (code) == tcc_comparison)
3371 {
3372 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3373 if (FLOAT_TYPE_P (op_type)
3374 && flag_trapping_math
3375 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3376 && code != NE_EXPR && code != EQ_EXPR)
3377 return NULL_TREE;
3378
3379 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3380 if (code == ERROR_MARK)
3381 return NULL_TREE;
3382
3383 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3384 TREE_OPERAND (arg, 1));
3385 }
3386
3387 switch (code)
3388 {
3389 case INTEGER_CST:
3390 return constant_boolean_node (integer_zerop (arg), type);
3391
3392 case TRUTH_AND_EXPR:
3393 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3394 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3395 return build2_loc (loc, TRUTH_OR_EXPR, type,
3396 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3397 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3398
3399 case TRUTH_OR_EXPR:
3400 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3401 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3402 return build2_loc (loc, TRUTH_AND_EXPR, type,
3403 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3404 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3405
3406 case TRUTH_XOR_EXPR:
3407 /* Here we can invert either operand. We invert the first operand
3408 unless the second operand is a TRUTH_NOT_EXPR in which case our
3409 result is the XOR of the first operand with the inside of the
3410 negation of the second operand. */
3411
3412 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3413 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3414 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3415 else
3416 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3417 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3418 TREE_OPERAND (arg, 1));
3419
3420 case TRUTH_ANDIF_EXPR:
3421 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3422 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3423 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3424 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3425 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3426
3427 case TRUTH_ORIF_EXPR:
3428 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3429 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3430 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3431 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3432 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3433
3434 case TRUTH_NOT_EXPR:
3435 return TREE_OPERAND (arg, 0);
3436
3437 case COND_EXPR:
3438 {
3439 tree arg1 = TREE_OPERAND (arg, 1);
3440 tree arg2 = TREE_OPERAND (arg, 2);
3441
3442 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3443 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3444
3445 /* A COND_EXPR may have a throw as one operand, which
3446 then has void type. Just leave void operands
3447 as they are. */
3448 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3449 VOID_TYPE_P (TREE_TYPE (arg1))
3450 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3451 VOID_TYPE_P (TREE_TYPE (arg2))
3452 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3453 }
3454
3455 case COMPOUND_EXPR:
3456 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3457 return build2_loc (loc, COMPOUND_EXPR, type,
3458 TREE_OPERAND (arg, 0),
3459 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3460
3461 case NON_LVALUE_EXPR:
3462 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3463 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3464
3465 CASE_CONVERT:
3466 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3467 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3468
3469 /* ... fall through ... */
3470
3471 case FLOAT_EXPR:
3472 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3473 return build1_loc (loc, TREE_CODE (arg), type,
3474 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3475
3476 case BIT_AND_EXPR:
3477 if (!integer_onep (TREE_OPERAND (arg, 1)))
3478 return NULL_TREE;
3479 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3480
3481 case SAVE_EXPR:
3482 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3483
3484 case CLEANUP_POINT_EXPR:
3485 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3486 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3487 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3488
3489 default:
3490 return NULL_TREE;
3491 }
3492 }
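/* Illustrative inversions (hypothetical operands): !(A && B) becomes
!A || !B through the TRUTH_ANDIF_EXPR case, !(A ^ B) becomes !A ^ B, and a
floating-point X < Y is rejected (NULL_TREE) under flag_trapping_math
because the inverted comparison would trap on different inputs than the
original.  */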
3493
3494 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3495 assume that ARG is an operation that returns a truth value (0 or 1
3496 for scalars, 0 or -1 for vectors). Return the folded expression if
3497 folding is successful. Otherwise, return NULL_TREE. */
3498
3499 static tree
3500 fold_invert_truthvalue (location_t loc, tree arg)
3501 {
3502 tree type = TREE_TYPE (arg);
3503 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3504 ? BIT_NOT_EXPR
3505 : TRUTH_NOT_EXPR,
3506 type, arg);
3507 }
3508
3509 /* Return a simplified tree node for the truth-negation of ARG. This
3510 never alters ARG itself. We assume that ARG is an operation that
3511 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3512
3513 tree
3514 invert_truthvalue_loc (location_t loc, tree arg)
3515 {
3516 if (TREE_CODE (arg) == ERROR_MARK)
3517 return arg;
3518
3519 tree type = TREE_TYPE (arg);
3520 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3521 ? BIT_NOT_EXPR
3522 : TRUTH_NOT_EXPR,
3523 type, arg);
3524 }
3525
3526 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3527 operands are another bit-wise operation with a common input. If so,
3528 distribute the bit operations to save an operation and possibly two if
3529 constants are involved. For example, convert
3530 (A | B) & (A | C) into A | (B & C)
3531 Further simplification will occur if B and C are constants.
3532
3533 If this optimization cannot be done, 0 will be returned. */
3534
3535 static tree
3536 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3537 tree arg0, tree arg1)
3538 {
3539 tree common;
3540 tree left, right;
3541
3542 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3543 || TREE_CODE (arg0) == code
3544 || (TREE_CODE (arg0) != BIT_AND_EXPR
3545 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3546 return 0;
3547
3548 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3549 {
3550 common = TREE_OPERAND (arg0, 0);
3551 left = TREE_OPERAND (arg0, 1);
3552 right = TREE_OPERAND (arg1, 1);
3553 }
3554 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3555 {
3556 common = TREE_OPERAND (arg0, 0);
3557 left = TREE_OPERAND (arg0, 1);
3558 right = TREE_OPERAND (arg1, 0);
3559 }
3560 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3561 {
3562 common = TREE_OPERAND (arg0, 1);
3563 left = TREE_OPERAND (arg0, 0);
3564 right = TREE_OPERAND (arg1, 1);
3565 }
3566 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3567 {
3568 common = TREE_OPERAND (arg0, 1);
3569 left = TREE_OPERAND (arg0, 0);
3570 right = TREE_OPERAND (arg1, 0);
3571 }
3572 else
3573 return 0;
3574
3575 common = fold_convert_loc (loc, type, common);
3576 left = fold_convert_loc (loc, type, left);
3577 right = fold_convert_loc (loc, type, right);
3578 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3579 fold_build2_loc (loc, code, type, left, right));
3580 }
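/* A worked instance (hypothetical operands): for (A | 3) & (A | 5) the
common operand is A, so the result is A | (3 & 5), which folds further to
A | 1 -- one bitwise operation instead of three.  */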
3581
3582 /* Knowing that ARG0 and ARG1 are each either an RDIV_EXPR or a MULT_EXPR,
3583 simplify a binary operation with code CODE. Unsafe: it can change rounding. */
3584 static tree
3585 distribute_real_division (location_t loc, enum tree_code code, tree type,
3586 tree arg0, tree arg1)
3587 {
3588 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3589 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3590
3591 /* (A / C) +- (B / C) -> (A +- B) / C. */
3592 if (mul0 == mul1
3593 && operand_equal_p (TREE_OPERAND (arg0, 1),
3594 TREE_OPERAND (arg1, 1), 0))
3595 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3596 fold_build2_loc (loc, code, type,
3597 TREE_OPERAND (arg0, 0),
3598 TREE_OPERAND (arg1, 0)),
3599 TREE_OPERAND (arg0, 1));
3600
3601 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3602 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3603 TREE_OPERAND (arg1, 0), 0)
3604 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3605 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3606 {
3607 REAL_VALUE_TYPE r0, r1;
3608 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3609 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3610 if (!mul0)
3611 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3612 if (!mul1)
3613 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3614 real_arithmetic (&r0, code, &r0, &r1);
3615 return fold_build2_loc (loc, MULT_EXPR, type,
3616 TREE_OPERAND (arg0, 0),
3617 build_real (type, r0));
3618 }
3619
3620 return NULL_TREE;
3621 }
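/* For example (hypothetical operands): X/4.0 + X/8.0 matches the second
pattern and becomes X * (0.25 + 0.125), i.e. X * 0.375.  The combined form
can round differently from the original two divisions, which is why callers
only apply this under -funsafe-math-optimizations.  */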
3622 \f
3623 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3624 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3625
3626 static tree
3627 make_bit_field_ref (location_t loc, tree inner, tree type,
3628 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3629 {
3630 tree result, bftype;
3631
3632 if (bitpos == 0)
3633 {
3634 tree size = TYPE_SIZE (TREE_TYPE (inner));
3635 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3636 || POINTER_TYPE_P (TREE_TYPE (inner)))
3637 && tree_fits_shwi_p (size)
3638 && tree_to_shwi (size) == bitsize)
3639 return fold_convert_loc (loc, type, inner);
3640 }
3641
3642 bftype = type;
3643 if (TYPE_PRECISION (bftype) != bitsize
3644 || TYPE_UNSIGNED (bftype) == !unsignedp)
3645 bftype = build_nonstandard_integer_type (bitsize, 0);
3646
3647 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3648 size_int (bitsize), bitsize_int (bitpos));
3649
3650 if (bftype != type)
3651 result = fold_convert_loc (loc, type, result);
3652
3653 return result;
3654 }
3655
3656 /* Optimize a bit-field compare.
3657
3658 There are two cases: First is a compare against a constant and the
3659 second is a comparison of two items where the fields are at the same
3660 bit position relative to the start of a chunk (byte, halfword, word)
3661 large enough to contain it. In these cases we can avoid the shift
3662 implicit in bitfield extractions.
3663
3664 For constants, we emit a compare of the shifted constant with the
3665 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3666 compared. For two fields at the same position, we do the ANDs with the
3667 similar mask and compare the result of the ANDs.
3668
3669 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3670 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3671 are the left and right operands of the comparison, respectively.
3672
3673 If the optimization described above can be done, we return the resulting
3674 tree. Otherwise we return zero. */
3675
3676 static tree
3677 optimize_bit_field_compare (location_t loc, enum tree_code code,
3678 tree compare_type, tree lhs, tree rhs)
3679 {
3680 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3681 tree type = TREE_TYPE (lhs);
3682 tree unsigned_type;
3683 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3684 machine_mode lmode, rmode, nmode;
3685 int lunsignedp, runsignedp;
3686 int lvolatilep = 0, rvolatilep = 0;
3687 tree linner, rinner = NULL_TREE;
3688 tree mask;
3689 tree offset;
3690
3691 /* Get all the information about the extractions being done. If the bit size
3692 is the same as the size of the underlying object, we aren't doing an
3693 extraction at all and so can do nothing. We also don't want to
3694 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3695 then will no longer be able to replace it. */
3696 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3697 &lunsignedp, &lvolatilep, false);
3698 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3699 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3700 return 0;
3701
3702 if (!const_p)
3703 {
3704 /* If this is not a constant, we can only do something if bit positions,
3705 sizes, and signedness are the same. */
3706 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3707 &runsignedp, &rvolatilep, false);
3708
3709 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3710 || lunsignedp != runsignedp || offset != 0
3711 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3712 return 0;
3713 }
3714
3715 /* See if we can find a mode to refer to this field. We should be able to,
3716 but fail if we can't. */
3717 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3718 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3719 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3720 TYPE_ALIGN (TREE_TYPE (rinner))),
3721 word_mode, false);
3722 if (nmode == VOIDmode)
3723 return 0;
3724
3725 /* Set signed and unsigned types of the precision of this mode for the
3726 shifts below. */
3727 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3728
3729 /* Compute the bit position and size for the new reference and our offset
3730 within it. If the new reference is the same size as the original, we
3731 won't optimize anything, so return zero. */
3732 nbitsize = GET_MODE_BITSIZE (nmode);
3733 nbitpos = lbitpos & ~ (nbitsize - 1);
3734 lbitpos -= nbitpos;
3735 if (nbitsize == lbitsize)
3736 return 0;
3737
3738 if (BYTES_BIG_ENDIAN)
3739 lbitpos = nbitsize - lbitsize - lbitpos;
3740
3741 /* Make the mask to be used against the extracted field. */
3742 mask = build_int_cst_type (unsigned_type, -1);
3743 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3744 mask = const_binop (RSHIFT_EXPR, mask,
3745 size_int (nbitsize - lbitsize - lbitpos));
3746
3747 if (! const_p)
3748 /* If not comparing with constant, just rework the comparison
3749 and return. */
3750 return fold_build2_loc (loc, code, compare_type,
3751 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3752 make_bit_field_ref (loc, linner,
3753 unsigned_type,
3754 nbitsize, nbitpos,
3755 1),
3756 mask),
3757 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3758 make_bit_field_ref (loc, rinner,
3759 unsigned_type,
3760 nbitsize, nbitpos,
3761 1),
3762 mask));
3763
3764 /* Otherwise, we are handling the constant case. See if the constant is too
3765 big for the field. Warn and return a tree for 0 (false) if so. We do
3766 this not only for its own sake, but to avoid having to test for this
3767 error case below. If we didn't, we might generate wrong code.
3768
3769 For unsigned fields, the constant shifted right by the field length should
3770 be all zero. For signed fields, the high-order bits should agree with
3771 the sign bit. */
3772
3773 if (lunsignedp)
3774 {
3775 if (wi::lrshift (rhs, lbitsize) != 0)
3776 {
3777 warning (0, "comparison is always %d due to width of bit-field",
3778 code == NE_EXPR);
3779 return constant_boolean_node (code == NE_EXPR, compare_type);
3780 }
3781 }
3782 else
3783 {
3784 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3785 if (tem != 0 && tem != -1)
3786 {
3787 warning (0, "comparison is always %d due to width of bit-field",
3788 code == NE_EXPR);
3789 return constant_boolean_node (code == NE_EXPR, compare_type);
3790 }
3791 }
3792
3793 /* Single-bit compares should always be against zero. */
3794 if (lbitsize == 1 && ! integer_zerop (rhs))
3795 {
3796 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3797 rhs = build_int_cst (type, 0);
3798 }
3799
3800 /* Make a new bitfield reference, shift the constant over the
3801 appropriate number of bits and mask it with the computed mask
3802 (in case this was a signed field). If we changed it, make a new one. */
3803 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3804
3805 rhs = const_binop (BIT_AND_EXPR,
3806 const_binop (LSHIFT_EXPR,
3807 fold_convert_loc (loc, unsigned_type, rhs),
3808 size_int (lbitpos)),
3809 mask);
3810
3811 lhs = build2_loc (loc, code, compare_type,
3812 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3813 return lhs;
3814 }
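
/* Illustrative sketch (not part of the sources) of the transformation
   above.  Given a hypothetical layout where a 5-bit field starts at
   bit 3 of a word, a test like "s.b == 7" is rewritten so that no
   shift of the extracted field is needed:  */

static int
demo_bit_field_eq (unsigned int word)
{
  /* Mask the field in place and compare against the constant shifted
     into the field's position.  */
  return (word & (0x1fu << 3)) == (7u << 3);
}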
3815 \f
3816 /* Subroutine for fold_truth_andor_1: decode a field reference.
3817
3818 If EXP is a comparison reference, we return the innermost reference.
3819
3820 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3821 set to the starting bit number.
3822
3823 If the innermost field can be completely contained in a mode-sized
3824 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3825
3826 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3827 otherwise it is not changed.
3828
3829 *PUNSIGNEDP is set to the signedness of the field.
3830
3831 *PMASK is set to the mask used. This is either contained in a
3832 BIT_AND_EXPR or derived from the width of the field.
3833
3834 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3835
3836 Return 0 if this is not a component reference or is one that we can't
3837 do anything with. */
3838
3839 static tree
3840 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3841 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3842 int *punsignedp, int *pvolatilep,
3843 tree *pmask, tree *pand_mask)
3844 {
3845 tree outer_type = 0;
3846 tree and_mask = 0;
3847 tree mask, inner, offset;
3848 tree unsigned_type;
3849 unsigned int precision;
3850
3851 /* All the optimizations using this function assume integer fields.
3852 There are problems with FP fields since the type_for_size call
3853 below can fail for, e.g., XFmode. */
3854 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3855 return 0;
3856
3857 /* We are interested in the bare arrangement of bits, so strip everything
3858 that doesn't affect the machine mode. However, record the type of the
3859 outermost expression if it may matter below. */
3860 if (CONVERT_EXPR_P (exp)
3861 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3862 outer_type = TREE_TYPE (exp);
3863 STRIP_NOPS (exp);
3864
3865 if (TREE_CODE (exp) == BIT_AND_EXPR)
3866 {
3867 and_mask = TREE_OPERAND (exp, 1);
3868 exp = TREE_OPERAND (exp, 0);
3869 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3870 if (TREE_CODE (and_mask) != INTEGER_CST)
3871 return 0;
3872 }
3873
3874 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3875 punsignedp, pvolatilep, false);
3876 if ((inner == exp && and_mask == 0)
3877 || *pbitsize < 0 || offset != 0
3878 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3879 return 0;
3880
3881 /* If the number of bits in the reference is the same as the bitsize of
3882 the outer type, then the outer type gives the signedness. Otherwise
3883 (in case of a small bitfield) the signedness is unchanged. */
3884 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3885 *punsignedp = TYPE_UNSIGNED (outer_type);
3886
3887 /* Compute the mask to access the bitfield. */
3888 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3889 precision = TYPE_PRECISION (unsigned_type);
3890
3891 mask = build_int_cst_type (unsigned_type, -1);
3892
3893 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3894 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3895
3896 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3897 if (and_mask != 0)
3898 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3899 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3900
3901 *pmask = mask;
3902 *pand_mask = and_mask;
3903 return inner;
3904 }
3905
3906 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3907 bit positions and the type of MASK is signed. */
3908
3909 static int
3910 all_ones_mask_p (const_tree mask, unsigned int size)
3911 {
3912 tree type = TREE_TYPE (mask);
3913 unsigned int precision = TYPE_PRECISION (type);
3914
3915 /* If this function returns true when the type of the mask is
3916 UNSIGNED, then there will be errors. In particular see
3917 gcc.c-torture/execute/990326-1.c. There does not appear to be
3918 any documentation paper trail as to why this is so. But the
3919 pre-wide-int code worked with that restriction and it has been preserved
3920 here. */
3921 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3922 return false;
3923
3924 return wi::mask (size, false, precision) == mask;
3925 }
3926
3927 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3928 represents the sign bit of EXP's type. If EXP represents a sign
3929 or zero extension, also test VAL against the unextended type.
3930 The return value is the (sub)expression whose sign bit is VAL,
3931 or NULL_TREE otherwise. */
3932
3933 tree
3934 sign_bit_p (tree exp, const_tree val)
3935 {
3936 int width;
3937 tree t;
3938
3939 /* Tree EXP must have an integral type. */
3940 t = TREE_TYPE (exp);
3941 if (! INTEGRAL_TYPE_P (t))
3942 return NULL_TREE;
3943
3944 /* Tree VAL must be an integer constant. */
3945 if (TREE_CODE (val) != INTEGER_CST
3946 || TREE_OVERFLOW (val))
3947 return NULL_TREE;
3948
3949 width = TYPE_PRECISION (t);
3950 if (wi::only_sign_bit_p (val, width))
3951 return exp;
3952
3953 /* Handle extension from a narrower type. */
3954 if (TREE_CODE (exp) == NOP_EXPR
3955 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3956 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3957
3958 return NULL_TREE;
3959 }
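
/* Illustrative check (not part of the sources): for a 32-bit type,
   sign_bit_p matches only a constant whose sole set bit is the sign
   bit, i.e. 1 << 31 viewed as an unsigned value:  */

static int
demo_is_sign_bit_32 (unsigned int val)
{
  return val == 1u << 31;
}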
3960
3961 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3962 to be evaluated unconditionally. */
3963
3964 static int
3965 simple_operand_p (const_tree exp)
3966 {
3967 /* Strip any conversions that don't change the machine mode. */
3968 STRIP_NOPS (exp);
3969
3970 return (CONSTANT_CLASS_P (exp)
3971 || TREE_CODE (exp) == SSA_NAME
3972 || (DECL_P (exp)
3973 && ! TREE_ADDRESSABLE (exp)
3974 && ! TREE_THIS_VOLATILE (exp)
3975 && ! DECL_NONLOCAL (exp)
3976 /* Don't regard global variables as simple. They may be
3977 allocated in ways unknown to the compiler (shared memory,
3978 #pragma weak, etc). */
3979 && ! TREE_PUBLIC (exp)
3980 && ! DECL_EXTERNAL (exp)
3981 /* Weakrefs are not safe to be read, since they can be NULL.
3982 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3983 have DECL_WEAK flag set. */
3984 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3985 /* Loading a static variable is unduly expensive, but global
3986 registers aren't expensive. */
3987 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3988 }
3989
3990 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3991 to be evaluated unconditionally.
3992 In addition to simple_operand_p, we assume that comparisons, conversions,
3993 and logic-not operations are simple, if their operands are simple, too. */
3994
3995 static bool
3996 simple_operand_p_2 (tree exp)
3997 {
3998 enum tree_code code;
3999
4000 if (TREE_SIDE_EFFECTS (exp)
4001 || tree_could_trap_p (exp))
4002 return false;
4003
4004 while (CONVERT_EXPR_P (exp))
4005 exp = TREE_OPERAND (exp, 0);
4006
4007 code = TREE_CODE (exp);
4008
4009 if (TREE_CODE_CLASS (code) == tcc_comparison)
4010 return (simple_operand_p (TREE_OPERAND (exp, 0))
4011 && simple_operand_p (TREE_OPERAND (exp, 1)));
4012
4013 if (code == TRUTH_NOT_EXPR)
4014 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4015
4016 return simple_operand_p (exp);
4017 }
4018
4019 \f
4020 /* The following functions are subroutines to fold_range_test and allow it to
4021 try to change a logical combination of comparisons into a range test.
4022
4023 For example, both
4024 X == 2 || X == 3 || X == 4 || X == 5
4025 and
4026 X >= 2 && X <= 5
4027 are converted to
4028 (unsigned) (X - 2) <= 3
4029
4030 We describe each set of comparisons as being either inside or outside
4031 a range, using a variable named like IN_P, and then describe the
4032 range with a lower and upper bound. If one of the bounds is omitted,
4033 it represents either the highest or lowest value of the type.
4034
4035 In the comments below, we represent a range by two numbers in brackets
4036 preceded by a "+" to designate being inside that range, or a "-" to
4037 designate being outside that range, so the condition can be inverted by
4038 flipping the prefix. An omitted bound is represented by a "-". For
4039 example, "- [-, 10]" means being outside the range starting at the lowest
4040 possible value and ending at 10, in other words, being greater than 10.
4041 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4042 always false.
4043
4044 We set up things so that the missing bounds are handled in a consistent
4045 manner so neither a missing bound nor "true" and "false" need to be
4046 handled using a special case. */
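
/* Illustrative sketch (not part of the sources) of the conversion
   described above.  The two helpers agree for every int X; subtracting
   in the unsigned domain avoids signed overflow:  */

static int
demo_range_chain (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
demo_range_test (int x)
{
  return (unsigned int) x - 2u <= 3u;
}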
4047
4048 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4049 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4050 and UPPER1_P are nonzero if the respective argument is an upper bound
4051 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4052 must be specified for a comparison. ARG1 will be converted to ARG0's
4053 type if both are specified. */
4054
4055 static tree
4056 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4057 tree arg1, int upper1_p)
4058 {
4059 tree tem;
4060 int result;
4061 int sgn0, sgn1;
4062
4063 /* If neither arg represents infinity, do the normal operation.
4064 Else, if not a comparison, return infinity. Else handle the special
4065 comparison rules. Note that most of the cases below won't occur, but
4066 are handled for consistency. */
4067
4068 if (arg0 != 0 && arg1 != 0)
4069 {
4070 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4071 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4072 STRIP_NOPS (tem);
4073 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4074 }
4075
4076 if (TREE_CODE_CLASS (code) != tcc_comparison)
4077 return 0;
4078
4079 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4080 for neither. In real maths, we cannot assume open ended ranges are
4081 the same. But, this is computer arithmetic, where numbers are finite.
4082 We can therefore model any missing bound as a value Z greater than
4083 any representable number, which permits us to treat unbounded
4084 ranges as equal. */
4085 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4086 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4087 switch (code)
4088 {
4089 case EQ_EXPR:
4090 result = sgn0 == sgn1;
4091 break;
4092 case NE_EXPR:
4093 result = sgn0 != sgn1;
4094 break;
4095 case LT_EXPR:
4096 result = sgn0 < sgn1;
4097 break;
4098 case LE_EXPR:
4099 result = sgn0 <= sgn1;
4100 break;
4101 case GT_EXPR:
4102 result = sgn0 > sgn1;
4103 break;
4104 case GE_EXPR:
4105 result = sgn0 >= sgn1;
4106 break;
4107 default:
4108 gcc_unreachable ();
4109 }
4110
4111 return constant_boolean_node (result, type);
4112 }
4113 \f
4114 /* Helper routine for make_range. Perform one step for it, return
4115 new expression if the loop should continue or NULL_TREE if it should
4116 stop. */
4117
4118 tree
4119 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4120 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4121 bool *strict_overflow_p)
4122 {
4123 tree arg0_type = TREE_TYPE (arg0);
4124 tree n_low, n_high, low = *p_low, high = *p_high;
4125 int in_p = *p_in_p, n_in_p;
4126
4127 switch (code)
4128 {
4129 case TRUTH_NOT_EXPR:
4130 /* We can only do something if the range is testing for zero. */
4131 if (low == NULL_TREE || high == NULL_TREE
4132 || ! integer_zerop (low) || ! integer_zerop (high))
4133 return NULL_TREE;
4134 *p_in_p = ! in_p;
4135 return arg0;
4136
4137 case EQ_EXPR: case NE_EXPR:
4138 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4139 /* We can only do something if the range is testing for zero
4140 and if the second operand is an integer constant. Note that
4141 saying something is "in" the range we make is done by
4142 complementing IN_P, whose initial state encodes being not equal
4143 to zero; "out" is leaving it alone. */
4144 if (low == NULL_TREE || high == NULL_TREE
4145 || ! integer_zerop (low) || ! integer_zerop (high)
4146 || TREE_CODE (arg1) != INTEGER_CST)
4147 return NULL_TREE;
4148
4149 switch (code)
4150 {
4151 case NE_EXPR: /* - [c, c] */
4152 low = high = arg1;
4153 break;
4154 case EQ_EXPR: /* + [c, c] */
4155 in_p = ! in_p, low = high = arg1;
4156 break;
4157 case GT_EXPR: /* - [-, c] */
4158 low = 0, high = arg1;
4159 break;
4160 case GE_EXPR: /* + [c, -] */
4161 in_p = ! in_p, low = arg1, high = 0;
4162 break;
4163 case LT_EXPR: /* - [c, -] */
4164 low = arg1, high = 0;
4165 break;
4166 case LE_EXPR: /* + [-, c] */
4167 in_p = ! in_p, low = 0, high = arg1;
4168 break;
4169 default:
4170 gcc_unreachable ();
4171 }
4172
4173 /* If this is an unsigned comparison, we also know that EXP is
4174 greater than or equal to zero. We base the range tests we make
4175 on that fact, so we record it here so we can parse existing
4176 range tests. We test arg0_type since often the return type
4177 of, e.g. EQ_EXPR, is boolean. */
4178 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4179 {
4180 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4181 in_p, low, high, 1,
4182 build_int_cst (arg0_type, 0),
4183 NULL_TREE))
4184 return NULL_TREE;
4185
4186 in_p = n_in_p, low = n_low, high = n_high;
4187
4188 /* If the high bound is missing, but we have a nonzero low
4189 bound, reverse the range so it goes from zero to the low bound
4190 minus 1. */
4191 if (high == 0 && low && ! integer_zerop (low))
4192 {
4193 in_p = ! in_p;
4194 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4195 build_int_cst (TREE_TYPE (low), 1), 0);
4196 low = build_int_cst (arg0_type, 0);
4197 }
4198 }
4199
4200 *p_low = low;
4201 *p_high = high;
4202 *p_in_p = in_p;
4203 return arg0;
4204
4205 case NEGATE_EXPR:
4206 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4207 low and high are non-NULL, then normalize will DTRT. */
4208 if (!TYPE_UNSIGNED (arg0_type)
4209 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4210 {
4211 if (low == NULL_TREE)
4212 low = TYPE_MIN_VALUE (arg0_type);
4213 if (high == NULL_TREE)
4214 high = TYPE_MAX_VALUE (arg0_type);
4215 }
4216
4217 /* (-x) IN [a,b] -> x in [-b, -a] */
4218 n_low = range_binop (MINUS_EXPR, exp_type,
4219 build_int_cst (exp_type, 0),
4220 0, high, 1);
4221 n_high = range_binop (MINUS_EXPR, exp_type,
4222 build_int_cst (exp_type, 0),
4223 0, low, 0);
4224 if (n_high != 0 && TREE_OVERFLOW (n_high))
4225 return NULL_TREE;
4226 goto normalize;
4227
4228 case BIT_NOT_EXPR:
4229 /* ~ X -> -X - 1 */
4230 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4231 build_int_cst (exp_type, 1));
4232
4233 case PLUS_EXPR:
4234 case MINUS_EXPR:
4235 if (TREE_CODE (arg1) != INTEGER_CST)
4236 return NULL_TREE;
4237
4238 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4239 move a constant to the other side. */
4240 if (!TYPE_UNSIGNED (arg0_type)
4241 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4242 return NULL_TREE;
4243
4244 /* If EXP is signed, any overflow in the computation is undefined,
4245 so we don't worry about it so long as our computations on
4246 the bounds don't overflow. For unsigned, overflow is defined
4247 and this is exactly the right thing. */
4248 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4249 arg0_type, low, 0, arg1, 0);
4250 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4251 arg0_type, high, 1, arg1, 0);
4252 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4253 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4254 return NULL_TREE;
4255
4256 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4257 *strict_overflow_p = true;
4258
4259 normalize:
4260 /* Check for an unsigned range which has wrapped around the maximum
4261 value thus making n_high < n_low, and normalize it. */
4262 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4263 {
4264 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4265 build_int_cst (TREE_TYPE (n_high), 1), 0);
4266 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4267 build_int_cst (TREE_TYPE (n_low), 1), 0);
4268
4269 /* If the range is of the form +/- [ x+1, x ], we won't
4270 be able to normalize it. But then, it represents the
4271 whole range or the empty set, so make it
4272 +/- [ -, - ]. */
4273 if (tree_int_cst_equal (n_low, low)
4274 && tree_int_cst_equal (n_high, high))
4275 low = high = 0;
4276 else
4277 in_p = ! in_p;
4278 }
4279 else
4280 low = n_low, high = n_high;
4281
4282 *p_low = low;
4283 *p_high = high;
4284 *p_in_p = in_p;
4285 return arg0;
4286
4287 CASE_CONVERT:
4288 case NON_LVALUE_EXPR:
4289 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4290 return NULL_TREE;
4291
4292 if (! INTEGRAL_TYPE_P (arg0_type)
4293 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4294 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4295 return NULL_TREE;
4296
4297 n_low = low, n_high = high;
4298
4299 if (n_low != 0)
4300 n_low = fold_convert_loc (loc, arg0_type, n_low);
4301
4302 if (n_high != 0)
4303 n_high = fold_convert_loc (loc, arg0_type, n_high);
4304
4305 /* If we're converting arg0 from an unsigned type to exp's
4306 signed type, we will be doing the comparison as unsigned.
4307 The tests above have already verified that LOW and HIGH
4308 are both positive.
4309
4310 So we have to ensure that we will handle large unsigned
4311 values the same way that the current signed bounds treat
4312 negative values. */
4313
4314 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4315 {
4316 tree high_positive;
4317 tree equiv_type;
4318 /* For fixed-point modes, we need to pass the saturating flag
4319 as the 2nd parameter. */
4320 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4321 equiv_type
4322 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4323 TYPE_SATURATING (arg0_type));
4324 else
4325 equiv_type
4326 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4327
4328 /* A range without an upper bound is, naturally, unbounded.
4329 Since convert would have cropped a very large value, use
4330 the max value for the destination type. */
4331 high_positive
4332 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4333 : TYPE_MAX_VALUE (arg0_type);
4334
4335 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4336 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4337 fold_convert_loc (loc, arg0_type,
4338 high_positive),
4339 build_int_cst (arg0_type, 1));
4340
4341 /* If the low bound is specified, "and" the range with the
4342 range for which the original unsigned value will be
4343 positive. */
4344 if (low != 0)
4345 {
4346 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4347 1, fold_convert_loc (loc, arg0_type,
4348 integer_zero_node),
4349 high_positive))
4350 return NULL_TREE;
4351
4352 in_p = (n_in_p == in_p);
4353 }
4354 else
4355 {
4356 /* Otherwise, "or" the range with the range of the input
4357 that will be interpreted as negative. */
4358 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4359 1, fold_convert_loc (loc, arg0_type,
4360 integer_zero_node),
4361 high_positive))
4362 return NULL_TREE;
4363
4364 in_p = (in_p != n_in_p);
4365 }
4366 }
4367
4368 *p_low = n_low;
4369 *p_high = n_high;
4370 *p_in_p = in_p;
4371 return arg0;
4372
4373 default:
4374 return NULL_TREE;
4375 }
4376 }
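
/* Worked example (not from the sources) of the PLUS_EXPR step above in
   unsigned arithmetic.  Moving the constant into the bounds turns
   "x + 10 >= 15" into "x in [5, UINT_MAX - 10]"; when the adjusted
   bounds wrap so that n_high < n_low, the "normalize" code flips IN_P
   and uses [n_high + 1, n_low - 1] instead.  */

static int
demo_plus_range (unsigned int x)
{
  /* Equivalent to 5u <= x && x <= ~0u - 10.  */
  return x + 10u >= 15u;
}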
4377
4378 /* Given EXP, a logical expression, set the range it is testing into
4379 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4380 actually being tested. *PLOW and *PHIGH will be made of the same
4381 type as the returned expression. If EXP is not a comparison, we
4382 will most likely not be returning a useful value and range. Set
4383 *STRICT_OVERFLOW_P to true if the return value is only valid
4384 because signed overflow is undefined; otherwise, do not change
4385 *STRICT_OVERFLOW_P. */
4386
4387 tree
4388 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4389 bool *strict_overflow_p)
4390 {
4391 enum tree_code code;
4392 tree arg0, arg1 = NULL_TREE;
4393 tree exp_type, nexp;
4394 int in_p;
4395 tree low, high;
4396 location_t loc = EXPR_LOCATION (exp);
4397
4398 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4399 and see if we can refine the range. Some of the cases below may not
4400 happen, but it doesn't seem worth worrying about this. We keep
4401 iterating as long as make_range_step can refine the range, and
4402 stop as soon as it returns NULL_TREE. */
4403
4404 in_p = 0;
4405 low = high = build_int_cst (TREE_TYPE (exp), 0);
4406
4407 while (1)
4408 {
4409 code = TREE_CODE (exp);
4410 exp_type = TREE_TYPE (exp);
4411 arg0 = NULL_TREE;
4412
4413 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4414 {
4415 if (TREE_OPERAND_LENGTH (exp) > 0)
4416 arg0 = TREE_OPERAND (exp, 0);
4417 if (TREE_CODE_CLASS (code) == tcc_binary
4418 || TREE_CODE_CLASS (code) == tcc_comparison
4419 || (TREE_CODE_CLASS (code) == tcc_expression
4420 && TREE_OPERAND_LENGTH (exp) > 1))
4421 arg1 = TREE_OPERAND (exp, 1);
4422 }
4423 if (arg0 == NULL_TREE)
4424 break;
4425
4426 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4427 &high, &in_p, strict_overflow_p);
4428 if (nexp == NULL_TREE)
4429 break;
4430 exp = nexp;
4431 }
4432
4433 /* If EXP is a constant, we can evaluate whether this is true or false. */
4434 if (TREE_CODE (exp) == INTEGER_CST)
4435 {
4436 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4437 exp, 0, low, 0))
4438 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4439 exp, 1, high, 1)));
4440 low = high = 0;
4441 exp = 0;
4442 }
4443
4444 *pin_p = in_p, *plow = low, *phigh = high;
4445 return exp;
4446 }
4447 \f
4448 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4449 type, TYPE, return an expression to test if EXP is in (or out of, depending
4450 on IN_P) the range. Return 0 if the test couldn't be created. */
4451
4452 tree
4453 build_range_check (location_t loc, tree type, tree exp, int in_p,
4454 tree low, tree high)
4455 {
4456 tree etype = TREE_TYPE (exp), value;
4457
4458 #ifdef HAVE_canonicalize_funcptr_for_compare
4459 /* Disable this optimization for function pointer expressions
4460 on targets that require function pointer canonicalization. */
4461 if (HAVE_canonicalize_funcptr_for_compare
4462 && TREE_CODE (etype) == POINTER_TYPE
4463 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4464 return NULL_TREE;
4465 #endif
4466
4467 if (! in_p)
4468 {
4469 value = build_range_check (loc, type, exp, 1, low, high);
4470 if (value != 0)
4471 return invert_truthvalue_loc (loc, value);
4472
4473 return 0;
4474 }
4475
4476 if (low == 0 && high == 0)
4477 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4478
4479 if (low == 0)
4480 return fold_build2_loc (loc, LE_EXPR, type, exp,
4481 fold_convert_loc (loc, etype, high));
4482
4483 if (high == 0)
4484 return fold_build2_loc (loc, GE_EXPR, type, exp,
4485 fold_convert_loc (loc, etype, low));
4486
4487 if (operand_equal_p (low, high, 0))
4488 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4489 fold_convert_loc (loc, etype, low));
4490
4491 if (integer_zerop (low))
4492 {
4493 if (! TYPE_UNSIGNED (etype))
4494 {
4495 etype = unsigned_type_for (etype);
4496 high = fold_convert_loc (loc, etype, high);
4497 exp = fold_convert_loc (loc, etype, exp);
4498 }
4499 return build_range_check (loc, type, exp, 1, 0, high);
4500 }
4501
4502 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4503 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4504 {
4505 int prec = TYPE_PRECISION (etype);
4506
4507 if (wi::mask (prec - 1, false, prec) == high)
4508 {
4509 if (TYPE_UNSIGNED (etype))
4510 {
4511 tree signed_etype = signed_type_for (etype);
4512 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4513 etype
4514 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4515 else
4516 etype = signed_etype;
4517 exp = fold_convert_loc (loc, etype, exp);
4518 }
4519 return fold_build2_loc (loc, GT_EXPR, type, exp,
4520 build_int_cst (etype, 0));
4521 }
4522 }
4523
4524 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4525 This requires wrap-around arithmetic for the type of the expression.
4526 First make sure that arithmetic in this type is valid, then make sure
4527 that it wraps around. */
4528 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4529 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4530 TYPE_UNSIGNED (etype));
4531
4532 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4533 {
4534 tree utype, minv, maxv;
4535
4536 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4537 for the type in question, as we rely on this here. */
4538 utype = unsigned_type_for (etype);
4539 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4540 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4541 build_int_cst (TREE_TYPE (maxv), 1), 1);
4542 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4543
4544 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4545 minv, 1, maxv, 1)))
4546 etype = utype;
4547 else
4548 return 0;
4549 }
4550
4551 high = fold_convert_loc (loc, etype, high);
4552 low = fold_convert_loc (loc, etype, low);
4553 exp = fold_convert_loc (loc, etype, exp);
4554
4555 value = const_binop (MINUS_EXPR, high, low);
4556
4557
4558 if (POINTER_TYPE_P (etype))
4559 {
4560 if (value != 0 && !TREE_OVERFLOW (value))
4561 {
4562 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4563 return build_range_check (loc, type,
4564 fold_build_pointer_plus_loc (loc, exp, low),
4565 1, build_int_cst (etype, 0), value);
4566 }
4567 return 0;
4568 }
4569
4570 if (value != 0 && !TREE_OVERFLOW (value))
4571 return build_range_check (loc, type,
4572 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4573 1, build_int_cst (etype, 0), value);
4574
4575 return 0;
4576 }
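
/* Illustrative sketch (not part of the sources) of the special case
   above that turns (c >= 1 && c <= 127) into one signed comparison:
   for an 8-bit value, [1, 127] is exactly the set of values whose
   signed interpretation is positive (GCC's modulo semantics make the
   conversion well defined):  */

static int
demo_range_chain_8 (unsigned char c)
{
  return c >= 1 && c <= 127;
}

static int
demo_range_signed_8 (unsigned char c)
{
  return (signed char) c > 0;
}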
4577 \f
4578 /* Return the predecessor of VAL in its type, handling the infinite case. */
4579
4580 static tree
4581 range_predecessor (tree val)
4582 {
4583 tree type = TREE_TYPE (val);
4584
4585 if (INTEGRAL_TYPE_P (type)
4586 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4587 return 0;
4588 else
4589 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4590 build_int_cst (TREE_TYPE (val), 1), 0);
4591 }
4592
4593 /* Return the successor of VAL in its type, handling the infinite case. */
4594
4595 static tree
4596 range_successor (tree val)
4597 {
4598 tree type = TREE_TYPE (val);
4599
4600 if (INTEGRAL_TYPE_P (type)
4601 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4602 return 0;
4603 else
4604 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4605 build_int_cst (TREE_TYPE (val), 1), 0);
4606 }
4607
4608 /* Given two ranges, see if we can merge them into one. Return 1 if we
4609 can, 0 if we can't. Set the output range into the specified parameters. */
4610
4611 bool
4612 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4613 tree high0, int in1_p, tree low1, tree high1)
4614 {
4615 int no_overlap;
4616 int subset;
4617 int temp;
4618 tree tem;
4619 int in_p;
4620 tree low, high;
4621 int lowequal = ((low0 == 0 && low1 == 0)
4622 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4623 low0, 0, low1, 0)));
4624 int highequal = ((high0 == 0 && high1 == 0)
4625 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4626 high0, 1, high1, 1)));
4627
4628 /* Make range 0 be the range that starts first, or ends last if they
4629 start at the same value. Swap them if it isn't. */
4630 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4631 low0, 0, low1, 0))
4632 || (lowequal
4633 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4634 high1, 1, high0, 1))))
4635 {
4636 temp = in0_p, in0_p = in1_p, in1_p = temp;
4637 tem = low0, low0 = low1, low1 = tem;
4638 tem = high0, high0 = high1, high1 = tem;
4639 }
4640
4641 /* Now flag two cases, whether the ranges are disjoint or whether the
4642 second range is totally subsumed in the first. Note that the tests
4643 below are simplified by the ones above. */
4644 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4645 high0, 1, low1, 0));
4646 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4647 high1, 1, high0, 1));
4648
4649 /* We now have four cases, depending on whether we are including or
4650 excluding the two ranges. */
4651 if (in0_p && in1_p)
4652 {
4653 /* If they don't overlap, the result is false. If the second range
4654 is a subset it is the result. Otherwise, the range is from the start
4655 of the second to the end of the first. */
4656 if (no_overlap)
4657 in_p = 0, low = high = 0;
4658 else if (subset)
4659 in_p = 1, low = low1, high = high1;
4660 else
4661 in_p = 1, low = low1, high = high0;
4662 }
4663
4664 else if (in0_p && ! in1_p)
4665 {
4666 /* If they don't overlap, the result is the first range. If they are
4667 equal, the result is false. If the second range is a subset of the
4668 first, and the ranges begin at the same place, we go from just after
4669 the end of the second range to the end of the first. If the second
4670 range is not a subset of the first, or if it is a subset and both
4671 ranges end at the same place, the range starts at the start of the
4672 first range and ends just before the second range.
4673 Otherwise, we can't describe this as a single range. */
4674 if (no_overlap)
4675 in_p = 1, low = low0, high = high0;
4676 else if (lowequal && highequal)
4677 in_p = 0, low = high = 0;
4678 else if (subset && lowequal)
4679 {
4680 low = range_successor (high1);
4681 high = high0;
4682 in_p = 1;
4683 if (low == 0)
4684 {
4685 /* We are in the weird situation where high0 > high1 but
4686 high1 has no successor. Punt. */
4687 return 0;
4688 }
4689 }
4690 else if (! subset || highequal)
4691 {
4692 low = low0;
4693 high = range_predecessor (low1);
4694 in_p = 1;
4695 if (high == 0)
4696 {
4697 /* low0 < low1 but low1 has no predecessor. Punt. */
4698 return 0;
4699 }
4700 }
4701 else
4702 return 0;
4703 }
4704
4705 else if (! in0_p && in1_p)
4706 {
4707 /* If they don't overlap, the result is the second range. If the second
4708 is a subset of the first, the result is false. Otherwise,
4709 the range starts just after the first range and ends at the
4710 end of the second. */
4711 if (no_overlap)
4712 in_p = 1, low = low1, high = high1;
4713 else if (subset || highequal)
4714 in_p = 0, low = high = 0;
4715 else
4716 {
4717 low = range_successor (high0);
4718 high = high1;
4719 in_p = 1;
4720 if (low == 0)
4721 {
4722 /* high1 > high0 but high0 has no successor. Punt. */
4723 return 0;
4724 }
4725 }
4726 }
4727
4728 else
4729 {
4730 /* The case where we are excluding both ranges. Here the complex case
4731 is if they don't overlap. In that case, the only time we have a
4732 range is if they are adjacent. If the second is a subset of the
4733 first, the result is the first. Otherwise, the range to exclude
4734 starts at the beginning of the first range and ends at the end of the
4735 second. */
4736 if (no_overlap)
4737 {
4738 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4739 range_successor (high0),
4740 1, low1, 0)))
4741 in_p = 0, low = low0, high = high1;
4742 else
4743 {
4744 /* Canonicalize - [min, x] into - [-, x]. */
4745 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4746 switch (TREE_CODE (TREE_TYPE (low0)))
4747 {
4748 case ENUMERAL_TYPE:
4749 if (TYPE_PRECISION (TREE_TYPE (low0))
4750 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4751 break;
4752 /* FALLTHROUGH */
4753 case INTEGER_TYPE:
4754 if (tree_int_cst_equal (low0,
4755 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4756 low0 = 0;
4757 break;
4758 case POINTER_TYPE:
4759 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4760 && integer_zerop (low0))
4761 low0 = 0;
4762 break;
4763 default:
4764 break;
4765 }
4766
4767 /* Canonicalize - [x, max] into - [x, -]. */
4768 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4769 switch (TREE_CODE (TREE_TYPE (high1)))
4770 {
4771 case ENUMERAL_TYPE:
4772 if (TYPE_PRECISION (TREE_TYPE (high1))
4773 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4774 break;
4775 /* FALLTHROUGH */
4776 case INTEGER_TYPE:
4777 if (tree_int_cst_equal (high1,
4778 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4779 high1 = 0;
4780 break;
4781 case POINTER_TYPE:
4782 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4783 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4784 high1, 1,
4785 build_int_cst (TREE_TYPE (high1), 1),
4786 1)))
4787 high1 = 0;
4788 break;
4789 default:
4790 break;
4791 }
4792
4793 /* The ranges might be also adjacent between the maximum and
4794 minimum values of the given type. For
4795 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4796 return + [x + 1, y - 1]. */
4797 if (low0 == 0 && high1 == 0)
4798 {
4799 low = range_successor (high0);
4800 high = range_predecessor (low1);
4801 if (low == 0 || high == 0)
4802 return 0;
4803
4804 in_p = 1;
4805 }
4806 else
4807 return 0;
4808 }
4809 }
4810 else if (subset)
4811 in_p = 0, low = low0, high = high0;
4812 else
4813 in_p = 0, low = low0, high = high1;
4814 }
4815
4816 *pin_p = in_p, *plow = low, *phigh = high;
4817 return 1;
4818 }
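
/* Worked example (not from the sources) for the in0_p && in1_p case
   above: "and"ing + [2, 10] with + [5, 20].  Range 0 starts first, the
   ranges overlap, and neither is a subset, so the result runs from the
   start of the second to the end of the first, + [5, 10]:  */

static int
demo_merged_and (int x)
{
  /* Same as (x >= 2 && x <= 10) && (x >= 5 && x <= 20).  */
  return x >= 5 && x <= 10;
}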
4819 \f
4820
4821 /* Subroutine of fold, looking inside expressions of the form
4822 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4823 of the COND_EXPR. This function is being used also to optimize
4824 A op B ? C : A, by reversing the comparison first.
4825
4826 Return a folded expression whose code is not a COND_EXPR
4827 anymore, or NULL_TREE if no folding opportunity is found. */
4828
4829 static tree
4830 fold_cond_expr_with_comparison (location_t loc, tree type,
4831 tree arg0, tree arg1, tree arg2)
4832 {
4833 enum tree_code comp_code = TREE_CODE (arg0);
4834 tree arg00 = TREE_OPERAND (arg0, 0);
4835 tree arg01 = TREE_OPERAND (arg0, 1);
4836 tree arg1_type = TREE_TYPE (arg1);
4837 tree tem;
4838
4839 STRIP_NOPS (arg1);
4840 STRIP_NOPS (arg2);
4841
4842 /* If we have A op 0 ? A : -A, consider applying the following
4843 transformations:
4844
4845 A == 0? A : -A same as -A
4846 A != 0? A : -A same as A
4847 A >= 0? A : -A same as abs (A)
4848 A > 0? A : -A same as abs (A)
4849 A <= 0? A : -A same as -abs (A)
4850 A < 0? A : -A same as -abs (A)
4851
4852 None of these transformations work for modes with signed
4853 zeros. If A is +/-0, the first two transformations will
4854 change the sign of the result (from +0 to -0, or vice
4855 versa). The last four will fix the sign of the result,
4856 even though the original expressions could be positive or
4857 negative, depending on the sign of A.
4858
4859 Note that all these transformations are correct if A is
4860 NaN, since the two alternatives (A and -A) are also NaNs. */
4861 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4862 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4863 ? real_zerop (arg01)
4864 : integer_zerop (arg01))
4865 && ((TREE_CODE (arg2) == NEGATE_EXPR
4866 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4867 /* In the case that A is of the form X-Y, '-A' (arg2) may
4868 have already been folded to Y-X, check for that. */
4869 || (TREE_CODE (arg1) == MINUS_EXPR
4870 && TREE_CODE (arg2) == MINUS_EXPR
4871 && operand_equal_p (TREE_OPERAND (arg1, 0),
4872 TREE_OPERAND (arg2, 1), 0)
4873 && operand_equal_p (TREE_OPERAND (arg1, 1),
4874 TREE_OPERAND (arg2, 0), 0))))
4875 switch (comp_code)
4876 {
4877 case EQ_EXPR:
4878 case UNEQ_EXPR:
4879 tem = fold_convert_loc (loc, arg1_type, arg1);
4880 return pedantic_non_lvalue_loc (loc,
4881 fold_convert_loc (loc, type,
4882 negate_expr (tem)));
4883 case NE_EXPR:
4884 case LTGT_EXPR:
4885 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4886 case UNGE_EXPR:
4887 case UNGT_EXPR:
4888 if (flag_trapping_math)
4889 break;
4890 /* Fall through. */
4891 case GE_EXPR:
4892 case GT_EXPR:
4893 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4894 arg1 = fold_convert_loc (loc, signed_type_for
4895 (TREE_TYPE (arg1)), arg1);
4896 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4897 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4898 case UNLE_EXPR:
4899 case UNLT_EXPR:
4900 if (flag_trapping_math)
4901 break;
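/* Fall through. */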
4902 case LE_EXPR:
4903 case LT_EXPR:
4904 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4905 arg1 = fold_convert_loc (loc, signed_type_for
4906 (TREE_TYPE (arg1)), arg1);
4907 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4908 return negate_expr (fold_convert_loc (loc, type, tem));
4909 default:
4910 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4911 break;
4912 }
4913
4914 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4915 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4916 both transformations are correct when A is NaN: A != 0
4917 is then true, and A == 0 is false. */
4918
4919 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4920 && integer_zerop (arg01) && integer_zerop (arg2))
4921 {
4922 if (comp_code == NE_EXPR)
4923 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4924 else if (comp_code == EQ_EXPR)
4925 return build_zero_cst (type);
4926 }
4927
4928 /* Try some transformations of A op B ? A : B.
4929
4930 A == B? A : B same as B
4931 A != B? A : B same as A
4932 A >= B? A : B same as max (A, B)
4933 A > B? A : B same as max (B, A)
4934 A <= B? A : B same as min (A, B)
4935 A < B? A : B same as min (B, A)
4936
4937 As above, these transformations don't work in the presence
4938 of signed zeros. For example, if A and B are zeros of
4939 opposite sign, the first two transformations will change
4940 the sign of the result. In the last four, the original
4941 expressions give different results for (A=+0, B=-0) and
4942 (A=-0, B=+0), but the transformed expressions do not.
4943
4944 The first two transformations are correct if either A or B
4945 is a NaN. In the first transformation, the condition will
4946 be false, and B will indeed be chosen. In the case of the
4947 second transformation, the condition A != B will be true,
4948 and A will be chosen.
4949
4950 The conversions to max() and min() are not correct if B is
4951 a number and A is not. The conditions in the original
4952 expressions will be false, so all four give B. The min()
4953 and max() versions would give a NaN instead. */
4954 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4955 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4956 /* Avoid these transformations if the COND_EXPR may be used
4957 as an lvalue in the C++ front-end. PR c++/19199. */
4958 && (in_gimple_form
4959 || VECTOR_TYPE_P (type)
4960 || (! lang_GNU_CXX ()
4961 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4962 || ! maybe_lvalue_p (arg1)
4963 || ! maybe_lvalue_p (arg2)))
4964 {
4965 tree comp_op0 = arg00;
4966 tree comp_op1 = arg01;
4967 tree comp_type = TREE_TYPE (comp_op0);
4968
4969 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4970 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4971 {
4972 comp_type = type;
4973 comp_op0 = arg1;
4974 comp_op1 = arg2;
4975 }
4976
4977 switch (comp_code)
4978 {
4979 case EQ_EXPR:
4980 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4981 case NE_EXPR:
4982 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4983 case LE_EXPR:
4984 case LT_EXPR:
4985 case UNLE_EXPR:
4986 case UNLT_EXPR:
4987 /* In C++ a ?: expression can be an lvalue, so put the
4988 operand which will be used if they are equal first
4989 so that we can convert this back to the
4990 corresponding COND_EXPR. */
4991 if (!HONOR_NANS (element_mode (arg1)))
4992 {
4993 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4994 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4995 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4996 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4997 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4998 comp_op1, comp_op0);
4999 return pedantic_non_lvalue_loc (loc,
5000 fold_convert_loc (loc, type, tem));
5001 }
5002 break;
5003 case GE_EXPR:
5004 case GT_EXPR:
5005 case UNGE_EXPR:
5006 case UNGT_EXPR:
5007 if (!HONOR_NANS (element_mode (arg1)))
5008 {
5009 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5010 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5011 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5012 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5013 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5014 comp_op1, comp_op0);
5015 return pedantic_non_lvalue_loc (loc,
5016 fold_convert_loc (loc, type, tem));
5017 }
5018 break;
5019 case UNEQ_EXPR:
5020 if (!HONOR_NANS (element_mode (arg1)))
5021 return pedantic_non_lvalue_loc (loc,
5022 fold_convert_loc (loc, type, arg2));
5023 break;
5024 case LTGT_EXPR:
5025 if (!HONOR_NANS (element_mode (arg1)))
5026 return pedantic_non_lvalue_loc (loc,
5027 fold_convert_loc (loc, type, arg1));
5028 break;
5029 default:
5030 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5031 break;
5032 }
5033 }
5034
5035 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5036 we might still be able to simplify this. For example,
5037 if C1 is one less or one more than C2, this might have started
5038 out as a MIN or MAX and been transformed by this function.
5039 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5040
5041 if (INTEGRAL_TYPE_P (type)
5042 && TREE_CODE (arg01) == INTEGER_CST
5043 && TREE_CODE (arg2) == INTEGER_CST)
5044 switch (comp_code)
5045 {
5046 case EQ_EXPR:
5047 if (TREE_CODE (arg1) == INTEGER_CST)
5048 break;
5049 /* We can replace A with C1 in this case. */
5050 arg1 = fold_convert_loc (loc, type, arg01);
5051 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5052
5053 case LT_EXPR:
5054 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5055 MIN_EXPR, to preserve the signedness of the comparison. */
5056 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5057 OEP_ONLY_CONST)
5058 && operand_equal_p (arg01,
5059 const_binop (PLUS_EXPR, arg2,
5060 build_int_cst (type, 1)),
5061 OEP_ONLY_CONST))
5062 {
5063 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5064 fold_convert_loc (loc, TREE_TYPE (arg00),
5065 arg2));
5066 return pedantic_non_lvalue_loc (loc,
5067 fold_convert_loc (loc, type, tem));
5068 }
5069 break;
5070
5071 case LE_EXPR:
5072 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5073 as above. */
5074 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5075 OEP_ONLY_CONST)
5076 && operand_equal_p (arg01,
5077 const_binop (MINUS_EXPR, arg2,
5078 build_int_cst (type, 1)),
5079 OEP_ONLY_CONST))
5080 {
5081 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5082 fold_convert_loc (loc, TREE_TYPE (arg00),
5083 arg2));
5084 return pedantic_non_lvalue_loc (loc,
5085 fold_convert_loc (loc, type, tem));
5086 }
5087 break;
5088
5089 case GT_EXPR:
5090 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5091 MAX_EXPR, to preserve the signedness of the comparison. */
5092 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5093 OEP_ONLY_CONST)
5094 && operand_equal_p (arg01,
5095 const_binop (MINUS_EXPR, arg2,
5096 build_int_cst (type, 1)),
5097 OEP_ONLY_CONST))
5098 {
5099 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5100 fold_convert_loc (loc, TREE_TYPE (arg00),
5101 arg2));
5102 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5103 }
5104 break;
5105
5106 case GE_EXPR:
5107 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5108 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5109 OEP_ONLY_CONST)
5110 && operand_equal_p (arg01,
5111 const_binop (PLUS_EXPR, arg2,
5112 build_int_cst (type, 1)),
5113 OEP_ONLY_CONST))
5114 {
5115 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5116 fold_convert_loc (loc, TREE_TYPE (arg00),
5117 arg2));
5118 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5119 }
5120 break;
5121 case NE_EXPR:
5122 break;
5123 default:
5124 gcc_unreachable ();
5125 }
5126
5127 return NULL_TREE;
5128 }
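
/* Illustrative sketch (not part of the sources) of the A op 0 ? A : -A
   family handled above, in integer arithmetic where there are no
   signed zeros:  */

static int
demo_cond_abs (int a)
{
  /* Folded to abs (a) by the GE_EXPR case; like abs, this is undefined
     for INT_MIN.  */
  return a >= 0 ? a : -a;
}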
5129
5130
5131 \f
5132 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5133 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5134 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5135 false) >= 2)
5136 #endif
5137
5138 /* EXP is some logical combination of boolean tests. See if we can
5139 merge it into some range test. Return the new tree if so. */
5140
5141 static tree
5142 fold_range_test (location_t loc, enum tree_code code, tree type,
5143 tree op0, tree op1)
5144 {
5145 int or_op = (code == TRUTH_ORIF_EXPR
5146 || code == TRUTH_OR_EXPR);
5147 int in0_p, in1_p, in_p;
5148 tree low0, low1, low, high0, high1, high;
5149 bool strict_overflow_p = false;
5150 tree tem, lhs, rhs;
5151 const char * const warnmsg = G_("assuming signed overflow does not occur "
5152 "when simplifying range test");
5153
5154 if (!INTEGRAL_TYPE_P (type))
5155 return 0;
5156
5157 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5158 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5159
5160 /* If this is an OR operation, invert both sides; we will invert
5161 again at the end. */
5162 if (or_op)
5163 in0_p = ! in0_p, in1_p = ! in1_p;
5164
5165 /* If both expressions are the same, if we can merge the ranges, and we
5166 can build the range test, return it or it inverted. If one of the
5167 ranges is always true or always false, consider it to be the same
5168 expression as the other. */
5169 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5170 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5171 in1_p, low1, high1)
5172 && 0 != (tem = (build_range_check (loc, type,
5173 lhs != 0 ? lhs
5174 : rhs != 0 ? rhs : integer_zero_node,
5175 in_p, low, high))))
5176 {
5177 if (strict_overflow_p)
5178 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5179 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5180 }
5181
5182 /* On machines where the branch cost is expensive, if this is a
5183 short-circuited branch and the underlying object on both sides
5184 is the same, make a non-short-circuit operation. */
5185 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5186 && lhs != 0 && rhs != 0
5187 && (code == TRUTH_ANDIF_EXPR
5188 || code == TRUTH_ORIF_EXPR)
5189 && operand_equal_p (lhs, rhs, 0))
5190 {
5191 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5192 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5193 which cases we can't do this. */
5194 if (simple_operand_p (lhs))
5195 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5196 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5197 type, op0, op1);
5198
5199 else if (!lang_hooks.decls.global_bindings_p ()
5200 && !CONTAINS_PLACEHOLDER_P (lhs))
5201 {
5202 tree common = save_expr (lhs);
5203
5204 if (0 != (lhs = build_range_check (loc, type, common,
5205 or_op ? ! in0_p : in0_p,
5206 low0, high0))
5207 && (0 != (rhs = build_range_check (loc, type, common,
5208 or_op ? ! in1_p : in1_p,
5209 low1, high1))))
5210 {
5211 if (strict_overflow_p)
5212 fold_overflow_warning (warnmsg,
5213 WARN_STRICT_OVERFLOW_COMPARISON);
5214 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5215 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5216 type, lhs, rhs);
5217 }
5218 }
5219 }
5220
5221 return 0;
5222 }
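
/* Illustrative sketch (not part of the sources) of the non-short-circuit
   rewrite above: when both operands test the same simple object, the
   TRUTH_ANDIF_EXPR can be evaluated as a branch-free TRUTH_AND_EXPR:  */

static int
demo_non_short_circuit (int a)
{
  /* "a > 0 && a < 10" computed without a second branch.  */
  return (a > 0) & (a < 10);
}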
5223 \f
5224 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5225 bit value. Arrange things so the extra bits will be set to zero if and
5226 only if C is sign-extended to its full width. If MASK is nonzero,
5227 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5228
5229 static tree
5230 unextend (tree c, int p, int unsignedp, tree mask)
5231 {
5232 tree type = TREE_TYPE (c);
5233 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5234 tree temp;
5235
5236 if (p == modesize || unsignedp)
5237 return c;
5238
5239 /* We work by getting just the sign bit into the low-order bit, then
5240 into the high-order bit, then sign-extend. We then XOR that value
5241 with C. */
5242 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5243
5244 /* We must use a signed type in order to get an arithmetic right shift.
5245 However, we must also avoid introducing accidental overflows, so that
5246 a subsequent call to integer_zerop will work. Hence we must
5247 do the type conversion here. At this point, the constant is either
5248 zero or one, and the conversion to a signed type can never overflow.
5249 We could get an overflow if this conversion is done anywhere else. */
5250 if (TYPE_UNSIGNED (type))
5251 temp = fold_convert (signed_type_for (type), temp);
5252
5253 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5254 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5255 if (mask != 0)
5256 temp = const_binop (BIT_AND_EXPR, temp,
5257 fold_convert (TREE_TYPE (c), mask));
5258 /* If necessary, convert the type back to match the type of C. */
5259 if (TYPE_UNSIGNED (type))
5260 temp = fold_convert (type, temp);
5261
5262 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5263 }
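
/* Illustrative sketch (not part of the sources) of the shift trick used
   above: pushing a field's sign bit to the high-order bit and shifting
   back arithmetically sign-extends a P-bit value.  Assumes a 32-bit int
   and GCC's arithmetic right shift of negative values.  */

static int
demo_sign_extend (int v, int p)
{
  return (v << (32 - p)) >> (32 - p);
}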
5264 \f
5265 /* For an expression that has the form
5266 (A && B) || ~B
5267 or
5268 (A || B) && ~B,
5269 we can drop one of the inner expressions and simplify to
5270 A || ~B
5271 or
5272 A && ~B
5273 LOC is the location of the resulting expression. OP is the inner
5274 logical operation; the left-hand side in the examples above, while CMPOP
5275 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5276 removing a condition that guards another, as in
5277 (A != NULL && A->...) || A == NULL
5278 which we must not transform. If RHS_ONLY is true, only eliminate the
5279 right-most operand of the inner logical operation. */
5280
5281 static tree
5282 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5283 bool rhs_only)
5284 {
5285 tree type = TREE_TYPE (cmpop);
5286 enum tree_code code = TREE_CODE (cmpop);
5287 enum tree_code truthop_code = TREE_CODE (op);
5288 tree lhs = TREE_OPERAND (op, 0);
5289 tree rhs = TREE_OPERAND (op, 1);
5290 tree orig_lhs = lhs, orig_rhs = rhs;
5291 enum tree_code rhs_code = TREE_CODE (rhs);
5292 enum tree_code lhs_code = TREE_CODE (lhs);
5293 enum tree_code inv_code;
5294
5295 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5296 return NULL_TREE;
5297
5298 if (TREE_CODE_CLASS (code) != tcc_comparison)
5299 return NULL_TREE;
5300
5301 if (rhs_code == truthop_code)
5302 {
5303 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5304 if (newrhs != NULL_TREE)
5305 {
5306 rhs = newrhs;
5307 rhs_code = TREE_CODE (rhs);
5308 }
5309 }
5310 if (lhs_code == truthop_code && !rhs_only)
5311 {
5312 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5313 if (newlhs != NULL_TREE)
5314 {
5315 lhs = newlhs;
5316 lhs_code = TREE_CODE (lhs);
5317 }
5318 }
5319
5320 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5321 if (inv_code == rhs_code
5322 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5323 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5324 return lhs;
5325 if (!rhs_only && inv_code == lhs_code
5326 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5327 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5328 return rhs;
5329 if (rhs != orig_rhs || lhs != orig_lhs)
5330 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5331 lhs, rhs);
5332 return NULL_TREE;
5333 }
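/* Editor's illustrative check, not part of the original source: an
   exhaustive verification of the two identities handled above,
   ((A && B) || !B) == (A || !B) and ((A || B) && !B) == (A && !B).
   The helper name is hypothetical.  */

static int
merge_arm_identities_hold (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      if ((((a && b) || !b) != (a || !b))
          || (((a || b) && !b) != (a && !b)))
        return 0;
  return 1;
}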
5334
5335 /* Find ways of folding logical expressions of LHS and RHS:
5336 Try to merge two comparisons to the same innermost item.
5337 Look for range tests like "ch >= '0' && ch <= '9'".
5338 Look for combinations of simple terms on machines with expensive branches
5339 and evaluate the RHS unconditionally.
5340
5341 For example, if we have p->a == 2 && p->b == 4 and we can make an
5342 object large enough to span both A and B, we can do this with a comparison
5343 against the object ANDed with a mask.
5344
5345 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5346 operations to do this with one comparison.
5347
5348 We check for both normal comparisons and the BIT_AND_EXPRs made by
5349 this function and the one above.
5350
5351 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5352 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5353
5354 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5355 two operands.
5356
5357 We return the simplified tree or 0 if no optimization is possible. */
5358
5359 static tree
5360 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5361 tree lhs, tree rhs)
5362 {
5363 /* If this is the "or" of two comparisons, we can do something if
5364 the comparisons are NE_EXPR. If this is the "and", we can do something
5365 if the comparisons are EQ_EXPR. I.e.,
5366 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5367
5368 WANTED_CODE is this operation code. For single bit fields, we can
5369 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5370 comparison for one-bit fields. */
5371
5372 enum tree_code wanted_code;
5373 enum tree_code lcode, rcode;
5374 tree ll_arg, lr_arg, rl_arg, rr_arg;
5375 tree ll_inner, lr_inner, rl_inner, rr_inner;
5376 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5377 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5378 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5379 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5380 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5381 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5382 machine_mode lnmode, rnmode;
5383 tree ll_mask, lr_mask, rl_mask, rr_mask;
5384 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5385 tree l_const, r_const;
5386 tree lntype, rntype, result;
5387 HOST_WIDE_INT first_bit, end_bit;
5388 int volatilep;
5389
5390 /* Start by getting the comparison codes. Fail if anything is volatile.
5391 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5392 it were surrounded with a NE_EXPR. */
5393
5394 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5395 return 0;
5396
5397 lcode = TREE_CODE (lhs);
5398 rcode = TREE_CODE (rhs);
5399
5400 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5401 {
5402 lhs = build2 (NE_EXPR, truth_type, lhs,
5403 build_int_cst (TREE_TYPE (lhs), 0));
5404 lcode = NE_EXPR;
5405 }
5406
5407 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5408 {
5409 rhs = build2 (NE_EXPR, truth_type, rhs,
5410 build_int_cst (TREE_TYPE (rhs), 0));
5411 rcode = NE_EXPR;
5412 }
5413
5414 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5415 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5416 return 0;
5417
5418 ll_arg = TREE_OPERAND (lhs, 0);
5419 lr_arg = TREE_OPERAND (lhs, 1);
5420 rl_arg = TREE_OPERAND (rhs, 0);
5421 rr_arg = TREE_OPERAND (rhs, 1);
5422
5423 /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
5424 if (simple_operand_p (ll_arg)
5425 && simple_operand_p (lr_arg))
5426 {
5427 if (operand_equal_p (ll_arg, rl_arg, 0)
5428 && operand_equal_p (lr_arg, rr_arg, 0))
5429 {
5430 result = combine_comparisons (loc, code, lcode, rcode,
5431 truth_type, ll_arg, lr_arg);
5432 if (result)
5433 return result;
5434 }
5435 else if (operand_equal_p (ll_arg, rr_arg, 0)
5436 && operand_equal_p (lr_arg, rl_arg, 0))
5437 {
5438 result = combine_comparisons (loc, code, lcode,
5439 swap_tree_comparison (rcode),
5440 truth_type, ll_arg, lr_arg);
5441 if (result)
5442 return result;
5443 }
5444 }
5445
5446 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5447 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5448
5449 /* If the RHS can be evaluated unconditionally and its operands are
5450 simple, it wins to evaluate the RHS unconditionally on machines
5451 with expensive branches. In this case, this isn't a comparison
5452 that can be merged. */
5453
5454 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5455 false) >= 2
5456 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5457 && simple_operand_p (rl_arg)
5458 && simple_operand_p (rr_arg))
5459 {
5460 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5461 if (code == TRUTH_OR_EXPR
5462 && lcode == NE_EXPR && integer_zerop (lr_arg)
5463 && rcode == NE_EXPR && integer_zerop (rr_arg)
5464 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5465 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5466 return build2_loc (loc, NE_EXPR, truth_type,
5467 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5468 ll_arg, rl_arg),
5469 build_int_cst (TREE_TYPE (ll_arg), 0));
5470
5471 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5472 if (code == TRUTH_AND_EXPR
5473 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5474 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5475 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5476 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5477 return build2_loc (loc, EQ_EXPR, truth_type,
5478 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5479 ll_arg, rl_arg),
5480 build_int_cst (TREE_TYPE (ll_arg), 0));
5481 }
5482
5483 /* See if the comparisons can be merged. Then get all the parameters for
5484 each side. */
5485
5486 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5487 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5488 return 0;
5489
5490 volatilep = 0;
5491 ll_inner = decode_field_reference (loc, ll_arg,
5492 &ll_bitsize, &ll_bitpos, &ll_mode,
5493 &ll_unsignedp, &volatilep, &ll_mask,
5494 &ll_and_mask);
5495 lr_inner = decode_field_reference (loc, lr_arg,
5496 &lr_bitsize, &lr_bitpos, &lr_mode,
5497 &lr_unsignedp, &volatilep, &lr_mask,
5498 &lr_and_mask);
5499 rl_inner = decode_field_reference (loc, rl_arg,
5500 &rl_bitsize, &rl_bitpos, &rl_mode,
5501 &rl_unsignedp, &volatilep, &rl_mask,
5502 &rl_and_mask);
5503 rr_inner = decode_field_reference (loc, rr_arg,
5504 &rr_bitsize, &rr_bitpos, &rr_mode,
5505 &rr_unsignedp, &volatilep, &rr_mask,
5506 &rr_and_mask);
5507
5508 /* The inner operation on the lhs of each comparison must be the
5509 same if we are to be able to do anything.
5510 Then see if we have constants. If not, the same must be true for
5511 the rhs's. */
5512 if (volatilep || ll_inner == 0 || rl_inner == 0
5513 || ! operand_equal_p (ll_inner, rl_inner, 0))
5514 return 0;
5515
5516 if (TREE_CODE (lr_arg) == INTEGER_CST
5517 && TREE_CODE (rr_arg) == INTEGER_CST)
5518 l_const = lr_arg, r_const = rr_arg;
5519 else if (lr_inner == 0 || rr_inner == 0
5520 || ! operand_equal_p (lr_inner, rr_inner, 0))
5521 return 0;
5522 else
5523 l_const = r_const = 0;
5524
5525 /* If either comparison code is not correct for our logical operation,
5526 fail. However, we can convert a one-bit comparison against zero into
5527 the opposite comparison against that bit being set in the field. */
5528
5529 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5530 if (lcode != wanted_code)
5531 {
5532 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5533 {
5534 /* Make the left operand unsigned, since we are only interested
5535 in the value of one bit. Otherwise we are doing the wrong
5536 thing below. */
5537 ll_unsignedp = 1;
5538 l_const = ll_mask;
5539 }
5540 else
5541 return 0;
5542 }
5543
5544 /* This is analogous to the code for l_const above. */
5545 if (rcode != wanted_code)
5546 {
5547 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5548 {
5549 rl_unsignedp = 1;
5550 r_const = rl_mask;
5551 }
5552 else
5553 return 0;
5554 }
5555
5556 /* See if we can find a mode that contains both fields being compared on
5557 the left. If we can't, fail. Otherwise, update all constants and masks
5558 to be relative to a field of that size. */
5559 first_bit = MIN (ll_bitpos, rl_bitpos);
5560 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5561 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5562 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5563 volatilep);
5564 if (lnmode == VOIDmode)
5565 return 0;
5566
5567 lnbitsize = GET_MODE_BITSIZE (lnmode);
5568 lnbitpos = first_bit & ~ (lnbitsize - 1);
5569 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5570 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5571
5572 if (BYTES_BIG_ENDIAN)
5573 {
5574 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5575 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5576 }
5577
5578 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5579 size_int (xll_bitpos));
5580 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5581 size_int (xrl_bitpos));
5582
5583 if (l_const)
5584 {
5585 l_const = fold_convert_loc (loc, lntype, l_const);
5586 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5587 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5588 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5589 fold_build1_loc (loc, BIT_NOT_EXPR,
5590 lntype, ll_mask))))
5591 {
5592 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5593
5594 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5595 }
5596 }
5597 if (r_const)
5598 {
5599 r_const = fold_convert_loc (loc, lntype, r_const);
5600 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5601 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5602 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5603 fold_build1_loc (loc, BIT_NOT_EXPR,
5604 lntype, rl_mask))))
5605 {
5606 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5607
5608 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5609 }
5610 }
5611
5612 /* If the right sides are not constant, do the same for them. Also,
5613 disallow this optimization if a size or signedness mismatch occurs
5614 between the left and right sides. */
5615 if (l_const == 0)
5616 {
5617 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5618 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5619 /* Make sure the two fields on the right
5620 correspond to the left without being swapped. */
5621 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5622 return 0;
5623
5624 first_bit = MIN (lr_bitpos, rr_bitpos);
5625 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5626 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5627 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5628 volatilep);
5629 if (rnmode == VOIDmode)
5630 return 0;
5631
5632 rnbitsize = GET_MODE_BITSIZE (rnmode);
5633 rnbitpos = first_bit & ~ (rnbitsize - 1);
5634 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5635 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5636
5637 if (BYTES_BIG_ENDIAN)
5638 {
5639 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5640 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5641 }
5642
5643 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5644 rntype, lr_mask),
5645 size_int (xlr_bitpos));
5646 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5647 rntype, rr_mask),
5648 size_int (xrr_bitpos));
5649
5650 /* Make a mask that corresponds to both fields being compared.
5651 Do this for both items being compared. If the operands are the
5652 same size and the bits being compared are in the same position
5653 then we can do this by masking both and comparing the masked
5654 results. */
5655 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5656 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5657 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5658 {
5659 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5660 ll_unsignedp || rl_unsignedp);
5661 if (! all_ones_mask_p (ll_mask, lnbitsize))
5662 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5663
5664 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5665 lr_unsignedp || rr_unsignedp);
5666 if (! all_ones_mask_p (lr_mask, rnbitsize))
5667 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5668
5669 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5670 }
5671
5672 /* There is still another way we can do something: If both pairs of
5673 fields being compared are adjacent, we may be able to make a wider
5674 field containing them both.
5675
5676 Note that we still must mask the lhs/rhs expressions. Furthermore,
5677 the mask must be shifted to account for the shift done by
5678 make_bit_field_ref. */
5679 if ((ll_bitsize + ll_bitpos == rl_bitpos
5680 && lr_bitsize + lr_bitpos == rr_bitpos)
5681 || (ll_bitpos == rl_bitpos + rl_bitsize
5682 && lr_bitpos == rr_bitpos + rr_bitsize))
5683 {
5684 tree type;
5685
5686 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5687 ll_bitsize + rl_bitsize,
5688 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5689 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5690 lr_bitsize + rr_bitsize,
5691 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5692
5693 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5694 size_int (MIN (xll_bitpos, xrl_bitpos)));
5695 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5696 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5697
5698 /* Convert to the smaller type before masking out unwanted bits. */
5699 type = lntype;
5700 if (lntype != rntype)
5701 {
5702 if (lnbitsize > rnbitsize)
5703 {
5704 lhs = fold_convert_loc (loc, rntype, lhs);
5705 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5706 type = rntype;
5707 }
5708 else if (lnbitsize < rnbitsize)
5709 {
5710 rhs = fold_convert_loc (loc, lntype, rhs);
5711 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5712 type = lntype;
5713 }
5714 }
5715
5716 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5717 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5718
5719 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5720 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5721
5722 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5723 }
5724
5725 return 0;
5726 }
5727
5728 /* Handle the case of comparisons with constants. If there is something in
5729 common between the masks, those bits of the constants must be the same.
5730 If not, the condition is always false. Test for this to avoid generating
5731 incorrect code below. */
5732 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5733 if (! integer_zerop (result)
5734 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5735 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5736 {
5737 if (wanted_code == NE_EXPR)
5738 {
5739 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5740 return constant_boolean_node (true, truth_type);
5741 }
5742 else
5743 {
5744 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5745 return constant_boolean_node (false, truth_type);
5746 }
5747 }
5748
5749 /* Construct the expression we will return. First get the component
5750 reference we will make. Unless the mask is all ones the width of
5751 that field, perform the mask operation. Then compare with the
5752 merged constant. */
5753 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5754 ll_unsignedp || rl_unsignedp);
5755
5756 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5757 if (! all_ones_mask_p (ll_mask, lnbitsize))
5758 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5759
5760 return build2_loc (loc, wanted_code, truth_type, result,
5761 const_binop (BIT_IOR_EXPR, l_const, r_const));
5762 }
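/* Editor's illustrative check, not part of the original source: the
   bitwise-OR merges performed above when the RHS is cheap to evaluate,
   ((a != 0) || (b != 0)) == ((a | b) != 0) and
   ((a == 0) && (b == 0)) == ((a | b) == 0).  The helper name is
   hypothetical.  */

static int
or_merge_identities_hold (unsigned int a, unsigned int b)
{
  return (((a != 0) || (b != 0)) == ((a | b) != 0))
         && (((a == 0) && (b == 0)) == ((a | b) == 0));
}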
5763 \f
5764 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5765 constant. */
5766
5767 static tree
5768 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5769 tree op0, tree op1)
5770 {
5771 tree arg0 = op0;
5772 enum tree_code op_code;
5773 tree comp_const;
5774 tree minmax_const;
5775 int consts_equal, consts_lt;
5776 tree inner;
5777
5778 STRIP_SIGN_NOPS (arg0);
5779
5780 op_code = TREE_CODE (arg0);
5781 minmax_const = TREE_OPERAND (arg0, 1);
5782 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5783 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5784 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5785 inner = TREE_OPERAND (arg0, 0);
5786
5787 /* If something does not permit us to optimize, return NULL_TREE. */
5788 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5789 || TREE_CODE (comp_const) != INTEGER_CST
5790 || TREE_OVERFLOW (comp_const)
5791 || TREE_CODE (minmax_const) != INTEGER_CST
5792 || TREE_OVERFLOW (minmax_const))
5793 return NULL_TREE;
5794
5795 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5796 and GT_EXPR, doing the rest with recursive calls using logical
5797 simplifications. */
5798 switch (code)
5799 {
5800 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5801 {
5802 tree tem
5803 = optimize_minmax_comparison (loc,
5804 invert_tree_comparison (code, false),
5805 type, op0, op1);
5806 if (tem)
5807 return invert_truthvalue_loc (loc, tem);
5808 return NULL_TREE;
5809 }
5810
5811 case GE_EXPR:
5812 return
5813 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5814 optimize_minmax_comparison
5815 (loc, EQ_EXPR, type, arg0, comp_const),
5816 optimize_minmax_comparison
5817 (loc, GT_EXPR, type, arg0, comp_const));
5818
5819 case EQ_EXPR:
5820 if (op_code == MAX_EXPR && consts_equal)
5821 /* MAX (X, 0) == 0 -> X <= 0 */
5822 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5823
5824 else if (op_code == MAX_EXPR && consts_lt)
5825 /* MAX (X, 0) == 5 -> X == 5 */
5826 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5827
5828 else if (op_code == MAX_EXPR)
5829 /* MAX (X, 0) == -1 -> false */
5830 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5831
5832 else if (consts_equal)
5833 /* MIN (X, 0) == 0 -> X >= 0 */
5834 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5835
5836 else if (consts_lt)
5837 /* MIN (X, 0) == 5 -> false */
5838 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5839
5840 else
5841 /* MIN (X, 0) == -1 -> X == -1 */
5842 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5843
5844 case GT_EXPR:
5845 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5846 /* MAX (X, 0) > 0 -> X > 0
5847 MAX (X, 0) > 5 -> X > 5 */
5848 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5849
5850 else if (op_code == MAX_EXPR)
5851 /* MAX (X, 0) > -1 -> true */
5852 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5853
5854 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5855 /* MIN (X, 0) > 0 -> false
5856 MIN (X, 0) > 5 -> false */
5857 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5858
5859 else
5860 /* MIN (X, 0) > -1 -> X > -1 */
5861 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5862
5863 default:
5864 return NULL_TREE;
5865 }
5866 }
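/* Editor's illustrative check, not part of the original source,
   generalizing the EQ_EXPR rewrites above from 0 to any constant C:
   MAX (X, C) == C exactly when X <= C, and MIN (X, C) == C exactly
   when X >= C.  The helper name is hypothetical.  */

static int
minmax_compare_identities_hold (int x, int c)
{
  int max = x > c ? x : c;
  int min = x < c ? x : c;
  return ((max == c) == (x <= c)) && ((min == c) == (x >= c));
}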
5867 \f
5868 /* T is an integer expression that is being multiplied by, divided by, or
5869 taken modulo (CODE says which, and what kind of divide or modulus) a
5870 constant C. See if we can eliminate that operation by folding it with
5871 other operations already in T. WIDE_TYPE, if non-null, is a type that
5872 should be used for the computation if wider than our type.
5873
5874 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5875 (X * 2) + (Y * 4). We must, however, be assured that either the original
5876 expression would not overflow or that overflow is undefined for the type
5877 in the language in question.
5878
5879 If we return a non-null expression, it is an equivalent form of the
5880 original computation, but need not be in the original type.
5881
5882 We set *STRICT_OVERFLOW_P to true if the return value depends on
5883 signed overflow being undefined. Otherwise we do not change
5884 *STRICT_OVERFLOW_P. */
5885
5886 static tree
5887 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5888 bool *strict_overflow_p)
5889 {
5890 /* To avoid exponential search depth, refuse to allow recursion past
5891 three levels. Beyond that (1) it's highly unlikely that we'll find
5892 something interesting and (2) we've probably processed it before
5893 when we built the inner expression. */
5894
5895 static int depth;
5896 tree ret;
5897
5898 if (depth > 3)
5899 return NULL;
5900
5901 depth++;
5902 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5903 depth--;
5904
5905 return ret;
5906 }
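/* Editor's illustrative check, not part of the original source: the
   fold described above, dividing (X * 8) + (Y * 16) by 4 to obtain
   (X * 2) + (Y * 4).  As the comment requires, the identity assumes the
   original expression does not overflow.  The helper name is
   hypothetical.  */

static int
muldiv_fold_identity_holds (int x, int y)
{
  return (x * 8 + y * 16) / 4 == x * 2 + y * 4;
}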
5907
5908 static tree
5909 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5910 bool *strict_overflow_p)
5911 {
5912 tree type = TREE_TYPE (t);
5913 enum tree_code tcode = TREE_CODE (t);
5914 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5915 > GET_MODE_SIZE (TYPE_MODE (type)))
5916 ? wide_type : type);
5917 tree t1, t2;
5918 int same_p = tcode == code;
5919 tree op0 = NULL_TREE, op1 = NULL_TREE;
5920 bool sub_strict_overflow_p;
5921
5922 /* Don't deal with constants of zero here; they confuse the code below. */
5923 if (integer_zerop (c))
5924 return NULL_TREE;
5925
5926 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5927 op0 = TREE_OPERAND (t, 0);
5928
5929 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5930 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5931
5932 /* Note that we need not handle conditional operations here since fold
5933 already handles those cases. So just do arithmetic here. */
5934 switch (tcode)
5935 {
5936 case INTEGER_CST:
5937 /* For a constant, we can always simplify if we are a multiply
5938 or (for divide and modulus) if it is a multiple of our constant. */
5939 if (code == MULT_EXPR
5940 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5941 return const_binop (code, fold_convert (ctype, t),
5942 fold_convert (ctype, c));
5943 break;
5944
5945 CASE_CONVERT: case NON_LVALUE_EXPR:
5946 /* If op0 is an expression ... */
5947 if ((COMPARISON_CLASS_P (op0)
5948 || UNARY_CLASS_P (op0)
5949 || BINARY_CLASS_P (op0)
5950 || VL_EXP_CLASS_P (op0)
5951 || EXPRESSION_CLASS_P (op0))
5952 /* ... and has wrapping overflow, and its type is smaller
5953 than ctype, then we cannot pass through as widening. */
5954 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5955 && (TYPE_PRECISION (ctype)
5956 > TYPE_PRECISION (TREE_TYPE (op0))))
5957 /* ... or this is a truncation (t is narrower than op0),
5958 then we cannot pass through this narrowing. */
5959 || (TYPE_PRECISION (type)
5960 < TYPE_PRECISION (TREE_TYPE (op0)))
5961 /* ... or signedness changes for division or modulus,
5962 then we cannot pass through this conversion. */
5963 || (code != MULT_EXPR
5964 && (TYPE_UNSIGNED (ctype)
5965 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5966 /* ... or has undefined overflow while the converted to
5967 type has not, we cannot do the operation in the inner type
5968 as that would introduce undefined overflow. */
5969 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5970 && !TYPE_OVERFLOW_UNDEFINED (type))))
5971 break;
5972
5973 /* Pass the constant down and see if we can make a simplification. If
5974 we can, replace this expression with the inner simplification for
5975 possible later conversion to our or some other type. */
5976 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5977 && TREE_CODE (t2) == INTEGER_CST
5978 && !TREE_OVERFLOW (t2)
5979 && (0 != (t1 = extract_muldiv (op0, t2, code,
5980 code == MULT_EXPR
5981 ? ctype : NULL_TREE,
5982 strict_overflow_p))))
5983 return t1;
5984 break;
5985
5986 case ABS_EXPR:
5987 /* If widening the type changes it from signed to unsigned, then we
5988 must avoid building ABS_EXPR itself as unsigned. */
5989 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5990 {
5991 tree cstype = (*signed_type_for) (ctype);
5992 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5993 != 0)
5994 {
5995 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5996 return fold_convert (ctype, t1);
5997 }
5998 break;
5999 }
6000 /* If the constant is negative, we cannot simplify this. */
6001 if (tree_int_cst_sgn (c) == -1)
6002 break;
6003 /* FALLTHROUGH */
6004 case NEGATE_EXPR:
6005 /* For division and modulus, type can't be unsigned, as e.g.
6006 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6007 For signed types, even with wrapping overflow, this is fine. */
6008 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6009 break;
6010 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6011 != 0)
6012 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6013 break;
6014
6015 case MIN_EXPR: case MAX_EXPR:
6016 /* If widening the type changes the signedness, then we can't perform
6017 this optimization as that changes the result. */
6018 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6019 break;
6020
6021 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6022 sub_strict_overflow_p = false;
6023 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6024 &sub_strict_overflow_p)) != 0
6025 && (t2 = extract_muldiv (op1, c, code, wide_type,
6026 &sub_strict_overflow_p)) != 0)
6027 {
6028 if (tree_int_cst_sgn (c) < 0)
6029 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6030 if (sub_strict_overflow_p)
6031 *strict_overflow_p = true;
6032 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6033 fold_convert (ctype, t2));
6034 }
6035 break;
6036
6037 case LSHIFT_EXPR: case RSHIFT_EXPR:
6038 /* If the second operand is constant, this is a multiplication
6039 or floor division by a power of two, so we can treat it that
6040 way unless the multiplier or divisor overflows. Signed
6041 left-shift overflow is implementation-defined rather than
6042 undefined in C90, so do not convert signed left shift into
6043 multiplication. */
6044 if (TREE_CODE (op1) == INTEGER_CST
6045 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6046 /* const_binop may not detect overflow correctly,
6047 so check for it explicitly here. */
6048 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6049 && 0 != (t1 = fold_convert (ctype,
6050 const_binop (LSHIFT_EXPR,
6051 size_one_node,
6052 op1)))
6053 && !TREE_OVERFLOW (t1))
6054 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6055 ? MULT_EXPR : FLOOR_DIV_EXPR,
6056 ctype,
6057 fold_convert (ctype, op0),
6058 t1),
6059 c, code, wide_type, strict_overflow_p);
6060 break;
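/* Editor's illustration of the case above: with unsigned X,
   (X << 3) is treated as X * 8 and (X >> 3) as the floor division
   X / 8, so e.g. a pending division of (X << 3) by 4 can fold
   to X * 2.  */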
6061
6062 case PLUS_EXPR: case MINUS_EXPR:
6063 /* See if we can eliminate the operation on both sides. If we can, we
6064 can return a new PLUS or MINUS. If we can't, the only remaining
6065 cases where we can do anything are if the second operand is a
6066 constant. */
6067 sub_strict_overflow_p = false;
6068 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6069 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6070 if (t1 != 0 && t2 != 0
6071 && (code == MULT_EXPR
6072 /* If not multiplication, we can only do this if both operands
6073 are divisible by c. */
6074 || (multiple_of_p (ctype, op0, c)
6075 && multiple_of_p (ctype, op1, c))))
6076 {
6077 if (sub_strict_overflow_p)
6078 *strict_overflow_p = true;
6079 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6080 fold_convert (ctype, t2));
6081 }
6082
6083 /* If this was a subtraction, negate OP1 and set it to be an addition.
6084 This simplifies the logic below. */
6085 if (tcode == MINUS_EXPR)
6086 {
6087 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6088 /* If OP1 was not easily negatable, the constant may be OP0. */
6089 if (TREE_CODE (op0) == INTEGER_CST)
6090 {
6091 tree tem = op0;
6092 op0 = op1;
6093 op1 = tem;
6094 tem = t1;
6095 t1 = t2;
6096 t2 = tem;
6097 }
6098 }
6099
6100 if (TREE_CODE (op1) != INTEGER_CST)
6101 break;
6102
6103 /* If either OP1 or C is negative, this optimization is not safe for
6104 some of the division and remainder types while for others we need
6105 to change the code. */
6106 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6107 {
6108 if (code == CEIL_DIV_EXPR)
6109 code = FLOOR_DIV_EXPR;
6110 else if (code == FLOOR_DIV_EXPR)
6111 code = CEIL_DIV_EXPR;
6112 else if (code != MULT_EXPR
6113 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6114 break;
6115 }
6116
6117 /* If it's a multiply or a division/modulus operation of a multiple
6118 of our constant, do the operation and verify it doesn't overflow. */
6119 if (code == MULT_EXPR
6120 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6121 {
6122 op1 = const_binop (code, fold_convert (ctype, op1),
6123 fold_convert (ctype, c));
6124 /* We allow the constant to overflow with wrapping semantics. */
6125 if (op1 == 0
6126 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6127 break;
6128 }
6129 else
6130 break;
6131
6132 /* If we have an unsigned type, we cannot widen the operation since it
6133 will change the result if the original computation overflowed. */
6134 if (TYPE_UNSIGNED (ctype) && ctype != type)
6135 break;
6136
6137 /* If we were able to eliminate our operation from the first side,
6138 apply our operation to the second side and reform the PLUS. */
6139 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6140 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6141
6142 /* The last case is if we are a multiply. In that case, we can
6143 apply the distributive law to commute the multiply and addition
6144 if the multiplication of the constants doesn't overflow
6145 and overflow is defined. With undefined overflow
6146 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6147 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6148 return fold_build2 (tcode, ctype,
6149 fold_build2 (code, ctype,
6150 fold_convert (ctype, op0),
6151 fold_convert (ctype, c)),
6152 op1);
6153
6154 break;
6155
6156 case MULT_EXPR:
6157 /* We have a special case here if we are doing something like
6158 (C * 8) % 4 since we know that's zero. */
6159 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6160 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6161 /* If the multiplication can overflow we cannot optimize this. */
6162 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6163 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6164 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6165 {
6166 *strict_overflow_p = true;
6167 return omit_one_operand (type, integer_zero_node, op0);
6168 }
6169
6170 /* ... fall through ... */
6171
6172 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6173 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6174 /* If we can extract our operation from the LHS, do so and return a
6175 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6176 do something only if the second operand is a constant. */
6177 if (same_p
6178 && (t1 = extract_muldiv (op0, c, code, wide_type,
6179 strict_overflow_p)) != 0)
6180 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6181 fold_convert (ctype, op1));
6182 else if (tcode == MULT_EXPR && code == MULT_EXPR
6183 && (t1 = extract_muldiv (op1, c, code, wide_type,
6184 strict_overflow_p)) != 0)
6185 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6186 fold_convert (ctype, t1));
6187 else if (TREE_CODE (op1) != INTEGER_CST)
6188 return 0;
6189
6190 /* If these are the same operation types, we can associate them
6191 assuming no overflow. */
6192 if (tcode == code)
6193 {
6194 bool overflow_p = false;
6195 bool overflow_mul_p;
6196 signop sign = TYPE_SIGN (ctype);
6197 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6198 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6199 if (overflow_mul_p
6200 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6201 overflow_p = true;
6202 if (!overflow_p)
6203 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6204 wide_int_to_tree (ctype, mul));
6205 }
6206
6207 /* If these operations "cancel" each other, we have the main
6208 optimizations of this pass, which occur when either constant is a
6209 multiple of the other, in which case we replace this with a single
6210 operation of either CODE or TCODE.
6211
6212 If we have an unsigned type, we cannot do this since it will change
6213 the result if the original computation overflowed. */
6214 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6215 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6216 || (tcode == MULT_EXPR
6217 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6218 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6219 && code != MULT_EXPR)))
6220 {
6221 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6222 {
6223 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6224 *strict_overflow_p = true;
6225 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6226 fold_convert (ctype,
6227 const_binop (TRUNC_DIV_EXPR,
6228 op1, c)));
6229 }
6230 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6231 {
6232 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6233 *strict_overflow_p = true;
6234 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6235 fold_convert (ctype,
6236 const_binop (TRUNC_DIV_EXPR,
6237 c, op1)));
6238 }
6239 }
6240 break;
6241
6242 default:
6243 break;
6244 }
6245
6246 return 0;
6247 }
6248 \f
6249 /* Return a node which has the indicated constant VALUE (either 0 or
6250 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6251 and is of the indicated TYPE. */
6252
6253 tree
6254 constant_boolean_node (bool value, tree type)
6255 {
6256 if (type == integer_type_node)
6257 return value ? integer_one_node : integer_zero_node;
6258 else if (type == boolean_type_node)
6259 return value ? boolean_true_node : boolean_false_node;
6260 else if (TREE_CODE (type) == VECTOR_TYPE)
6261 return build_vector_from_val (type,
6262 build_int_cst (TREE_TYPE (type),
6263 value ? -1 : 0));
6264 else
6265 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6266 }
6267
6268
6269 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6270 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6271 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6272 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6273 COND is the first argument to CODE; otherwise (as in the example
6274 given here), it is the second argument. TYPE is the type of the
6275 original expression. Return NULL_TREE if no simplification is
6276 possible. */
6277
6278 static tree
6279 fold_binary_op_with_conditional_arg (location_t loc,
6280 enum tree_code code,
6281 tree type, tree op0, tree op1,
6282 tree cond, tree arg, int cond_first_p)
6283 {
6284 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6285 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6286 tree test, true_value, false_value;
6287 tree lhs = NULL_TREE;
6288 tree rhs = NULL_TREE;
6289 enum tree_code cond_code = COND_EXPR;
6290
6291 if (TREE_CODE (cond) == COND_EXPR
6292 || TREE_CODE (cond) == VEC_COND_EXPR)
6293 {
6294 test = TREE_OPERAND (cond, 0);
6295 true_value = TREE_OPERAND (cond, 1);
6296 false_value = TREE_OPERAND (cond, 2);
6297 /* If this operand throws an exception, then it does not make
6298 sense to try to perform a logical or arithmetic operation
6299 involving it. */
6300 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6301 lhs = true_value;
6302 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6303 rhs = false_value;
6304 }
6305 else
6306 {
6307 tree testtype = TREE_TYPE (cond);
6308 test = cond;
6309 true_value = constant_boolean_node (true, testtype);
6310 false_value = constant_boolean_node (false, testtype);
6311 }
6312
6313 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6314 cond_code = VEC_COND_EXPR;
6315
6316 /* This transformation is only worthwhile if we don't have to wrap ARG
6317 in a SAVE_EXPR and the operation can be simplified without recursing
6318 on at least one of the branches once it's pushed inside the COND_EXPR. */
6319 if (!TREE_CONSTANT (arg)
6320 && (TREE_SIDE_EFFECTS (arg)
6321 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6322 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6323 return NULL_TREE;
6324
6325 arg = fold_convert_loc (loc, arg_type, arg);
6326 if (lhs == 0)
6327 {
6328 true_value = fold_convert_loc (loc, cond_type, true_value);
6329 if (cond_first_p)
6330 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6331 else
6332 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6333 }
6334 if (rhs == 0)
6335 {
6336 false_value = fold_convert_loc (loc, cond_type, false_value);
6337 if (cond_first_p)
6338 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6339 else
6340 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6341 }
6342
6343 /* Check that we have simplified at least one of the branches. */
6344 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6345 return NULL_TREE;
6346
6347 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6348 }
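/* Editor's illustrative check, not part of the original source: the
   distribution performed above, a + (b ? x : y) == (b ? a + x : a + y),
   ignoring overflow.  The helper name is hypothetical.  */

static int
cond_arg_distribution_holds (int a, int b, int x, int y)
{
  return a + (b ? x : y) == (b ? a + x : a + y);
}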
6349
6350 \f
6351 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6352
6353 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6354 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6355 ADDEND is the same as X.
6356
6357 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6358 and finite. The problematic cases are when X is zero, and its mode
6359 has signed zeros. In the case of rounding towards -infinity,
6360 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6361 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6362
6363 bool
6364 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6365 {
6366 if (!real_zerop (addend))
6367 return false;
6368
6369 /* Don't allow the fold with -fsignaling-nans. */
6370 if (HONOR_SNANS (element_mode (type)))
6371 return false;
6372
6373 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6374 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6375 return true;
6376
6377 /* In a vector or complex, we would need to check the sign of all zeros. */
6378 if (TREE_CODE (addend) != REAL_CST)
6379 return false;
6380
6381 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6382 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6383 negate = !negate;
6384
6385 /* The mode has signed zeros, and we have to honor their sign.
6386 In this situation, there is only one case we can return true for.
6387 X - 0 is the same as X unless rounding towards -infinity is
6388 supported. */
6389 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6390 }
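/* Editor's illustrative sketch, not part of the original source: why
   X + 0.0 cannot be folded to X when signed zeros are honored.  The
   helper name is hypothetical, and IEEE semantics are assumed, where
   1.0 / -0.0 is -Inf and -0.0 + 0.0 is +0.0 under round-to-nearest.  */

static int
minus_zero_addition_demo (void)
{
  double neg_zero = -0.0;
  double sum = neg_zero + 0.0;  /* +0.0, so the zero's sign is lost */
  /* Distinguish the zero signs through the sign of the reciprocal.  */
  return (1.0 / neg_zero < 0) && (1.0 / sum > 0);
}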
6391
6392 /* Subroutine of fold() that checks comparisons of built-in math
6393 functions against real constants.
6394
6395 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6396 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6397 is the type of the result and ARG0 and ARG1 are the operands of the
6398 comparison. ARG1 must be a TREE_REAL_CST.
6399
6400 The function returns the constant folded tree if a simplification
6401 can be made, and NULL_TREE otherwise. */
6402
6403 static tree
6404 fold_mathfn_compare (location_t loc,
6405 enum built_in_function fcode, enum tree_code code,
6406 tree type, tree arg0, tree arg1)
6407 {
6408 REAL_VALUE_TYPE c;
6409
6410 if (BUILTIN_SQRT_P (fcode))
6411 {
6412 tree arg = CALL_EXPR_ARG (arg0, 0);
6413 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6414
6415 c = TREE_REAL_CST (arg1);
6416 if (REAL_VALUE_NEGATIVE (c))
6417 {
6418 /* sqrt(x) < y is always false, if y is negative. */
6419 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6420 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6421
6422 /* sqrt(x) > y is always true, if y is negative and we
6423 don't care about NaNs, i.e. negative values of x. */
6424 if (code == NE_EXPR || !HONOR_NANS (mode))
6425 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6426
6427 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6428 return fold_build2_loc (loc, GE_EXPR, type, arg,
6429 build_real (TREE_TYPE (arg), dconst0));
6430 }
6431 else if (code == GT_EXPR || code == GE_EXPR)
6432 {
6433 REAL_VALUE_TYPE c2;
6434
6435 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6436 real_convert (&c2, mode, &c2);
6437
6438 if (REAL_VALUE_ISINF (c2))
6439 {
6440 /* sqrt(x) > y is x == +Inf, when y is very large. */
6441 if (HONOR_INFINITIES (mode))
6442 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6443 build_real (TREE_TYPE (arg), c2));
6444
6445 /* sqrt(x) > y is always false, when y is very large
6446 and we don't care about infinities. */
6447 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6448 }
6449
6450 /* sqrt(x) > c is the same as x > c*c. */
6451 return fold_build2_loc (loc, code, type, arg,
6452 build_real (TREE_TYPE (arg), c2));
6453 }
6454 else if (code == LT_EXPR || code == LE_EXPR)
6455 {
6456 REAL_VALUE_TYPE c2;
6457
6458 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6459 real_convert (&c2, mode, &c2);
6460
6461 if (REAL_VALUE_ISINF (c2))
6462 {
6463 /* sqrt(x) < y is always true, when y is a very large
6464 value and we don't care about NaNs or Infinities. */
6465 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6466 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6467
6468 /* sqrt(x) < y is x != +Inf when y is very large and we
6469 don't care about NaNs. */
6470 if (! HONOR_NANS (mode))
6471 return fold_build2_loc (loc, NE_EXPR, type, arg,
6472 build_real (TREE_TYPE (arg), c2));
6473
6474 /* sqrt(x) < y is x >= 0 when y is very large and we
6475 don't care about Infinities. */
6476 if (! HONOR_INFINITIES (mode))
6477 return fold_build2_loc (loc, GE_EXPR, type, arg,
6478 build_real (TREE_TYPE (arg), dconst0));
6479
6480 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6481 arg = save_expr (arg);
6482 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6483 fold_build2_loc (loc, GE_EXPR, type, arg,
6484 build_real (TREE_TYPE (arg),
6485 dconst0)),
6486 fold_build2_loc (loc, NE_EXPR, type, arg,
6487 build_real (TREE_TYPE (arg),
6488 c2)));
6489 }
6490
6491 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6492 if (! HONOR_NANS (mode))
6493 return fold_build2_loc (loc, code, type, arg,
6494 build_real (TREE_TYPE (arg), c2));
6495
6496 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6497 arg = save_expr (arg);
6498 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6499 fold_build2_loc (loc, GE_EXPR, type, arg,
6500 build_real (TREE_TYPE (arg),
6501 dconst0)),
6502 fold_build2_loc (loc, code, type, arg,
6503 build_real (TREE_TYPE (arg),
6504 c2)));
6505 }
6506 }
6507
6508 return NULL_TREE;
6509 }
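/* Editor's illustrative sketch, not part of the original source: the
   overflow case handled above.  For a large constant C, C*C overflows
   to +Inf in IEEE double, which is why sqrt(x) > C degenerates into a
   test against infinity.  The helper name is hypothetical.  */

static int
sqrt_compare_overflow_demo (void)
{
  double c = 1e200;
  double c2 = c * c;    /* overflows to +Inf in IEEE double */
  return c2 > 1.7976931348623157e308;   /* +Inf exceeds DBL_MAX */
}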
6510
6511 /* Subroutine of fold() that optimizes comparisons against Infinities,
6512 either +Inf or -Inf.
6513
6514 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6515 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6516 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6517
6518 The function returns the constant folded tree if a simplification
6519 can be made, and NULL_TREE otherwise. */
6520
6521 static tree
6522 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6523 tree arg0, tree arg1)
6524 {
6525 machine_mode mode;
6526 REAL_VALUE_TYPE max;
6527 tree temp;
6528 bool neg;
6529
6530 mode = TYPE_MODE (TREE_TYPE (arg0));
6531
6532 /* For negative infinity swap the sense of the comparison. */
6533 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6534 if (neg)
6535 code = swap_tree_comparison (code);
6536
6537 switch (code)
6538 {
6539 case GT_EXPR:
6540 /* x > +Inf is always false, if we ignore sNaNs. */
6541 if (HONOR_SNANS (mode))
6542 return NULL_TREE;
6543 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6544
6545 case LE_EXPR:
6546 /* x <= +Inf is always true, if we don't care about NaNs. */
6547 if (! HONOR_NANS (mode))
6548 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6549
6550 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6551 arg0 = save_expr (arg0);
6552 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6553
6554 case EQ_EXPR:
6555 case GE_EXPR:
6556 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6557 real_maxval (&max, neg, mode);
6558 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6559 arg0, build_real (TREE_TYPE (arg0), max));
6560
6561 case LT_EXPR:
6562 /* x < +Inf is always equal to x <= DBL_MAX. */
6563 real_maxval (&max, neg, mode);
6564 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6565 arg0, build_real (TREE_TYPE (arg0), max));
6566
6567 case NE_EXPR:
6568 /* x != +Inf is always equal to !(x > DBL_MAX). */
6569 real_maxval (&max, neg, mode);
6570 if (! HONOR_NANS (mode))
6571 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6572 arg0, build_real (TREE_TYPE (arg0), max));
6573
6574 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6575 arg0, build_real (TREE_TYPE (arg0), max));
6576 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6577
6578 default:
6579 break;
6580 }
6581
6582 return NULL_TREE;
6583 }
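/* Editor's illustrative check, not part of the original source: the
   LE_EXPR identity used above, (x <= +Inf) == (x == x); both sides are
   false exactly when X is a NaN.  The helper name is hypothetical and
   IEEE semantics are assumed, where 1.0 / 0.0 is +Inf.  */

static int
inf_compare_identity_holds (double x)
{
  double inf = 1.0 / 0.0;
  return (x <= inf) == (x == x);
}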
6584
6585 /* Subroutine of fold() that optimizes comparisons of a division by
6586 a nonzero integer constant against an integer constant, i.e.
6587 X/C1 op C2.
6588
6589 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6590 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6591 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6592
6593 The function returns the constant folded tree if a simplification
6594 can be made, and NULL_TREE otherwise. */
6595
6596 static tree
6597 fold_div_compare (location_t loc,
6598 enum tree_code code, tree type, tree arg0, tree arg1)
6599 {
6600 tree prod, tmp, hi, lo;
6601 tree arg00 = TREE_OPERAND (arg0, 0);
6602 tree arg01 = TREE_OPERAND (arg0, 1);
6603 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6604 bool neg_overflow = false;
6605 bool overflow;
6606
6607 /* We have to do this the hard way to detect unsigned overflow.
6608 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6609 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6610 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6611 neg_overflow = false;
6612
6613 if (sign == UNSIGNED)
6614 {
6615 tmp = int_const_binop (MINUS_EXPR, arg01,
6616 build_int_cst (TREE_TYPE (arg01), 1));
6617 lo = prod;
6618
6619 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6620 val = wi::add (prod, tmp, sign, &overflow);
6621 hi = force_fit_type (TREE_TYPE (arg00), val,
6622 -1, overflow | TREE_OVERFLOW (prod));
6623 }
6624 else if (tree_int_cst_sgn (arg01) >= 0)
6625 {
6626 tmp = int_const_binop (MINUS_EXPR, arg01,
6627 build_int_cst (TREE_TYPE (arg01), 1));
6628 switch (tree_int_cst_sgn (arg1))
6629 {
6630 case -1:
6631 neg_overflow = true;
6632 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6633 hi = prod;
6634 break;
6635
6636 case 0:
6637 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6638 hi = tmp;
6639 break;
6640
6641 case 1:
6642 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6643 lo = prod;
6644 break;
6645
6646 default:
6647 gcc_unreachable ();
6648 }
6649 }
6650 else
6651 {
6652 /* A negative divisor reverses the relational operators. */
6653 code = swap_tree_comparison (code);
6654
6655 tmp = int_const_binop (PLUS_EXPR, arg01,
6656 build_int_cst (TREE_TYPE (arg01), 1));
6657 switch (tree_int_cst_sgn (arg1))
6658 {
6659 case -1:
6660 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6661 lo = prod;
6662 break;
6663
6664 case 0:
6665 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6666 lo = tmp;
6667 break;
6668
6669 case 1:
6670 neg_overflow = true;
6671 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6672 hi = prod;
6673 break;
6674
6675 default:
6676 gcc_unreachable ();
6677 }
6678 }
6679
6680 switch (code)
6681 {
6682 case EQ_EXPR:
6683 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6684 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6685 if (TREE_OVERFLOW (hi))
6686 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6687 if (TREE_OVERFLOW (lo))
6688 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6689 return build_range_check (loc, type, arg00, 1, lo, hi);
6690
6691 case NE_EXPR:
6692 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6693 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6694 if (TREE_OVERFLOW (hi))
6695 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6696 if (TREE_OVERFLOW (lo))
6697 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6698 return build_range_check (loc, type, arg00, 0, lo, hi);
6699
6700 case LT_EXPR:
6701 if (TREE_OVERFLOW (lo))
6702 {
6703 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6704 return omit_one_operand_loc (loc, type, tmp, arg00);
6705 }
6706 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6707
6708 case LE_EXPR:
6709 if (TREE_OVERFLOW (hi))
6710 {
6711 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6712 return omit_one_operand_loc (loc, type, tmp, arg00);
6713 }
6714 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6715
6716 case GT_EXPR:
6717 if (TREE_OVERFLOW (hi))
6718 {
6719 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6720 return omit_one_operand_loc (loc, type, tmp, arg00);
6721 }
6722 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6723
6724 case GE_EXPR:
6725 if (TREE_OVERFLOW (lo))
6726 {
6727 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6728 return omit_one_operand_loc (loc, type, tmp, arg00);
6729 }
6730 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6731
6732 default:
6733 break;
6734 }
6735
6736 return NULL_TREE;
6737 }
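/* Editor's illustrative check, not part of the original source: the
   range form computed above for X/C1 op C2 with truncating division,
   e.g. x/3 == 2 exactly when 6 <= x <= 8, and x/3 == 0 exactly when
   -2 <= x <= 2.  The helper name is hypothetical.  */

static int
div_compare_ranges_hold (int x)
{
  return ((x / 3 == 2) == (x >= 6 && x <= 8))
         && ((x / 3 == 0) == (x >= -2 && x <= 2));
}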
6738
6739
6740 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6741 equality/inequality test, then return a simplified form of the test
6742 using a sign testing. Otherwise return NULL. TYPE is the desired
6743 result type. */
6744
6745 static tree
6746 fold_single_bit_test_into_sign_test (location_t loc,
6747 enum tree_code code, tree arg0, tree arg1,
6748 tree result_type)
6749 {
6750 /* If this is testing a single bit, we can optimize the test. */
6751 if ((code == NE_EXPR || code == EQ_EXPR)
6752 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6753 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6754 {
6755 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6756 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6757 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6758
6759 if (arg00 != NULL_TREE
6760 /* This is only a win if casting to a signed type is cheap,
6761 i.e. when arg00's type is not a partial mode. */
6762 && TYPE_PRECISION (TREE_TYPE (arg00))
6763 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6764 {
6765 tree stype = signed_type_for (TREE_TYPE (arg00));
6766 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6767 result_type,
6768 fold_convert_loc (loc, stype, arg00),
6769 build_int_cst (stype, 0));
6770 }
6771 }
6772
6773 return NULL_TREE;
6774 }
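/* Editor's illustrative check, not part of the original source: the
   sign test above, ((A & C) != 0) == (A < 0) when C is the sign bit of
   A's type.  The helper name is hypothetical; two's complement
   representation is assumed.  */

static int
sign_bit_test_identity_holds (int a)
{
  unsigned int sign_mask = ~0u - (~0u >> 1);    /* just the high bit */
  return (((unsigned int) a & sign_mask) != 0) == (a < 0);
}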
6775
6776 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6777 equality/inequality test, then return a simplified form of
6778 the test using shifts and logical operations. Otherwise return
6779 NULL. RESULT_TYPE is the desired result type. */
6780
6781 tree
6782 fold_single_bit_test (location_t loc, enum tree_code code,
6783 tree arg0, tree arg1, tree result_type)
6784 {
6785 /* If this is testing a single bit, we can optimize the test. */
6786 if ((code == NE_EXPR || code == EQ_EXPR)
6787 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6788 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6789 {
6790 tree inner = TREE_OPERAND (arg0, 0);
6791 tree type = TREE_TYPE (arg0);
6792 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6793 machine_mode operand_mode = TYPE_MODE (type);
6794 int ops_unsigned;
6795 tree signed_type, unsigned_type, intermediate_type;
6796 tree tem, one;
6797
6798 /* First, see if we can fold the single bit test into a sign-bit
6799 test. */
6800 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6801 result_type);
6802 if (tem)
6803 return tem;
6804
6805 /* Otherwise we have (A & C) != 0 where C is a single bit,
6806 convert that into ((A >> C2) & 1), where C2 = log2(C).
6807 Similarly for (A & C) == 0. */
6808
6809 /* If INNER is a right shift by a constant and that count plus BITNUM does
6810 not overflow, adjust BITNUM and INNER. */
6811 if (TREE_CODE (inner) == RSHIFT_EXPR
6812 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6813 && bitnum < TYPE_PRECISION (type)
6814 && wi::ltu_p (TREE_OPERAND (inner, 1),
6815 TYPE_PRECISION (type) - bitnum))
6816 {
6817 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6818 inner = TREE_OPERAND (inner, 0);
6819 }
6820
6821 /* If we are going to be able to omit the AND below, we must do our
6822 operations as unsigned. If we must use the AND, we have a choice.
6823 Normally unsigned is faster, but for some machines signed is. */
6824 #ifdef LOAD_EXTEND_OP
6825 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6826 && !flag_syntax_only) ? 0 : 1;
6827 #else
6828 ops_unsigned = 1;
6829 #endif
6830
6831 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6832 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6833 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6834 inner = fold_convert_loc (loc, intermediate_type, inner);
6835
6836 if (bitnum != 0)
6837 inner = build2 (RSHIFT_EXPR, intermediate_type,
6838 inner, size_int (bitnum));
6839
6840 one = build_int_cst (intermediate_type, 1);
6841
6842 if (code == EQ_EXPR)
6843 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6844
6845 /* Put the AND last so it can combine with more things. */
6846 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6847
6848 /* Make sure to return the proper type. */
6849 inner = fold_convert_loc (loc, result_type, inner);
6850
6851 return inner;
6852 }
6853 return NULL_TREE;
6854 }
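/* Editor's illustrative check, not part of the original source: the
   shift rewrite above, ((A & C) != 0) == ((A >> C2) & 1) where
   C == 1 << C2.  The helper name is hypothetical; BITNUM must be less
   than the width of unsigned int.  */

static int
single_bit_test_identity_holds (unsigned int a, unsigned int bitnum)
{
  unsigned int c = 1u << bitnum;
  return ((a & c) != 0) == ((a >> bitnum) & 1);
}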
6855
6856 /* Check whether we are allowed to reorder operands arg0 and arg1,
6857 such that the evaluation of arg1 occurs before arg0. */
6858
6859 static bool
6860 reorder_operands_p (const_tree arg0, const_tree arg1)
6861 {
6862 if (! flag_evaluation_order)
6863 return true;
6864 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6865 return true;
6866 return ! TREE_SIDE_EFFECTS (arg0)
6867 && ! TREE_SIDE_EFFECTS (arg1);
6868 }
6869
6870 /* Test whether it is preferable to swap two operands, ARG0 and
6871 ARG1, for example because ARG0 is an integer constant and ARG1
6872 isn't. If REORDER is true, only recommend swapping if we can
6873 evaluate the operands in reverse order. */
6874
bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  if (CONSTANT_CLASS_P (arg1))
    return false;
  if (CONSTANT_CLASS_P (arg0))
    return true;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CONSTANT (arg1))
    return false;
  if (TREE_CONSTANT (arg0))
    return true;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return false;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return true;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return false;
  if (TREE_CODE (arg0) == SSA_NAME)
    return true;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return false;
  if (DECL_P (arg0))
    return true;

  return false;
}
6918
6919 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6920 ARG0 is extended to a wider type. */
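
/* Illustration (editorial note, not from the original sources): for
   unsigned char c, the comparison (int) c == 300 folds to constant 0,
   since 300 is outside the range of the shorter type, while
   (int) c == 42 is narrowed to c == 42 performed in unsigned char.  */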
6921
6922 static tree
6923 fold_widened_comparison (location_t loc, enum tree_code code,
6924 tree type, tree arg0, tree arg1)
6925 {
6926 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6927 tree arg1_unw;
6928 tree shorter_type, outer_type;
6929 tree min, max;
6930 bool above, below;
6931
6932 if (arg0_unw == arg0)
6933 return NULL_TREE;
6934 shorter_type = TREE_TYPE (arg0_unw);
6935
6936 #ifdef HAVE_canonicalize_funcptr_for_compare
6937 /* Disable this optimization if we're casting a function pointer
6938 type on targets that require function pointer canonicalization. */
6939 if (HAVE_canonicalize_funcptr_for_compare
6940 && TREE_CODE (shorter_type) == POINTER_TYPE
6941 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6942 return NULL_TREE;
6943 #endif
6944
6945 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6946 return NULL_TREE;
6947
6948 arg1_unw = get_unwidened (arg1, NULL_TREE);
6949
6950 /* If possible, express the comparison in the shorter mode. */
6951 if ((code == EQ_EXPR || code == NE_EXPR
6952 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6953 && (TREE_TYPE (arg1_unw) == shorter_type
6954 || ((TYPE_PRECISION (shorter_type)
6955 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6956 && (TYPE_UNSIGNED (shorter_type)
6957 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6958 || (TREE_CODE (arg1_unw) == INTEGER_CST
6959 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6960 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6961 && int_fits_type_p (arg1_unw, shorter_type))))
6962 return fold_build2_loc (loc, code, type, arg0_unw,
6963 fold_convert_loc (loc, shorter_type, arg1_unw));
6964
6965 if (TREE_CODE (arg1_unw) != INTEGER_CST
6966 || TREE_CODE (shorter_type) != INTEGER_TYPE
6967 || !int_fits_type_p (arg1_unw, shorter_type))
6968 return NULL_TREE;
6969
  /* If we are comparing with an integer that does not fit into the range
     of the shorter type, the result is known.  */
6972 outer_type = TREE_TYPE (arg1_unw);
6973 min = lower_bound_in_type (outer_type, shorter_type);
6974 max = upper_bound_in_type (outer_type, shorter_type);
6975
6976 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6977 max, arg1_unw));
6978 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6979 arg1_unw, min));
6980
6981 switch (code)
6982 {
6983 case EQ_EXPR:
6984 if (above || below)
6985 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6986 break;
6987
6988 case NE_EXPR:
6989 if (above || below)
6990 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6991 break;
6992
6993 case LT_EXPR:
6994 case LE_EXPR:
6995 if (above)
6996 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6997 else if (below)
6998 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

7000 case GT_EXPR:
7001 case GE_EXPR:
7002 if (above)
7003 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7004 else if (below)
7005 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

7007 default:
7008 break;
7009 }
7010
7011 return NULL_TREE;
7012 }
7013
7014 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7015 ARG0 just the signedness is changed. */
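
/* Illustration (editorial note, not from the original sources): for
   unsigned int u, the test (int) u == 5 can be rewritten as u == 5U,
   since equality is insensitive to the signedness change; an ordering
   test such as (int) u < 5 is left alone.  */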
7016
7017 static tree
7018 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7019 tree arg0, tree arg1)
7020 {
7021 tree arg0_inner;
7022 tree inner_type, outer_type;
7023
7024 if (!CONVERT_EXPR_P (arg0))
7025 return NULL_TREE;
7026
7027 outer_type = TREE_TYPE (arg0);
7028 arg0_inner = TREE_OPERAND (arg0, 0);
7029 inner_type = TREE_TYPE (arg0_inner);
7030
7031 #ifdef HAVE_canonicalize_funcptr_for_compare
7032 /* Disable this optimization if we're casting a function pointer
7033 type on targets that require function pointer canonicalization. */
7034 if (HAVE_canonicalize_funcptr_for_compare
7035 && TREE_CODE (inner_type) == POINTER_TYPE
7036 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7037 return NULL_TREE;
7038 #endif
7039
7040 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7041 return NULL_TREE;
7042
7043 if (TREE_CODE (arg1) != INTEGER_CST
7044 && !(CONVERT_EXPR_P (arg1)
7045 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7046 return NULL_TREE;
7047
7048 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7049 && code != NE_EXPR
7050 && code != EQ_EXPR)
7051 return NULL_TREE;
7052
7053 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7054 return NULL_TREE;
7055
7056 if (TREE_CODE (arg1) == INTEGER_CST)
7057 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7058 TREE_OVERFLOW (arg1));
7059 else
7060 arg1 = fold_convert_loc (loc, inner_type, arg1);
7061
7062 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7063 }
7064
7065
7066 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7067 means A >= Y && A != MAX, but in this case we know that
7068 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
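
/* Illustration (editorial note, not from the original sources): for
   integers i, n and j,

     i < n && i + 1 > j  ->  i < n && i >= j

   where the bound i < n guarantees that i + 1 does not overflow.  */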
7069
7070 static tree
7071 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7072 {
7073 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7074
7075 if (TREE_CODE (bound) == LT_EXPR)
7076 a = TREE_OPERAND (bound, 0);
7077 else if (TREE_CODE (bound) == GT_EXPR)
7078 a = TREE_OPERAND (bound, 1);
7079 else
7080 return NULL_TREE;
7081
7082 typea = TREE_TYPE (a);
7083 if (!INTEGRAL_TYPE_P (typea)
7084 && !POINTER_TYPE_P (typea))
7085 return NULL_TREE;
7086
7087 if (TREE_CODE (ineq) == LT_EXPR)
7088 {
7089 a1 = TREE_OPERAND (ineq, 1);
7090 y = TREE_OPERAND (ineq, 0);
7091 }
7092 else if (TREE_CODE (ineq) == GT_EXPR)
7093 {
7094 a1 = TREE_OPERAND (ineq, 0);
7095 y = TREE_OPERAND (ineq, 1);
7096 }
7097 else
7098 return NULL_TREE;
7099
7100 if (TREE_TYPE (a1) != typea)
7101 return NULL_TREE;
7102
7103 if (POINTER_TYPE_P (typea))
7104 {
      /* Convert the pointer types to integers before taking the difference.  */
7106 tree ta = fold_convert_loc (loc, ssizetype, a);
7107 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7108 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7109 }
7110 else
7111 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7112
7113 if (!diff || !integer_onep (diff))
7114 return NULL_TREE;
7115
7116 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7117 }
7118
7119 /* Fold a sum or difference of at least one multiplication.
7120 Returns the folded tree or NULL if no simplification could be made. */
7121
7122 static tree
7123 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7124 tree arg0, tree arg1)
7125 {
7126 tree arg00, arg01, arg10, arg11;
7127 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7128
7129 /* (A * C) +- (B * C) -> (A+-B) * C.
7130 (A * C) +- A -> A * (C+-1).
7131 We are most concerned about the case where C is a constant,
7132 but other combinations show up during loop reduction. Since
7133 it is not difficult, try all four possibilities. */
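
  /* Illustration (editorial note, not from the original sources):
       x * 4 + x * 12  ->  x * 16    (common multiplicand x)
       x * 8 + x       ->  x * 9     (x treated as x * 1)  */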
7134
7135 if (TREE_CODE (arg0) == MULT_EXPR)
7136 {
7137 arg00 = TREE_OPERAND (arg0, 0);
7138 arg01 = TREE_OPERAND (arg0, 1);
7139 }
7140 else if (TREE_CODE (arg0) == INTEGER_CST)
7141 {
7142 arg00 = build_one_cst (type);
7143 arg01 = arg0;
7144 }
7145 else
7146 {
      /* We cannot generate a constant 1 for fract types, whose range
	 does not include 1.  */
7148 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7149 return NULL_TREE;
7150 arg00 = arg0;
7151 arg01 = build_one_cst (type);
7152 }
7153 if (TREE_CODE (arg1) == MULT_EXPR)
7154 {
7155 arg10 = TREE_OPERAND (arg1, 0);
7156 arg11 = TREE_OPERAND (arg1, 1);
7157 }
7158 else if (TREE_CODE (arg1) == INTEGER_CST)
7159 {
7160 arg10 = build_one_cst (type);
	  /* As we canonicalize A - 2 to A + -2, get rid of that sign for
	     the purpose of this canonicalization.  */
7163 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7164 && negate_expr_p (arg1)
7165 && code == PLUS_EXPR)
7166 {
7167 arg11 = negate_expr (arg1);
7168 code = MINUS_EXPR;
7169 }
7170 else
7171 arg11 = arg1;
7172 }
7173 else
7174 {
      /* We cannot generate a constant 1 for fract types, whose range
	 does not include 1.  */
7176 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7177 return NULL_TREE;
7178 arg10 = arg1;
7179 arg11 = build_one_cst (type);
7180 }
7181 same = NULL_TREE;
7182
7183 if (operand_equal_p (arg01, arg11, 0))
7184 same = arg01, alt0 = arg00, alt1 = arg10;
7185 else if (operand_equal_p (arg00, arg10, 0))
7186 same = arg00, alt0 = arg01, alt1 = arg11;
7187 else if (operand_equal_p (arg00, arg11, 0))
7188 same = arg00, alt0 = arg01, alt1 = arg10;
7189 else if (operand_equal_p (arg01, arg10, 0))
7190 same = arg01, alt0 = arg00, alt1 = arg11;
7191
7192 /* No identical multiplicands; see if we can find a common
7193 power-of-two factor in non-power-of-two multiplies. This
7194 can help in multi-dimensional array access. */
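  /* Illustration (editorial note, not from the original sources):
       i * 12 + j * 4  ->  (i * 3 + j) * 4
     using 4, the smaller absolute value, as the power-of-two factor.  */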
7195 else if (tree_fits_shwi_p (arg01)
7196 && tree_fits_shwi_p (arg11))
7197 {
7198 HOST_WIDE_INT int01, int11, tmp;
7199 bool swap = false;
7200 tree maybe_same;
7201 int01 = tree_to_shwi (arg01);
7202 int11 = tree_to_shwi (arg11);
7203
7204 /* Move min of absolute values to int11. */
7205 if (absu_hwi (int01) < absu_hwi (int11))
7206 {
7207 tmp = int01, int01 = int11, int11 = tmp;
7208 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7209 maybe_same = arg01;
7210 swap = true;
7211 }
7212 else
7213 maybe_same = arg11;
7214
7215 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
	     increase the number of multiplications necessary.  */
7219 && TREE_CODE (arg10) != INTEGER_CST)
7220 {
7221 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7222 build_int_cst (TREE_TYPE (arg00),
7223 int01 / int11));
7224 alt1 = arg10;
7225 same = maybe_same;
7226 if (swap)
7227 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7228 }
7229 }
7230
7231 if (same)
7232 return fold_build2_loc (loc, MULT_EXPR, type,
7233 fold_build2_loc (loc, code, type,
7234 fold_convert_loc (loc, type, alt0),
7235 fold_convert_loc (loc, type, alt1)),
7236 fold_convert_loc (loc, type, same));
7237
7238 return NULL_TREE;
7239 }
7240
7241 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7242 specified by EXPR into the buffer PTR of length LEN bytes.
7243 Return the number of bytes placed in the buffer, or zero
7244 upon failure. */
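
/* Illustration (editorial note, not from the original sources): for a
   32-bit INTEGER_CST with value 0x01020304, the encoded bytes are
   04 03 02 01 on a little-endian target and 01 02 03 04 on a
   big-endian one; a nonnegative OFF restricts the output to the bytes
   at and after that offset.  */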
7245
7246 static int
7247 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7248 {
7249 tree type = TREE_TYPE (expr);
7250 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7251 int byte, offset, word, words;
7252 unsigned char value;
7253
7254 if ((off == -1 && total_bytes > len)
7255 || off >= total_bytes)
7256 return 0;
7257 if (off == -1)
7258 off = 0;
7259 words = total_bytes / UNITS_PER_WORD;
7260
7261 for (byte = 0; byte < total_bytes; byte++)
7262 {
7263 int bitpos = byte * BITS_PER_UNIT;
7264 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7265 number of bytes. */
7266 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7267
7268 if (total_bytes > UNITS_PER_WORD)
7269 {
7270 word = byte / UNITS_PER_WORD;
7271 if (WORDS_BIG_ENDIAN)
7272 word = (words - 1) - word;
7273 offset = word * UNITS_PER_WORD;
7274 if (BYTES_BIG_ENDIAN)
7275 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7276 else
7277 offset += byte % UNITS_PER_WORD;
7278 }
7279 else
7280 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7281 if (offset >= off
7282 && offset - off < len)
7283 ptr[offset - off] = value;
7284 }
7285 return MIN (len, total_bytes - off);
7286 }
7287
7288
7289 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7290 specified by EXPR into the buffer PTR of length LEN bytes.
7291 Return the number of bytes placed in the buffer, or zero
7292 upon failure. */
7293
7294 static int
7295 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7296 {
7297 tree type = TREE_TYPE (expr);
7298 machine_mode mode = TYPE_MODE (type);
7299 int total_bytes = GET_MODE_SIZE (mode);
7300 FIXED_VALUE_TYPE value;
7301 tree i_value, i_type;
7302
7303 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7304 return 0;
7305
7306 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7307
  /* TYPE_PRECISION is measured in bits, so compare against the bit
     size of the mode rather than its byte size.  */
  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7310 return 0;
7311
7312 value = TREE_FIXED_CST (expr);
7313 i_value = double_int_to_tree (i_type, value.data);
7314
7315 return native_encode_int (i_value, ptr, len, off);
7316 }
7317
7318
7319 /* Subroutine of native_encode_expr. Encode the REAL_CST
7320 specified by EXPR into the buffer PTR of length LEN bytes.
7321 Return the number of bytes placed in the buffer, or zero
7322 upon failure. */
7323
7324 static int
7325 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7326 {
7327 tree type = TREE_TYPE (expr);
7328 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7329 int byte, offset, word, words, bitpos;
7330 unsigned char value;
7331
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
7335 long tmp[6];
7336
7337 if ((off == -1 && total_bytes > len)
7338 || off >= total_bytes)
7339 return 0;
7340 if (off == -1)
7341 off = 0;
7342 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7343
7344 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7345
7346 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7347 bitpos += BITS_PER_UNIT)
7348 {
7349 byte = (bitpos / BITS_PER_UNIT) & 3;
7350 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7351
7352 if (UNITS_PER_WORD < 4)
7353 {
7354 word = byte / UNITS_PER_WORD;
7355 if (WORDS_BIG_ENDIAN)
7356 word = (words - 1) - word;
7357 offset = word * UNITS_PER_WORD;
7358 if (BYTES_BIG_ENDIAN)
7359 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7360 else
7361 offset += byte % UNITS_PER_WORD;
7362 }
7363 else
7364 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7365 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7366 if (offset >= off
7367 && offset - off < len)
7368 ptr[offset - off] = value;
7369 }
7370 return MIN (len, total_bytes - off);
7371 }
7372
7373 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7374 specified by EXPR into the buffer PTR of length LEN bytes.
7375 Return the number of bytes placed in the buffer, or zero
7376 upon failure. */
7377
7378 static int
7379 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7380 {
7381 int rsize, isize;
7382 tree part;
7383
7384 part = TREE_REALPART (expr);
7385 rsize = native_encode_expr (part, ptr, len, off);
7386 if (off == -1
7387 && rsize == 0)
7388 return 0;
7389 part = TREE_IMAGPART (expr);
7390 if (off != -1)
7391 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7392 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7393 if (off == -1
7394 && isize != rsize)
7395 return 0;
7396 return rsize + isize;
7397 }
7398
7399
7400 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7401 specified by EXPR into the buffer PTR of length LEN bytes.
7402 Return the number of bytes placed in the buffer, or zero
7403 upon failure. */
7404
7405 static int
7406 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7407 {
7408 unsigned i, count;
7409 int size, offset;
7410 tree itype, elem;
7411
7412 offset = 0;
7413 count = VECTOR_CST_NELTS (expr);
7414 itype = TREE_TYPE (TREE_TYPE (expr));
7415 size = GET_MODE_SIZE (TYPE_MODE (itype));
7416 for (i = 0; i < count; i++)
7417 {
7418 if (off >= size)
7419 {
7420 off -= size;
7421 continue;
7422 }
7423 elem = VECTOR_CST_ELT (expr, i);
7424 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7425 if ((off == -1 && res != size)
7426 || res == 0)
7427 return 0;
7428 offset += res;
7429 if (offset >= len)
7430 return offset;
7431 if (off != -1)
7432 off = 0;
7433 }
7434 return offset;
7435 }
7436
7437
7438 /* Subroutine of native_encode_expr. Encode the STRING_CST
7439 specified by EXPR into the buffer PTR of length LEN bytes.
7440 Return the number of bytes placed in the buffer, or zero
7441 upon failure. */
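
/* Illustration (editorial note, not from the original sources): for a
   STRING_CST "ab" with type char[4], the encoded bytes are
   'a' 'b' 0 0; bytes past TREE_STRING_LENGTH are zero-filled.  */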
7442
7443 static int
7444 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7445 {
7446 tree type = TREE_TYPE (expr);
7447 HOST_WIDE_INT total_bytes;
7448
7449 if (TREE_CODE (type) != ARRAY_TYPE
7450 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7451 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7452 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7453 return 0;
7454 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7455 if ((off == -1 && total_bytes > len)
7456 || off >= total_bytes)
7457 return 0;
7458 if (off == -1)
7459 off = 0;
7460 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7461 {
7462 int written = 0;
7463 if (off < TREE_STRING_LENGTH (expr))
7464 {
7465 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7466 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7467 }
7468 memset (ptr + written, 0,
7469 MIN (total_bytes - written, len - written));
7470 }
7471 else
7472 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7473 return MIN (total_bytes - off, len);
7474 }
7475
7476
7477 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7478 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7479 buffer PTR of length LEN bytes. If OFF is not -1 then start
7480 the encoding at byte offset OFF and encode at most LEN bytes.
7481 Return the number of bytes placed in the buffer, or zero upon failure. */
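
/* Usage sketch (editorial illustration, not from the original sources):

     unsigned char buf[16];
     int len = native_encode_expr (cst, buf, sizeof (buf), -1);

   where CST names some constant tree; passing OFF == -1 requests the
   complete encoding, and LEN is zero if BUF was too small or the
   constant cannot be encoded.  */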
7482
7483 int
7484 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7485 {
7486 switch (TREE_CODE (expr))
7487 {
7488 case INTEGER_CST:
7489 return native_encode_int (expr, ptr, len, off);
7490
7491 case REAL_CST:
7492 return native_encode_real (expr, ptr, len, off);
7493
7494 case FIXED_CST:
7495 return native_encode_fixed (expr, ptr, len, off);
7496
7497 case COMPLEX_CST:
7498 return native_encode_complex (expr, ptr, len, off);
7499
7500 case VECTOR_CST:
7501 return native_encode_vector (expr, ptr, len, off);
7502
7503 case STRING_CST:
7504 return native_encode_string (expr, ptr, len, off);
7505
7506 default:
7507 return 0;
7508 }
7509 }
7510
7511
7512 /* Subroutine of native_interpret_expr. Interpret the contents of
7513 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7514 If the buffer cannot be interpreted, return NULL_TREE. */
7515
7516 static tree
7517 native_interpret_int (tree type, const unsigned char *ptr, int len)
7518 {
7519 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7520
7521 if (total_bytes > len
7522 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7523 return NULL_TREE;
7524
7525 wide_int result = wi::from_buffer (ptr, total_bytes);
7526
7527 return wide_int_to_tree (type, result);
7528 }
7529
7530
7531 /* Subroutine of native_interpret_expr. Interpret the contents of
7532 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7533 If the buffer cannot be interpreted, return NULL_TREE. */
7534
7535 static tree
7536 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7537 {
7538 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7539 double_int result;
7540 FIXED_VALUE_TYPE fixed_value;
7541
7542 if (total_bytes > len
7543 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7544 return NULL_TREE;
7545
7546 result = double_int::from_buffer (ptr, total_bytes);
7547 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7548
7549 return build_fixed (type, fixed_value);
7550 }
7551
7552
7553 /* Subroutine of native_interpret_expr. Interpret the contents of
7554 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7555 If the buffer cannot be interpreted, return NULL_TREE. */
7556
7557 static tree
7558 native_interpret_real (tree type, const unsigned char *ptr, int len)
7559 {
7560 machine_mode mode = TYPE_MODE (type);
7561 int total_bytes = GET_MODE_SIZE (mode);
7562 int byte, offset, word, words, bitpos;
7563 unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
7567 REAL_VALUE_TYPE r;
7568 long tmp[6];
7569
7571 if (total_bytes > len || total_bytes > 24)
7572 return NULL_TREE;
7573 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7574
7575 memset (tmp, 0, sizeof (tmp));
7576 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7577 bitpos += BITS_PER_UNIT)
7578 {
7579 byte = (bitpos / BITS_PER_UNIT) & 3;
7580 if (UNITS_PER_WORD < 4)
7581 {
7582 word = byte / UNITS_PER_WORD;
7583 if (WORDS_BIG_ENDIAN)
7584 word = (words - 1) - word;
7585 offset = word * UNITS_PER_WORD;
7586 if (BYTES_BIG_ENDIAN)
7587 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7588 else
7589 offset += byte % UNITS_PER_WORD;
7590 }
7591 else
7592 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7593 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7594
7595 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7596 }
7597
7598 real_from_target (&r, tmp, mode);
7599 return build_real (type, r);
7600 }
7601
7602
7603 /* Subroutine of native_interpret_expr. Interpret the contents of
7604 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7605 If the buffer cannot be interpreted, return NULL_TREE. */
7606
7607 static tree
7608 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7609 {
7610 tree etype, rpart, ipart;
7611 int size;
7612
7613 etype = TREE_TYPE (type);
7614 size = GET_MODE_SIZE (TYPE_MODE (etype));
7615 if (size * 2 > len)
7616 return NULL_TREE;
7617 rpart = native_interpret_expr (etype, ptr, size);
7618 if (!rpart)
7619 return NULL_TREE;
7620 ipart = native_interpret_expr (etype, ptr+size, size);
7621 if (!ipart)
7622 return NULL_TREE;
7623 return build_complex (type, rpart, ipart);
7624 }
7625
7626
7627 /* Subroutine of native_interpret_expr. Interpret the contents of
7628 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7629 If the buffer cannot be interpreted, return NULL_TREE. */
7630
7631 static tree
7632 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7633 {
7634 tree etype, elem;
7635 int i, size, count;
7636 tree *elements;
7637
7638 etype = TREE_TYPE (type);
7639 size = GET_MODE_SIZE (TYPE_MODE (etype));
7640 count = TYPE_VECTOR_SUBPARTS (type);
7641 if (size * count > len)
7642 return NULL_TREE;
7643
7644 elements = XALLOCAVEC (tree, count);
7645 for (i = count - 1; i >= 0; i--)
7646 {
7647 elem = native_interpret_expr (etype, ptr+(i*size), size);
7648 if (!elem)
7649 return NULL_TREE;
7650 elements[i] = elem;
7651 }
7652 return build_vector (type, elements);
7653 }
7654
7655
7656 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7657 the buffer PTR of length LEN as a constant of type TYPE. For
7658 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7659 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7660 return NULL_TREE. */
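
/* Usage sketch (editorial illustration, not from the original sources):
   this routine is the inverse of native_encode_expr, so

     unsigned char buf[16];
     int len = native_encode_expr (cst, buf, sizeof (buf), -1);
     tree back = len ? native_interpret_expr (TREE_TYPE (cst), buf, len)
		     : NULL_TREE;

   should recover a constant equal to CST for the type codes handled
   below.  */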
7661
7662 tree
7663 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7664 {
7665 switch (TREE_CODE (type))
7666 {
7667 case INTEGER_TYPE:
7668 case ENUMERAL_TYPE:
7669 case BOOLEAN_TYPE:
7670 case POINTER_TYPE:
7671 case REFERENCE_TYPE:
7672 return native_interpret_int (type, ptr, len);
7673
7674 case REAL_TYPE:
7675 return native_interpret_real (type, ptr, len);
7676
7677 case FIXED_POINT_TYPE:
7678 return native_interpret_fixed (type, ptr, len);
7679
7680 case COMPLEX_TYPE:
7681 return native_interpret_complex (type, ptr, len);
7682
7683 case VECTOR_TYPE:
7684 return native_interpret_vector (type, ptr, len);
7685
7686 default:
7687 return NULL_TREE;
7688 }
7689 }
7690
7691 /* Returns true if we can interpret the contents of a native encoding
7692 as TYPE. */
7693
7694 static bool
7695 can_native_interpret_type_p (tree type)
7696 {
7697 switch (TREE_CODE (type))
7698 {
7699 case INTEGER_TYPE:
7700 case ENUMERAL_TYPE:
7701 case BOOLEAN_TYPE:
7702 case POINTER_TYPE:
7703 case REFERENCE_TYPE:
7704 case FIXED_POINT_TYPE:
7705 case REAL_TYPE:
7706 case COMPLEX_TYPE:
7707 case VECTOR_TYPE:
7708 return true;
7709 default:
7710 return false;
7711 }
7712 }
7713
7714 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7715 TYPE at compile-time. If we're unable to perform the conversion
7716 return NULL_TREE. */
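
/* Illustration (editorial note, not from the original sources): on a
   target using IEEE single precision, VIEW_CONVERT_EXPR<int>(1.0f)
   folds to the INTEGER_CST 0x3f800000, obtained by encoding the
   REAL_CST into BUFFER and re-interpreting the bytes as an int.  */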
7717
7718 static tree
7719 fold_view_convert_expr (tree type, tree expr)
7720 {
7721 /* We support up to 512-bit values (for V8DFmode). */
7722 unsigned char buffer[64];
7723 int len;
7724
7725 /* Check that the host and target are sane. */
7726 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7727 return NULL_TREE;
7728
7729 len = native_encode_expr (expr, buffer, sizeof (buffer));
7730 if (len == 0)
7731 return NULL_TREE;
7732
7733 return native_interpret_expr (type, buffer, len);
7734 }
7735
7736 /* Build an expression for the address of T. Folds away INDIRECT_REF
7737 to avoid confusing the gimplify process. */
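
/* Illustration (editorial note, not from the original sources): the
   address of *p folds back to p (with a cast to PTRTYPE if needed),
   and the address of MEM_REF <p, 0> likewise yields p rather than a
   new ADDR_EXPR.  */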
7738
7739 tree
7740 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7741 {
7742 /* The size of the object is not relevant when talking about its address. */
7743 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7744 t = TREE_OPERAND (t, 0);
7745
7746 if (TREE_CODE (t) == INDIRECT_REF)
7747 {
7748 t = TREE_OPERAND (t, 0);
7749
7750 if (TREE_TYPE (t) != ptrtype)
7751 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7752 }
7753 else if (TREE_CODE (t) == MEM_REF
7754 && integer_zerop (TREE_OPERAND (t, 1)))
7755 return TREE_OPERAND (t, 0);
7756 else if (TREE_CODE (t) == MEM_REF
7757 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7758 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7759 TREE_OPERAND (t, 0),
7760 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7761 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7762 {
7763 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7764
7765 if (TREE_TYPE (t) != ptrtype)
7766 t = fold_convert_loc (loc, ptrtype, t);
7767 }
7768 else
7769 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7770
7771 return t;
7772 }
7773
7774 /* Build an expression for the address of T. */
7775
7776 tree
7777 build_fold_addr_expr_loc (location_t loc, tree t)
7778 {
7779 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7780
7781 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7782 }
7783
7784 /* Fold a unary expression of code CODE and type TYPE with operand
7785 OP0. Return the folded expression if folding is successful.
7786 Otherwise, return NULL_TREE. */
7787
7788 tree
7789 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7790 {
7791 tree tem;
7792 tree arg0;
7793 enum tree_code_class kind = TREE_CODE_CLASS (code);
7794
7795 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7796 && TREE_CODE_LENGTH (code) == 1);
7797
7798 arg0 = op0;
7799 if (arg0)
7800 {
7801 if (CONVERT_EXPR_CODE_P (code)
7802 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7803 {
7804 /* Don't use STRIP_NOPS, because signedness of argument type
7805 matters. */
7806 STRIP_SIGN_NOPS (arg0);
7807 }
7808 else
7809 {
7810 /* Strip any conversions that don't change the mode. This
7811 is safe for every expression, except for a comparison
7812 expression because its signedness is derived from its
7813 operands.
7814
7815 Note that this is done as an internal manipulation within
7816 the constant folder, in order to find the simplest
7817 representation of the arguments so that their form can be
	     studied.  In any case, the appropriate type conversions
7819 should be put back in the tree that will get out of the
7820 constant folder. */
7821 STRIP_NOPS (arg0);
7822 }
7823
7824 if (CONSTANT_CLASS_P (arg0))
7825 {
7826 tree tem = const_unop (code, type, arg0);
7827 if (tem)
7828 {
7829 if (TREE_TYPE (tem) != type)
7830 tem = fold_convert_loc (loc, type, tem);
7831 return tem;
7832 }
7833 }
7834 }
7835
7836 tem = generic_simplify (loc, code, type, op0);
7837 if (tem)
7838 return tem;
7839
7840 if (TREE_CODE_CLASS (code) == tcc_unary)
7841 {
7842 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7843 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7844 fold_build1_loc (loc, code, type,
7845 fold_convert_loc (loc, TREE_TYPE (op0),
7846 TREE_OPERAND (arg0, 1))));
7847 else if (TREE_CODE (arg0) == COND_EXPR)
7848 {
7849 tree arg01 = TREE_OPERAND (arg0, 1);
7850 tree arg02 = TREE_OPERAND (arg0, 2);
7851 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7852 arg01 = fold_build1_loc (loc, code, type,
7853 fold_convert_loc (loc,
7854 TREE_TYPE (op0), arg01));
7855 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7856 arg02 = fold_build1_loc (loc, code, type,
7857 fold_convert_loc (loc,
7858 TREE_TYPE (op0), arg02));
7859 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7860 arg01, arg02);
7861
	  /* If this was a conversion, and all we did was to move it
	     inside the COND_EXPR, bring it back out.  But leave it if
7864 it is a conversion from integer to integer and the
7865 result precision is no wider than a word since such a
7866 conversion is cheap and may be optimized away by combine,
7867 while it couldn't if it were outside the COND_EXPR. Then return
7868 so we don't get into an infinite recursion loop taking the
7869 conversion out and then back in. */
7870
7871 if ((CONVERT_EXPR_CODE_P (code)
7872 || code == NON_LVALUE_EXPR)
7873 && TREE_CODE (tem) == COND_EXPR
7874 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7875 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7876 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7877 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7878 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7879 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7880 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7881 && (INTEGRAL_TYPE_P
7882 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7883 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7884 || flag_syntax_only))
7885 tem = build1_loc (loc, code, type,
7886 build3 (COND_EXPR,
7887 TREE_TYPE (TREE_OPERAND
7888 (TREE_OPERAND (tem, 1), 0)),
7889 TREE_OPERAND (tem, 0),
7890 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7891 TREE_OPERAND (TREE_OPERAND (tem, 2),
7892 0)));
7893 return tem;
7894 }
7895 }
7896
7897 switch (code)
7898 {
7899 case NON_LVALUE_EXPR:
7900 if (!maybe_lvalue_p (op0))
7901 return fold_convert_loc (loc, type, op0);
7902 return NULL_TREE;
7903
7904 CASE_CONVERT:
7905 case FLOAT_EXPR:
7906 case FIX_TRUNC_EXPR:
7907 if (COMPARISON_CLASS_P (op0))
7908 {
	  /* If we have (type) (a CMP b) and type is an integral type, return
	     a new expression involving the new type.  Canonicalize
	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
	     a non-integral type.
	     Do not fold the result as that would not simplify further;
	     also, folding again would result in infinite recursion.  */
7915 if (TREE_CODE (type) == BOOLEAN_TYPE)
7916 return build2_loc (loc, TREE_CODE (op0), type,
7917 TREE_OPERAND (op0, 0),
7918 TREE_OPERAND (op0, 1));
7919 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7920 && TREE_CODE (type) != VECTOR_TYPE)
7921 return build3_loc (loc, COND_EXPR, type, op0,
7922 constant_boolean_node (true, type),
7923 constant_boolean_node (false, type));
7924 }
7925
7926 /* Handle (T *)&A.B.C for A being of type T and B and C
7927 living at offset zero. This occurs frequently in
7928 C++ upcasting and then accessing the base. */
7929 if (TREE_CODE (op0) == ADDR_EXPR
7930 && POINTER_TYPE_P (type)
7931 && handled_component_p (TREE_OPERAND (op0, 0)))
7932 {
7933 HOST_WIDE_INT bitsize, bitpos;
7934 tree offset;
7935 machine_mode mode;
7936 int unsignedp, volatilep;
7937 tree base = TREE_OPERAND (op0, 0);
7938 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7939 &mode, &unsignedp, &volatilep, false);
7940 /* If the reference was to a (constant) zero offset, we can use
7941 the address of the base if it has the same base type
7942 as the result type and the pointer type is unqualified. */
7943 if (! offset && bitpos == 0
7944 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7945 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7946 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7947 return fold_convert_loc (loc, type,
7948 build_fold_addr_expr_loc (loc, base));
7949 }
7950
7951 if (TREE_CODE (op0) == MODIFY_EXPR
7952 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7953 /* Detect assigning a bitfield. */
7954 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7955 && DECL_BIT_FIELD
7956 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7957 {
7958 /* Don't leave an assignment inside a conversion
7959 unless assigning a bitfield. */
7960 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7961 /* First do the assignment, then return converted constant. */
7962 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7963 TREE_NO_WARNING (tem) = 1;
7964 TREE_USED (tem) = 1;
7965 return tem;
7966 }
7967
7968 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7969 constants (if x has signed type, the sign bit cannot be set
7970 in c). This folds extension into the BIT_AND_EXPR.
7971 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7972 very likely don't have maximal range for their precision and this
7973 transformation effectively doesn't preserve non-maximal ranges. */
7974 if (TREE_CODE (type) == INTEGER_TYPE
7975 && TREE_CODE (op0) == BIT_AND_EXPR
7976 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7977 {
7978 tree and_expr = op0;
7979 tree and0 = TREE_OPERAND (and_expr, 0);
7980 tree and1 = TREE_OPERAND (and_expr, 1);
7981 int change = 0;
7982
7983 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7984 || (TYPE_PRECISION (type)
7985 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7986 change = 1;
7987 else if (TYPE_PRECISION (TREE_TYPE (and1))
7988 <= HOST_BITS_PER_WIDE_INT
7989 && tree_fits_uhwi_p (and1))
7990 {
7991 unsigned HOST_WIDE_INT cst;
7992
7993 cst = tree_to_uhwi (and1);
7994 cst &= HOST_WIDE_INT_M1U
7995 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7996 change = (cst == 0);
7997 #ifdef LOAD_EXTEND_OP
7998 if (change
7999 && !flag_syntax_only
8000 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8001 == ZERO_EXTEND))
8002 {
8003 tree uns = unsigned_type_for (TREE_TYPE (and0));
8004 and0 = fold_convert_loc (loc, uns, and0);
8005 and1 = fold_convert_loc (loc, uns, and1);
8006 }
8007 #endif
8008 }
8009 if (change)
8010 {
8011 tem = force_fit_type (type, wi::to_widest (and1), 0,
8012 TREE_OVERFLOW (and1));
8013 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8014 fold_convert_loc (loc, type, and0), tem);
8015 }
8016 }
8017
8018 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8019 when one of the new casts will fold away. Conservatively we assume
8020 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8021 if (POINTER_TYPE_P (type)
8022 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8023 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8024 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8025 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8026 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8027 {
8028 tree arg00 = TREE_OPERAND (arg0, 0);
8029 tree arg01 = TREE_OPERAND (arg0, 1);
8030
8031 return fold_build_pointer_plus_loc
8032 (loc, fold_convert_loc (loc, type, arg00), arg01);
8033 }
8034
8035 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8036 of the same precision, and X is an integer type not narrower than
8037 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8038 if (INTEGRAL_TYPE_P (type)
8039 && TREE_CODE (op0) == BIT_NOT_EXPR
8040 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8041 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8042 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8043 {
8044 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8045 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8046 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8047 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8048 fold_convert_loc (loc, type, tem));
8049 }
8050
8051 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8052 type of X and Y (integer types only). */
8053 if (INTEGRAL_TYPE_P (type)
8054 && TREE_CODE (op0) == MULT_EXPR
8055 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8056 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8057 {
8058 /* Be careful not to introduce new overflows. */
8059 tree mult_type;
8060 if (TYPE_OVERFLOW_WRAPS (type))
8061 mult_type = type;
8062 else
8063 mult_type = unsigned_type_for (type);
8064
8065 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8066 {
8067 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8068 fold_convert_loc (loc, mult_type,
8069 TREE_OPERAND (op0, 0)),
8070 fold_convert_loc (loc, mult_type,
8071 TREE_OPERAND (op0, 1)));
8072 return fold_convert_loc (loc, type, tem);
8073 }
8074 }
8075
8076 return NULL_TREE;
8077
8078 case VIEW_CONVERT_EXPR:
8079 if (TREE_CODE (op0) == MEM_REF)
8080 return fold_build2_loc (loc, MEM_REF, type,
8081 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8082
8083 return NULL_TREE;
8084
8085 case NEGATE_EXPR:
8086 tem = fold_negate_expr (loc, arg0);
8087 if (tem)
8088 return fold_convert_loc (loc, type, tem);
8089 return NULL_TREE;
8090
8091 case ABS_EXPR:
8092 /* Convert fabs((double)float) into (double)fabsf(float). */
8093 if (TREE_CODE (arg0) == NOP_EXPR
8094 && TREE_CODE (type) == REAL_TYPE)
8095 {
8096 tree targ0 = strip_float_extensions (arg0);
8097 if (targ0 != arg0)
8098 return fold_convert_loc (loc, type,
8099 fold_build1_loc (loc, ABS_EXPR,
8100 TREE_TYPE (targ0),
8101 targ0));
8102 }
8103 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8104 else if (TREE_CODE (arg0) == ABS_EXPR)
8105 return arg0;
8106
8107 /* Strip sign ops from argument. */
8108 if (TREE_CODE (type) == REAL_TYPE)
8109 {
8110 tem = fold_strip_sign_ops (arg0);
8111 if (tem)
8112 return fold_build1_loc (loc, ABS_EXPR, type,
8113 fold_convert_loc (loc, type, tem));
8114 }
8115 return NULL_TREE;
8116
8117 case CONJ_EXPR:
8118 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8119 return fold_convert_loc (loc, type, arg0);
8120 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8121 {
8122 tree itype = TREE_TYPE (type);
8123 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8124 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8125 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8126 negate_expr (ipart));
8127 }
8128 if (TREE_CODE (arg0) == CONJ_EXPR)
8129 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8130 return NULL_TREE;
8131
8132 case BIT_NOT_EXPR:
8133 /* Convert ~ (-A) to A - 1. */
8134 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8135 return fold_build2_loc (loc, MINUS_EXPR, type,
8136 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8137 build_int_cst (type, 1));
8138 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8139 else if (INTEGRAL_TYPE_P (type)
8140 && ((TREE_CODE (arg0) == MINUS_EXPR
8141 && integer_onep (TREE_OPERAND (arg0, 1)))
8142 || (TREE_CODE (arg0) == PLUS_EXPR
8143 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8144 {
	  /* Perform the negation in ARG0's type and only then convert
	     to TYPE so as to avoid introducing undefined behavior.  */
8147 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8148 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8149 TREE_OPERAND (arg0, 0));
8150 return fold_convert_loc (loc, type, t);
8151 }
8152 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8153 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8154 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8155 fold_convert_loc (loc, type,
8156 TREE_OPERAND (arg0, 0)))))
8157 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8158 fold_convert_loc (loc, type,
8159 TREE_OPERAND (arg0, 1)));
8160 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8161 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8162 fold_convert_loc (loc, type,
8163 TREE_OPERAND (arg0, 1)))))
8164 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8165 fold_convert_loc (loc, type,
8166 TREE_OPERAND (arg0, 0)), tem);
8167
8168 return NULL_TREE;
8169
8170 case TRUTH_NOT_EXPR:
8171 /* Note that the operand of this must be an int
8172 and its values must be 0 or 1.
8173 ("true" is a fixed value perhaps depending on the language,
8174 but we don't handle values other than 1 correctly yet.) */
8175 tem = fold_truth_not_expr (loc, arg0);
8176 if (!tem)
8177 return NULL_TREE;
8178 return fold_convert_loc (loc, type, tem);
8179
8180 case REALPART_EXPR:
8181 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8182 return fold_convert_loc (loc, type, arg0);
8183 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8184 {
8185 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8186 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8187 fold_build1_loc (loc, REALPART_EXPR, itype,
8188 TREE_OPERAND (arg0, 0)),
8189 fold_build1_loc (loc, REALPART_EXPR, itype,
8190 TREE_OPERAND (arg0, 1)));
8191 return fold_convert_loc (loc, type, tem);
8192 }
8193 if (TREE_CODE (arg0) == CONJ_EXPR)
8194 {
8195 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8196 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8197 TREE_OPERAND (arg0, 0));
8198 return fold_convert_loc (loc, type, tem);
8199 }
8200 if (TREE_CODE (arg0) == CALL_EXPR)
8201 {
8202 tree fn = get_callee_fndecl (arg0);
8203 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8204 switch (DECL_FUNCTION_CODE (fn))
8205 {
8206 CASE_FLT_FN (BUILT_IN_CEXPI):
8207 fn = mathfn_built_in (type, BUILT_IN_COS);
8208 if (fn)
8209 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8210 break;
8211
8212 default:
8213 break;
8214 }
8215 }
8216 return NULL_TREE;
8217
8218 case IMAGPART_EXPR:
8219 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8220 return build_zero_cst (type);
8221 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8222 {
8223 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8224 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8225 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8226 TREE_OPERAND (arg0, 0)),
8227 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8228 TREE_OPERAND (arg0, 1)));
8229 return fold_convert_loc (loc, type, tem);
8230 }
8231 if (TREE_CODE (arg0) == CONJ_EXPR)
8232 {
8233 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8234 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8235 return fold_convert_loc (loc, type, negate_expr (tem));
8236 }
8237 if (TREE_CODE (arg0) == CALL_EXPR)
8238 {
8239 tree fn = get_callee_fndecl (arg0);
8240 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8241 switch (DECL_FUNCTION_CODE (fn))
8242 {
8243 CASE_FLT_FN (BUILT_IN_CEXPI):
8244 fn = mathfn_built_in (type, BUILT_IN_SIN);
8245 if (fn)
8246 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8247 break;
8248
8249 default:
8250 break;
8251 }
8252 }
8253 return NULL_TREE;
8254
8255 case INDIRECT_REF:
8256 /* Fold *&X to X if X is an lvalue. */
8257 if (TREE_CODE (op0) == ADDR_EXPR)
8258 {
8259 tree op00 = TREE_OPERAND (op0, 0);
8260 if ((TREE_CODE (op00) == VAR_DECL
8261 || TREE_CODE (op00) == PARM_DECL
8262 || TREE_CODE (op00) == RESULT_DECL)
8263 && !TREE_READONLY (op00))
8264 return op00;
8265 }
8266 return NULL_TREE;
8267
8268 default:
8269 return NULL_TREE;
8270 } /* switch (code) */
8271 }
8272
8273
/* If the operation was a conversion, do _not_ mark a resulting constant
8275 with TREE_OVERFLOW if the original constant was not. These conversions
8276 have implementation defined behavior and retaining the TREE_OVERFLOW
8277 flag here would confuse later passes such as VRP. */
8278 tree
8279 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8280 tree type, tree op0)
8281 {
8282 tree res = fold_unary_loc (loc, code, type, op0);
8283 if (res
8284 && TREE_CODE (res) == INTEGER_CST
8285 && TREE_CODE (op0) == INTEGER_CST
8286 && CONVERT_EXPR_CODE_P (code))
8287 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8288
8289 return res;
8290 }
8291
8292 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8293 operands OP0 and OP1. LOC is the location of the resulting expression.
8294 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8295 Return the folded expression if folding is successful. Otherwise,
8296 return NULL_TREE. */
8297 static tree
8298 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8299 tree arg0, tree arg1, tree op0, tree op1)
8300 {
8301 tree tem;
8302
8303 /* We only do these simplifications if we are optimizing. */
8304 if (!optimize)
8305 return NULL_TREE;
8306
8307 /* Check for things like (A || B) && (A || C). We can convert this
8308 to A || (B && C). Note that either operator can be any of the four
8309 truth and/or operations and the transformation will still be
8310 valid. Also note that we only care about order for the
8311 ANDIF and ORIF operators. If B contains side effects, this
8312 might change the truth-value of A. */
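  /* Illustration (editorial note, not from the original sources):
       (a || b) && (a || c)  ->  a || (b && c)
     The folded tree contains a single copy of A, which is why B must
     be free of side effects: in the original, B's side effects occur
     between the two evaluations of A.  */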
8313 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8314 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8315 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8316 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8317 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8318 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8319 {
8320 tree a00 = TREE_OPERAND (arg0, 0);
8321 tree a01 = TREE_OPERAND (arg0, 1);
8322 tree a10 = TREE_OPERAND (arg1, 0);
8323 tree a11 = TREE_OPERAND (arg1, 1);
8324 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8325 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8326 && (code == TRUTH_AND_EXPR
8327 || code == TRUTH_OR_EXPR));
8328
8329 if (operand_equal_p (a00, a10, 0))
8330 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8331 fold_build2_loc (loc, code, type, a01, a11));
8332 else if (commutative && operand_equal_p (a00, a11, 0))
8333 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8334 fold_build2_loc (loc, code, type, a01, a10));
8335 else if (commutative && operand_equal_p (a01, a10, 0))
8336 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8337 fold_build2_loc (loc, code, type, a00, a11));
8338
      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */
8341
8342 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8343 && operand_equal_p (a01, a11, 0))
8344 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8345 fold_build2_loc (loc, code, type, a00, a10),
8346 a01);
8347 }
8348
8349 /* See if we can build a range comparison. */
8350 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8351 return tem;
8352
8353 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8354 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8355 {
8356 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8357 if (tem)
8358 return fold_build2_loc (loc, code, type, tem, arg1);
8359 }
8360
8361 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8362 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8363 {
8364 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8365 if (tem)
8366 return fold_build2_loc (loc, code, type, arg0, tem);
8367 }
8368
8369 /* Check for the possibility of merging component references. If our
8370 lhs is another similar operation, try to merge its rhs with our
8371 rhs. Then try to merge our lhs and rhs. */
8372 if (TREE_CODE (arg0) == code
8373 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8374 TREE_OPERAND (arg0, 1), arg1)))
8375 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8376
8377 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8378 return tem;
8379
8380 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8381 && (code == TRUTH_AND_EXPR
8382 || code == TRUTH_ANDIF_EXPR
8383 || code == TRUTH_OR_EXPR
8384 || code == TRUTH_ORIF_EXPR))
8385 {
8386 enum tree_code ncode, icode;
8387
8388 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8389 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8390 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8391
      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leaves into a non-IF
	 AND/OR expression.
	 If the tree code of the left-hand operand isn't an AND/OR-IF
	 code and isn't equal to IF-CODE, then we don't want to add the
	 right-hand operand.  If the inner right-hand side of the
	 left-hand operand has side-effects, or isn't simple, then we
	 can't add to it, as otherwise we might destroy the
	 if-sequence.  */
8401 if (TREE_CODE (arg0) == icode
8402 && simple_operand_p_2 (arg1)
8403 /* Needed for sequence points to handle trappings, and
8404 side-effects. */
8405 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8406 {
8407 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8408 arg1);
8409 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8410 tem);
8411 }
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8414 else if (TREE_CODE (arg1) == icode
8415 && simple_operand_p_2 (arg0)
8416 /* Needed for sequence points to handle trappings, and
8417 side-effects. */
8418 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8419 {
8420 tem = fold_build2_loc (loc, ncode, type,
8421 arg0, TREE_OPERAND (arg1, 0));
8422 return fold_build2_loc (loc, icode, type, tem,
8423 TREE_OPERAND (arg1, 1));
8424 }
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
8429 else if (code == icode && simple_operand_p_2 (arg0)
8430 && simple_operand_p_2 (arg1))
8431 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8432 }
8433
8434 return NULL_TREE;
8435 }
8436
8437 /* Fold a binary expression of code CODE and type TYPE with operands
8438 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8439 Return the folded expression if folding is successful. Otherwise,
8440 return NULL_TREE. */
8441
8442 static tree
8443 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8444 {
8445 enum tree_code compl_code;
8446
8447 if (code == MIN_EXPR)
8448 compl_code = MAX_EXPR;
8449 else if (code == MAX_EXPR)
8450 compl_code = MIN_EXPR;
8451 else
8452 gcc_unreachable ();
8453
8454 /* MIN (MAX (a, b), b) == b. */
8455 if (TREE_CODE (op0) == compl_code
8456 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8457 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8458
8459 /* MIN (MAX (b, a), b) == b. */
8460 if (TREE_CODE (op0) == compl_code
8461 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8462 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8463 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8464
8465 /* MIN (a, MAX (a, b)) == a. */
8466 if (TREE_CODE (op1) == compl_code
8467 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8468 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8469 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8470
8471 /* MIN (a, MAX (b, a)) == a. */
8472 if (TREE_CODE (op1) == compl_code
8473 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8474 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8475 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8476
8477 return NULL_TREE;
8478 }
8479
8480 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8481 by changing CODE to reduce the magnitude of constants involved in
8482 ARG0 of the comparison.
8483 Returns a canonicalized comparison tree if a simplification was
8484 possible, otherwise returns NULL_TREE.
8485 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8486 valid if signed overflow is undefined. */
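
/* Illustration (editorial note, not from the original sources),
   assuming signed overflow is undefined:
     a - 2 < b   ->  a - 1 <= b
     a + 2 > b   ->  a + 1 >= b
   and the sole-constant form 5 <= b is canonicalized to b > 4 via the
   swapped comparison.  */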
8487
8488 static tree
8489 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8490 tree arg0, tree arg1,
8491 bool *strict_overflow_p)
8492 {
8493 enum tree_code code0 = TREE_CODE (arg0);
8494 tree t, cst0 = NULL_TREE;
8495 int sgn0;
8496 bool swap = false;
8497
8498 /* Match A +- CST code arg1 and CST code arg1. We can change the
8499 first form only if overflow is undefined. */
8500 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8501 /* In principle pointers also have undefined overflow behavior,
8502 but that causes problems elsewhere. */
8503 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8504 && (code0 == MINUS_EXPR
8505 || code0 == PLUS_EXPR)
8506 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8507 || code0 == INTEGER_CST))
8508 return NULL_TREE;
8509
8510 /* Identify the constant in arg0 and its sign. */
8511 if (code0 == INTEGER_CST)
8512 cst0 = arg0;
8513 else
8514 cst0 = TREE_OPERAND (arg0, 1);
8515 sgn0 = tree_int_cst_sgn (cst0);
8516
8517 /* Overflowed constants and zero will cause problems. */
8518 if (integer_zerop (cst0)
8519 || TREE_OVERFLOW (cst0))
8520 return NULL_TREE;
8521
8522 /* See if we can reduce the magnitude of the constant in
8523 arg0 by changing the comparison code. */
8524 if (code0 == INTEGER_CST)
8525 {
8526 /* CST <= arg1 -> CST-1 < arg1. */
8527 if (code == LE_EXPR && sgn0 == 1)
8528 code = LT_EXPR;
8529 /* -CST < arg1 -> -CST-1 <= arg1. */
8530 else if (code == LT_EXPR && sgn0 == -1)
8531 code = LE_EXPR;
8532 /* CST > arg1 -> CST-1 >= arg1. */
8533 else if (code == GT_EXPR && sgn0 == 1)
8534 code = GE_EXPR;
8535 /* -CST >= arg1 -> -CST-1 > arg1. */
8536 else if (code == GE_EXPR && sgn0 == -1)
8537 code = GT_EXPR;
8538 else
8539 return NULL_TREE;
8540 /* arg1 code' CST' might be more canonical. */
8541 swap = true;
8542 }
8543 else
8544 {
8545 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8546 if (code == LT_EXPR
8547 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8548 code = LE_EXPR;
8549 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8550 else if (code == GT_EXPR
8551 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8552 code = GE_EXPR;
8553 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8554 else if (code == LE_EXPR
8555 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8556 code = LT_EXPR;
8557 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8558 else if (code == GE_EXPR
8559 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8560 code = GT_EXPR;
8561 else
8562 return NULL_TREE;
8563 *strict_overflow_p = true;
8564 }
8565
  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
8568 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8569 && ((sgn0 == 1
8570 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8571 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8572 || (sgn0 == -1
8573 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8574 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8575 /* We cannot swap the comparison here as that would cause us to
8576 endlessly recurse. */
8577 return NULL_TREE;
8578
8579 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8580 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8581 if (code0 != INTEGER_CST)
8582 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8583 t = fold_convert (TREE_TYPE (arg1), t);
8584
  /* If swapping might yield a more canonical form, do so.  */
8586 if (swap)
8587 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8588 else
8589 return fold_build2_loc (loc, code, type, t, arg1);
8590 }
8591
/* Further canonicalize the comparison ARG0 CODE ARG1, whose type TYPE has
   undefined overflow.  Try to decrease the magnitude of constants involved
8594 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8595 and put sole constants at the second argument position.
8596 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8597
8598 static tree
8599 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8600 tree arg0, tree arg1)
8601 {
8602 tree t;
8603 bool strict_overflow_p;
8604 const char * const warnmsg = G_("assuming signed overflow does not occur "
8605 "when reducing constant in comparison");
8606
8607 /* Try canonicalization by simplifying arg0. */
8608 strict_overflow_p = false;
8609 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8610 &strict_overflow_p);
8611 if (t)
8612 {
8613 if (strict_overflow_p)
8614 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8615 return t;
8616 }
8617
8618 /* Try canonicalization by simplifying arg1 using the swapped
8619 comparison. */
8620 code = swap_tree_comparison (code);
8621 strict_overflow_p = false;
8622 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8623 &strict_overflow_p);
8624 if (t && strict_overflow_p)
8625 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8626 return t;
8627 }
8628
8629 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8630 space. This is used to avoid issuing overflow warnings for
8631 expressions like &p->x which cannot wrap. */
8632
8633 static bool
8634 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8635 {
8636 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8637 return true;
8638
8639 if (bitpos < 0)
8640 return true;
8641
8642 wide_int wi_offset;
8643 int precision = TYPE_PRECISION (TREE_TYPE (base));
8644 if (offset == NULL_TREE)
8645 wi_offset = wi::zero (precision);
8646 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8647 return true;
8648 else
8649 wi_offset = offset;
8650
8651 bool overflow;
8652 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8653 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8654 if (overflow)
8655 return true;
8656
8657 if (!wi::fits_uhwi_p (total))
8658 return true;
8659
8660 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8661 if (size <= 0)
8662 return true;
8663
8664 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8665 array. */
8666 if (TREE_CODE (base) == ADDR_EXPR)
8667 {
8668 HOST_WIDE_INT base_size;
8669
8670 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8671 if (base_size > 0 && size < base_size)
8672 size = base_size;
8673 }
8674
8675 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8676 }
8677
8678 /* Return the HOST_WIDE_INT least significant bits of T, an
8679 INTEGER_CST of sizetype kind. This makes sure to properly
8680 sign-extend the constant. */
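/* For example, with an illustrative 16-bit precision, the low
   element 0xffff is sign-extended to the HOST_WIDE_INT value -1
   rather than being returned as 65535. */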
8681
8682 static HOST_WIDE_INT
8683 size_low_cst (const_tree t)
8684 {
8685 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8686 int prec = TYPE_PRECISION (TREE_TYPE (t));
8687 if (prec < HOST_BITS_PER_WIDE_INT)
8688 return sext_hwi (w, prec);
8689 return w;
8690 }
8691
8692 /* Subroutine of fold_binary. This routine performs all of the
8693 transformations that are common to the equality/inequality
8694 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8695 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8696 fold_binary should call fold_binary rather than this routine directly. Fold a comparison with
8697 tree code CODE and type TYPE with operands OP0 and OP1. Return
8698 the folded comparison or NULL_TREE. */
8699
8700 static tree
8701 fold_comparison (location_t loc, enum tree_code code, tree type,
8702 tree op0, tree op1)
8703 {
8704 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8705 tree arg0, arg1, tem;
8706
8707 arg0 = op0;
8708 arg1 = op1;
8709
8710 STRIP_SIGN_NOPS (arg0);
8711 STRIP_SIGN_NOPS (arg1);
8712
8713 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
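/* For example, x + 3 < 7 becomes x < 4 and x - 3 < 7 becomes
   x < 10 (illustrative values; for the inequalities this relies
   on signed overflow being undefined). */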
8714 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8715 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8716 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8717 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8718 && TREE_CODE (arg1) == INTEGER_CST
8719 && !TREE_OVERFLOW (arg1))
8720 {
8721 const enum tree_code
8722 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8723 tree const1 = TREE_OPERAND (arg0, 1);
8724 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8725 tree variable = TREE_OPERAND (arg0, 0);
8726 tree new_const = int_const_binop (reverse_op, const2, const1);
8727
8728 /* If the constant operation overflowed this can be
8729 simplified as a comparison against INT_MAX/INT_MIN. */
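/* For instance, with 32-bit int, x + 1 < INT_MIN folds to false
   and x - 1 < INT_MAX folds to true, because x +- 1 is assumed
   not to overflow (illustrative examples). */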
8730 if (TREE_OVERFLOW (new_const)
8731 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8732 {
8733 int const1_sgn = tree_int_cst_sgn (const1);
8734 enum tree_code code2 = code;
8735
8736 /* Get the sign of the constant on the lhs if the
8737 operation were VARIABLE + CONST1. */
8738 if (TREE_CODE (arg0) == MINUS_EXPR)
8739 const1_sgn = -const1_sgn;
8740
8741 /* The sign of the constant determines if we overflowed
8742 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8743 Canonicalize to the INT_MIN overflow by swapping the comparison
8744 if necessary. */
8745 if (const1_sgn == -1)
8746 code2 = swap_tree_comparison (code);
8747
8748 /* We can now look at the canonicalized case
8749 VARIABLE + 1 CODE2 INT_MIN
8750 and decide on the result. */
8751 switch (code2)
8752 {
8753 case EQ_EXPR:
8754 case LT_EXPR:
8755 case LE_EXPR:
8756 return
8757 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8758
8759 case NE_EXPR:
8760 case GE_EXPR:
8761 case GT_EXPR:
8762 return
8763 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8764
8765 default:
8766 gcc_unreachable ();
8767 }
8768 }
8769 else
8770 {
8771 if (!equality_code)
8772 fold_overflow_warning ("assuming signed overflow does not occur "
8773 "when changing X +- C1 cmp C2 to "
8774 "X cmp C2 -+ C1",
8775 WARN_STRICT_OVERFLOW_COMPARISON);
8776 return fold_build2_loc (loc, code, type, variable, new_const);
8777 }
8778 }
8779
8780 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8781 if (TREE_CODE (arg0) == MINUS_EXPR
8782 && equality_code
8783 && integer_zerop (arg1))
8784 {
8785 /* ??? The transformation is valid for the other operators if overflow
8786 is undefined for the type, but performing it here badly interacts
8787 with the transformation in fold_cond_expr_with_comparison which
8788 attempts to synthesize ABS_EXPR. */
8789 if (!equality_code)
8790 fold_overflow_warning ("assuming signed overflow does not occur "
8791 "when changing X - Y cmp 0 to X cmp Y",
8792 WARN_STRICT_OVERFLOW_COMPARISON);
8793 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8794 TREE_OPERAND (arg0, 1));
8795 }
8796
8797 /* For comparisons of pointers we can decompose it to a compile time
8798 comparison of the base objects and the offsets into the object.
8799 This requires at least one operand being an ADDR_EXPR or a
8800 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
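/* As an illustration, for a structure s with members a and b,
   &s.a < &s.b decomposes to the common base s with two constant
   bit positions and folds to a constant result. */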
8801 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8802 && (TREE_CODE (arg0) == ADDR_EXPR
8803 || TREE_CODE (arg1) == ADDR_EXPR
8804 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8805 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8806 {
8807 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8808 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8809 machine_mode mode;
8810 int volatilep, unsignedp;
8811 bool indirect_base0 = false, indirect_base1 = false;
8812
8813 /* Get base and offset for the access. Strip ADDR_EXPR for
8814 get_inner_reference, but put it back by stripping INDIRECT_REF
8815 off the base object if possible. indirect_baseN will be true
8816 if baseN is not an address but refers to the object itself. */
8817 base0 = arg0;
8818 if (TREE_CODE (arg0) == ADDR_EXPR)
8819 {
8820 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8821 &bitsize, &bitpos0, &offset0, &mode,
8822 &unsignedp, &volatilep, false);
8823 if (TREE_CODE (base0) == INDIRECT_REF)
8824 base0 = TREE_OPERAND (base0, 0);
8825 else
8826 indirect_base0 = true;
8827 }
8828 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8829 {
8830 base0 = TREE_OPERAND (arg0, 0);
8831 STRIP_SIGN_NOPS (base0);
8832 if (TREE_CODE (base0) == ADDR_EXPR)
8833 {
8834 base0 = TREE_OPERAND (base0, 0);
8835 indirect_base0 = true;
8836 }
8837 offset0 = TREE_OPERAND (arg0, 1);
8838 if (tree_fits_shwi_p (offset0))
8839 {
8840 HOST_WIDE_INT off = size_low_cst (offset0);
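/* The multiply-then-divide round trip below checks that
   off * BITS_PER_UNIT does not overflow a HOST_WIDE_INT. */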
8841 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8842 * BITS_PER_UNIT)
8843 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8844 {
8845 bitpos0 = off * BITS_PER_UNIT;
8846 offset0 = NULL_TREE;
8847 }
8848 }
8849 }
8850
8851 base1 = arg1;
8852 if (TREE_CODE (arg1) == ADDR_EXPR)
8853 {
8854 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8855 &bitsize, &bitpos1, &offset1, &mode,
8856 &unsignedp, &volatilep, false);
8857 if (TREE_CODE (base1) == INDIRECT_REF)
8858 base1 = TREE_OPERAND (base1, 0);
8859 else
8860 indirect_base1 = true;
8861 }
8862 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8863 {
8864 base1 = TREE_OPERAND (arg1, 0);
8865 STRIP_SIGN_NOPS (base1);
8866 if (TREE_CODE (base1) == ADDR_EXPR)
8867 {
8868 base1 = TREE_OPERAND (base1, 0);
8869 indirect_base1 = true;
8870 }
8871 offset1 = TREE_OPERAND (arg1, 1);
8872 if (tree_fits_shwi_p (offset1))
8873 {
8874 HOST_WIDE_INT off = size_low_cst (offset1);
8875 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8876 * BITS_PER_UNIT)
8877 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8878 {
8879 bitpos1 = off * BITS_PER_UNIT;
8880 offset1 = NULL_TREE;
8881 }
8882 }
8883 }
8884
8885 /* A local variable can never be pointed to by
8886 the default SSA name of an incoming parameter. */
8887 if ((TREE_CODE (arg0) == ADDR_EXPR
8888 && indirect_base0
8889 && TREE_CODE (base0) == VAR_DECL
8890 && auto_var_in_fn_p (base0, current_function_decl)
8891 && !indirect_base1
8892 && TREE_CODE (base1) == SSA_NAME
8893 && SSA_NAME_IS_DEFAULT_DEF (base1)
8894 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8895 || (TREE_CODE (arg1) == ADDR_EXPR
8896 && indirect_base1
8897 && TREE_CODE (base1) == VAR_DECL
8898 && auto_var_in_fn_p (base1, current_function_decl)
8899 && !indirect_base0
8900 && TREE_CODE (base0) == SSA_NAME
8901 && SSA_NAME_IS_DEFAULT_DEF (base0)
8902 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8903 {
8904 if (code == NE_EXPR)
8905 return constant_boolean_node (1, type);
8906 else if (code == EQ_EXPR)
8907 return constant_boolean_node (0, type);
8908 }
8909 /* If we have equivalent bases we might be able to simplify. */
8910 else if (indirect_base0 == indirect_base1
8911 && operand_equal_p (base0, base1, 0))
8912 {
8913 /* We can fold this expression to a constant if the non-constant
8914 offset parts are equal. */
8915 if ((offset0 == offset1
8916 || (offset0 && offset1
8917 && operand_equal_p (offset0, offset1, 0)))
8918 && (code == EQ_EXPR
8919 || code == NE_EXPR
8920 || (indirect_base0 && DECL_P (base0))
8921 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8923 {
8924 if (!equality_code
8925 && bitpos0 != bitpos1
8926 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8927 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8928 fold_overflow_warning (("assuming pointer wraparound does not "
8929 "occur when comparing P +- C1 with "
8930 "P +- C2"),
8931 WARN_STRICT_OVERFLOW_CONDITIONAL);
8932
8933 switch (code)
8934 {
8935 case EQ_EXPR:
8936 return constant_boolean_node (bitpos0 == bitpos1, type);
8937 case NE_EXPR:
8938 return constant_boolean_node (bitpos0 != bitpos1, type);
8939 case LT_EXPR:
8940 return constant_boolean_node (bitpos0 < bitpos1, type);
8941 case LE_EXPR:
8942 return constant_boolean_node (bitpos0 <= bitpos1, type);
8943 case GE_EXPR:
8944 return constant_boolean_node (bitpos0 >= bitpos1, type);
8945 case GT_EXPR:
8946 return constant_boolean_node (bitpos0 > bitpos1, type);
8947 default:;
8948 }
8949 }
8950 /* We can simplify the comparison to a comparison of the variable
8951 offset parts if the constant offset parts are equal.
8952 Be careful to use signed sizetype here because otherwise we
8953 mess with array offsets in the wrong way. This is possible
8954 because pointer arithmetic is restricted to remain within an
8955 object and overflow on pointer differences is undefined as of
8956 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8957 else if (bitpos0 == bitpos1
8958 && (equality_code
8959 || (indirect_base0 && DECL_P (base0))
8960 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8961 {
8962 /* By converting to signed sizetype we cover middle-end pointer
8963 arithmetic, which operates on unsigned types of sizetype
8964 precision, and ARRAY_REF offsets, which are properly sign- or
8965 zero-extended from their type in case it is narrower than
8966 sizetype. */
8967 if (offset0 == NULL_TREE)
8968 offset0 = build_int_cst (ssizetype, 0);
8969 else
8970 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8971 if (offset1 == NULL_TREE)
8972 offset1 = build_int_cst (ssizetype, 0);
8973 else
8974 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8975
8976 if (!equality_code
8977 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8978 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8979 fold_overflow_warning (("assuming pointer wraparound does not "
8980 "occur when comparing P +- C1 with "
8981 "P +- C2"),
8982 WARN_STRICT_OVERFLOW_COMPARISON);
8983
8984 return fold_build2_loc (loc, code, type, offset0, offset1);
8985 }
8986 }
8987 /* For non-equal bases we can simplify if they are addresses
8988 of local binding decls or constants. */
8989 else if (indirect_base0 && indirect_base1
8990 /* We know that !operand_equal_p (base0, base1, 0)
8991 because the if condition was false. But make
8992 sure two decls are not the same. */
8993 && base0 != base1
8994 && TREE_CODE (arg0) == ADDR_EXPR
8995 && TREE_CODE (arg1) == ADDR_EXPR
8996 && (((TREE_CODE (base0) == VAR_DECL
8997 || TREE_CODE (base0) == PARM_DECL)
8998 && (targetm.binds_local_p (base0)
8999 || CONSTANT_CLASS_P (base1)))
9000 || CONSTANT_CLASS_P (base0))
9001 && (((TREE_CODE (base1) == VAR_DECL
9002 || TREE_CODE (base1) == PARM_DECL)
9003 && (targetm.binds_local_p (base1)
9004 || CONSTANT_CLASS_P (base0)))
9005 || CONSTANT_CLASS_P (base1)))
9006 {
9007 if (code == EQ_EXPR)
9008 return omit_two_operands_loc (loc, type, boolean_false_node,
9009 arg0, arg1);
9010 else if (code == NE_EXPR)
9011 return omit_two_operands_loc (loc, type, boolean_true_node,
9012 arg0, arg1);
9013 }
9014 /* For equal offsets we can simplify to a comparison of the
9015 base addresses. */
9016 else if (bitpos0 == bitpos1
9017 && (indirect_base0
9018 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9019 && (indirect_base1
9020 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9021 && ((offset0 == offset1)
9022 || (offset0 && offset1
9023 && operand_equal_p (offset0, offset1, 0))))
9024 {
9025 if (indirect_base0)
9026 base0 = build_fold_addr_expr_loc (loc, base0);
9027 if (indirect_base1)
9028 base1 = build_fold_addr_expr_loc (loc, base1);
9029 return fold_build2_loc (loc, code, type, base0, base1);
9030 }
9031 }
9032
9033 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9034 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9035 the resulting offset is smaller in absolute value than the
9036 original one and has the same sign. */
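/* For example, x + 7 < y + 3 becomes x + 4 < y: the combined
   constant 4 is smaller in magnitude than 7 and has the same
   sign (illustrative values). */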
9037 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9038 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9039 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9040 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9041 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9042 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9043 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9044 {
9045 tree const1 = TREE_OPERAND (arg0, 1);
9046 tree const2 = TREE_OPERAND (arg1, 1);
9047 tree variable1 = TREE_OPERAND (arg0, 0);
9048 tree variable2 = TREE_OPERAND (arg1, 0);
9049 tree cst;
9050 const char * const warnmsg = G_("assuming signed overflow does not "
9051 "occur when combining constants around "
9052 "a comparison");
9053
9054 /* Put the constant on the side where it doesn't overflow and is
9055 of lower absolute value and of the same sign as before. */
9056 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9057 ? MINUS_EXPR : PLUS_EXPR,
9058 const2, const1);
9059 if (!TREE_OVERFLOW (cst)
9060 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9061 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9062 {
9063 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9064 return fold_build2_loc (loc, code, type,
9065 variable1,
9066 fold_build2_loc (loc, TREE_CODE (arg1),
9067 TREE_TYPE (arg1),
9068 variable2, cst));
9069 }
9070
9071 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9072 ? MINUS_EXPR : PLUS_EXPR,
9073 const1, const2);
9074 if (!TREE_OVERFLOW (cst)
9075 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9076 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9077 {
9078 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9079 return fold_build2_loc (loc, code, type,
9080 fold_build2_loc (loc, TREE_CODE (arg0),
9081 TREE_TYPE (arg0),
9082 variable1, cst),
9083 variable2);
9084 }
9085 }
9086
9087 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9088 signed arithmetic case. That form is created by the compiler
9089 often enough for folding it to be of value. One example is in
9090 computing loop trip counts after Operator Strength Reduction. */
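/* For example, x * 4 > 0 becomes x > 0, while x * -4 > 0 becomes
   x < 0 since a negative constant swaps the comparison
   (illustrative values). */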
9091 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9092 && TREE_CODE (arg0) == MULT_EXPR
9093 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9094 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9095 && integer_zerop (arg1))
9096 {
9097 tree const1 = TREE_OPERAND (arg0, 1);
9098 tree const2 = arg1; /* zero */
9099 tree variable1 = TREE_OPERAND (arg0, 0);
9100 enum tree_code cmp_code = code;
9101
9102 /* Handle unfolded multiplication by zero. */
9103 if (integer_zerop (const1))
9104 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9105
9106 fold_overflow_warning (("assuming signed overflow does not occur when "
9107 "eliminating multiplication in comparison "
9108 "with zero"),
9109 WARN_STRICT_OVERFLOW_COMPARISON);
9110
9111 /* If const1 is negative we swap the sense of the comparison. */
9112 if (tree_int_cst_sgn (const1) < 0)
9113 cmp_code = swap_tree_comparison (cmp_code);
9114
9115 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9116 }
9117
9118 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9119 if (tem)
9120 return tem;
9121
9122 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9123 {
9124 tree targ0 = strip_float_extensions (arg0);
9125 tree targ1 = strip_float_extensions (arg1);
9126 tree newtype = TREE_TYPE (targ0);
9127
9128 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9129 newtype = TREE_TYPE (targ1);
9130
9131 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9132 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9133 return fold_build2_loc (loc, code, type,
9134 fold_convert_loc (loc, newtype, targ0),
9135 fold_convert_loc (loc, newtype, targ1));
9136
9137 /* (-a) CMP (-b) -> b CMP a */
9138 if (TREE_CODE (arg0) == NEGATE_EXPR
9139 && TREE_CODE (arg1) == NEGATE_EXPR)
9140 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9141 TREE_OPERAND (arg0, 0));
9142
9143 if (TREE_CODE (arg1) == REAL_CST)
9144 {
9145 REAL_VALUE_TYPE cst;
9146 cst = TREE_REAL_CST (arg1);
9147
9148 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9149 if (TREE_CODE (arg0) == NEGATE_EXPR)
9150 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9151 TREE_OPERAND (arg0, 0),
9152 build_real (TREE_TYPE (arg1),
9153 real_value_negate (&cst)));
9154
9155 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9156 /* a CMP (-0) -> a CMP 0 */
9157 if (REAL_VALUE_MINUS_ZERO (cst))
9158 return fold_build2_loc (loc, code, type, arg0,
9159 build_real (TREE_TYPE (arg1), dconst0));
9160
9161 /* x != NaN is always true, other ops are always false. */
9162 if (REAL_VALUE_ISNAN (cst)
9163 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9164 {
9165 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9166 return omit_one_operand_loc (loc, type, tem, arg0);
9167 }
9168
9169 /* Fold comparisons against infinity. */
9170 if (REAL_VALUE_ISINF (cst)
9171 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9172 {
9173 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9174 if (tem != NULL_TREE)
9175 return tem;
9176 }
9177 }
9178
9179 /* If this is a comparison of a real constant with a PLUS_EXPR
9180 or a MINUS_EXPR of a real constant, we can convert it into a
9181 comparison with a revised real constant as long as no overflow
9182 occurs when unsafe_math_optimizations are enabled. */
9183 if (flag_unsafe_math_optimizations
9184 && TREE_CODE (arg1) == REAL_CST
9185 && (TREE_CODE (arg0) == PLUS_EXPR
9186 || TREE_CODE (arg0) == MINUS_EXPR)
9187 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9188 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9189 ? MINUS_EXPR : PLUS_EXPR,
9190 arg1, TREE_OPERAND (arg0, 1)))
9191 && !TREE_OVERFLOW (tem))
9192 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9193
9194 /* Likewise, we can simplify a comparison of a real constant with
9195 a MINUS_EXPR whose first operand is also a real constant, i.e.
9196 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9197 floating-point types only if -fassociative-math is set. */
9198 if (flag_associative_math
9199 && TREE_CODE (arg1) == REAL_CST
9200 && TREE_CODE (arg0) == MINUS_EXPR
9201 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9202 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9203 arg1))
9204 && !TREE_OVERFLOW (tem))
9205 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9206 TREE_OPERAND (arg0, 1), tem);
9207
9208 /* Fold comparisons against built-in math functions. */
9209 if (TREE_CODE (arg1) == REAL_CST
9210 && flag_unsafe_math_optimizations
9211 && ! flag_errno_math)
9212 {
9213 enum built_in_function fcode = builtin_mathfn_code (arg0);
9214
9215 if (fcode != END_BUILTINS)
9216 {
9217 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9218 if (tem != NULL_TREE)
9219 return tem;
9220 }
9221 }
9222 }
9223
9224 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9225 && CONVERT_EXPR_P (arg0))
9226 {
9227 /* If we are widening one operand of an integer comparison,
9228 see if the other operand is similarly being widened. Perhaps we
9229 can do the comparison in the narrower type. */
9230 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9231 if (tem)
9232 return tem;
9233
9234 /* Or if we are changing signedness. */
9235 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9236 if (tem)
9237 return tem;
9238 }
9239
9240 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9241 constant, we can simplify it. */
9242 if (TREE_CODE (arg1) == INTEGER_CST
9243 && (TREE_CODE (arg0) == MIN_EXPR
9244 || TREE_CODE (arg0) == MAX_EXPR)
9245 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9246 {
9247 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9248 if (tem)
9249 return tem;
9250 }
9251
9252 /* Simplify comparison of something with itself. (For IEEE
9253 floating-point, we can only do some of these simplifications.) */
9254 if (operand_equal_p (arg0, arg1, 0))
9255 {
9256 switch (code)
9257 {
9258 case EQ_EXPR:
9259 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9260 || ! HONOR_NANS (element_mode (arg0)))
9261 return constant_boolean_node (1, type);
9262 break;
9263
9264 case GE_EXPR:
9265 case LE_EXPR:
9266 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9267 || ! HONOR_NANS (element_mode (arg0)))
9268 return constant_boolean_node (1, type);
9269 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9270
9271 case NE_EXPR:
9272 /* For NE, we can only do this simplification if the type is
9273 integral or we don't honor IEEE floating-point NaNs. */
9274 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9275 && HONOR_NANS (element_mode (arg0)))
9276 break;
9277 /* ... fall through ... */
9278 case GT_EXPR:
9279 case LT_EXPR:
9280 return constant_boolean_node (0, type);
9281 default:
9282 gcc_unreachable ();
9283 }
9284 }
9285
9286 /* If we are comparing an expression that just has comparisons
9287 of two integer values, arithmetic expressions of those comparisons,
9288 and constants, we can simplify it. There are only three cases
9289 to check: the two values can either be equal, the first can be
9290 greater, or the second can be greater. Fold the expression for
9291 those three values. Since each value must be 0 or 1, we have
9292 eight possibilities, each of which corresponds to the constant 0
9293 or 1 or one of the six possible comparisons.
9294
9295 This handles common cases like (a > b) == 0 but also handles
9296 expressions like ((x > y) - (y > x)) > 0, which supposedly
9297 occur in macroized code. */
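/* A worked illustration: for (a > b) == 0 the three evaluations
   at (max,min), (max,max) and (min,max) yield 0, 1 and 1, giving
   the mask 0*4 + 1*2 + 1 == 3, which selects LE_EXPR, so the
   expression folds to a <= b. */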
9298
9299 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9300 {
9301 tree cval1 = 0, cval2 = 0;
9302 int save_p = 0;
9303
9304 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9305 /* Don't handle degenerate cases here; they should already
9306 have been handled anyway. */
9307 && cval1 != 0 && cval2 != 0
9308 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9309 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9310 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9311 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9312 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9313 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9314 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9315 {
9316 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9317 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9318
9319 /* We can't just pass T to eval_subst in case cval1 or cval2
9320 was the same as ARG1. */
9321
9322 tree high_result
9323 = fold_build2_loc (loc, code, type,
9324 eval_subst (loc, arg0, cval1, maxval,
9325 cval2, minval),
9326 arg1);
9327 tree equal_result
9328 = fold_build2_loc (loc, code, type,
9329 eval_subst (loc, arg0, cval1, maxval,
9330 cval2, maxval),
9331 arg1);
9332 tree low_result
9333 = fold_build2_loc (loc, code, type,
9334 eval_subst (loc, arg0, cval1, minval,
9335 cval2, maxval),
9336 arg1);
9337
9338 /* All three of these results should be 0 or 1. Confirm they are.
9339 Then use those values to select the proper code to use. */
9340
9341 if (TREE_CODE (high_result) == INTEGER_CST
9342 && TREE_CODE (equal_result) == INTEGER_CST
9343 && TREE_CODE (low_result) == INTEGER_CST)
9344 {
9345 /* Make a 3-bit mask with the high-order bit being the
9346 value for `>', the next for `=', and the low for `<'. */
9347 switch ((integer_onep (high_result) * 4)
9348 + (integer_onep (equal_result) * 2)
9349 + integer_onep (low_result))
9350 {
9351 case 0:
9352 /* Always false. */
9353 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9354 case 1:
9355 code = LT_EXPR;
9356 break;
9357 case 2:
9358 code = EQ_EXPR;
9359 break;
9360 case 3:
9361 code = LE_EXPR;
9362 break;
9363 case 4:
9364 code = GT_EXPR;
9365 break;
9366 case 5:
9367 code = NE_EXPR;
9368 break;
9369 case 6:
9370 code = GE_EXPR;
9371 break;
9372 case 7:
9373 /* Always true. */
9374 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9375 }
9376
9377 if (save_p)
9378 {
9379 tem = save_expr (build2 (code, type, cval1, cval2));
9380 SET_EXPR_LOCATION (tem, loc);
9381 return tem;
9382 }
9383 return fold_build2_loc (loc, code, type, cval1, cval2);
9384 }
9385 }
9386 }
9387
9388 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9389 into a single range test. */
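/* For example, with unsigned x, x / 4 == 2 is equivalent to the
   range test 8 <= x && x <= 11 (illustrative; fold_div_compare
   does the actual case analysis). */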
9390 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9391 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9392 && TREE_CODE (arg1) == INTEGER_CST
9393 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9394 && !integer_zerop (TREE_OPERAND (arg0, 1))
9395 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9396 && !TREE_OVERFLOW (arg1))
9397 {
9398 tem = fold_div_compare (loc, code, type, arg0, arg1);
9399 if (tem != NULL_TREE)
9400 return tem;
9401 }
9402
9403 /* Fold ~X op ~Y as Y op X. */
9404 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9405 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9406 {
9407 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9408 return fold_build2_loc (loc, code, type,
9409 fold_convert_loc (loc, cmp_type,
9410 TREE_OPERAND (arg1, 0)),
9411 TREE_OPERAND (arg0, 0));
9412 }
9413
9414 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9415 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9416 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9417 {
9418 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9419 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9420 TREE_OPERAND (arg0, 0),
9421 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9422 fold_convert_loc (loc, cmp_type, arg1)));
9423 }
9424
9425 return NULL_TREE;
9426 }
9427
9428
9429 /* Subroutine of fold_binary. Optimize complex multiplications of the
9430 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9431 argument EXPR represents the expression "z" of type TYPE. */
9432
9433 static tree
9434 fold_mult_zconjz (location_t loc, tree type, tree expr)
9435 {
9436 tree itype = TREE_TYPE (type);
9437 tree rpart, ipart, tem;
9438
9439 if (TREE_CODE (expr) == COMPLEX_EXPR)
9440 {
9441 rpart = TREE_OPERAND (expr, 0);
9442 ipart = TREE_OPERAND (expr, 1);
9443 }
9444 else if (TREE_CODE (expr) == COMPLEX_CST)
9445 {
9446 rpart = TREE_REALPART (expr);
9447 ipart = TREE_IMAGPART (expr);
9448 }
9449 else
9450 {
9451 expr = save_expr (expr);
9452 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9453 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9454 }
9455
9456 rpart = save_expr (rpart);
9457 ipart = save_expr (ipart);
9458 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9459 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9460 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9461 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9462 build_zero_cst (itype));
9463 }
9464
9465
9466 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9467 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9468 guarantees that P and N have the same least significant log2(M) bits.
9469 N is not otherwise constrained. In particular, N is not normalized to
9470 0 <= N < M as is common. In general, the precise value of P is unknown.
9471 M is chosen as large as possible such that constant N can be determined.
9472
9473 Returns M and sets *RESIDUE to N.
9474
9475 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9476 account. This is not always possible due to PR 35705.
9477 */
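/* As an illustrative sketch: if EXPR is &a[3] for a char array a
   known to be 16-byte aligned, the result is M == 16 with
   *RESIDUE == 3, i.e. the pointer value is congruent to 3
   modulo 16. */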
9478
9479 static unsigned HOST_WIDE_INT
9480 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9481 bool allow_func_align)
9482 {
9483 enum tree_code code;
9484
9485 *residue = 0;
9486
9487 code = TREE_CODE (expr);
9488 if (code == ADDR_EXPR)
9489 {
9490 unsigned int bitalign;
9491 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9492 *residue /= BITS_PER_UNIT;
9493 return bitalign / BITS_PER_UNIT;
9494 }
9495 else if (code == POINTER_PLUS_EXPR)
9496 {
9497 tree op0, op1;
9498 unsigned HOST_WIDE_INT modulus;
9499 enum tree_code inner_code;
9500
9501 op0 = TREE_OPERAND (expr, 0);
9502 STRIP_NOPS (op0);
9503 modulus = get_pointer_modulus_and_residue (op0, residue,
9504 allow_func_align);
9505
9506 op1 = TREE_OPERAND (expr, 1);
9507 STRIP_NOPS (op1);
9508 inner_code = TREE_CODE (op1);
9509 if (inner_code == INTEGER_CST)
9510 {
9511 *residue += TREE_INT_CST_LOW (op1);
9512 return modulus;
9513 }
9514 else if (inner_code == MULT_EXPR)
9515 {
9516 op1 = TREE_OPERAND (op1, 1);
9517 if (TREE_CODE (op1) == INTEGER_CST)
9518 {
9519 unsigned HOST_WIDE_INT align;
9520
9521 /* Compute the greatest power-of-2 divisor of op1. */
9522 align = TREE_INT_CST_LOW (op1);
9523 align &= -align;
9524
9525 /* If align is non-zero and less than modulus, replace
9526 modulus with align. If align is 0, then either op1 is 0
9527 or the greatest power-of-2 divisor of op1 doesn't fit in an
9528 unsigned HOST_WIDE_INT. In either case, no additional
9529 constraint is imposed. */
9530 if (align)
9531 modulus = MIN (modulus, align);
9532
9533 return modulus;
9534 }
9535 }
9536 }
9537
9538 /* If we get here, we were unable to determine anything useful about the
9539 expression. */
9540 return 1;
9541 }
9542
9543 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9544 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9545
9546 static bool
9547 vec_cst_ctor_to_array (tree arg, tree *elts)
9548 {
9549 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9550
9551 if (TREE_CODE (arg) == VECTOR_CST)
9552 {
9553 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9554 elts[i] = VECTOR_CST_ELT (arg, i);
9555 }
9556 else if (TREE_CODE (arg) == CONSTRUCTOR)
9557 {
9558 constructor_elt *elt;
9559
9560 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9561 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9562 return false;
9563 else
9564 elts[i] = elt->value;
9565 }
9566 else
9567 return false;
9568 for (; i < nelts; i++)
9569 elts[i]
9570 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9571 return true;
9572 }
9573
9574 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9575 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9576 NULL_TREE otherwise. */
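/* For example, with four-element vectors, SEL == {0, 5, 2, 7}
   takes elements 0 and 2 from ARG0 and elements 1 and 3 from
   ARG1; selector values of NELTS and above index into ARG1. */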
9577
9578 static tree
9579 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9580 {
9581 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9582 tree *elts;
9583 bool need_ctor = false;
9584
9585 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9586 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9587 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9588 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9589 return NULL_TREE;
9590
9591 elts = XALLOCAVEC (tree, nelts * 3);
9592 if (!vec_cst_ctor_to_array (arg0, elts)
9593 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9594 return NULL_TREE;
9595
9596 for (i = 0; i < nelts; i++)
9597 {
9598 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9599 need_ctor = true;
9600 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9601 }
9602
9603 if (need_ctor)
9604 {
9605 vec<constructor_elt, va_gc> *v;
9606 vec_alloc (v, nelts);
9607 for (i = 0; i < nelts; i++)
9608 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9609 return build_constructor (type, v);
9610 }
9611 else
9612 return build_vector (type, &elts[2 * nelts]);
9613 }
9614
9615 /* Try to fold a pointer difference of type TYPE between two address expressions of
9616 array references AREF0 and AREF1 using location LOC. Return a
9617 simplified expression for the difference or NULL_TREE. */
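/* For example, &a[i] - &a[j] simplifies to (i - j) * sizeof (a[0]);
   the recursion extends this to nested references such as
   &a[i][k] - &a[j][k] (illustrative C-level view). */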
9618
9619 static tree
9620 fold_addr_of_array_ref_difference (location_t loc, tree type,
9621 tree aref0, tree aref1)
9622 {
9623 tree base0 = TREE_OPERAND (aref0, 0);
9624 tree base1 = TREE_OPERAND (aref1, 0);
9625 tree base_offset = build_int_cst (type, 0);
9626
9627 /* If the bases are array references as well, recurse. If the bases
9628 are pointer indirections compute the difference of the pointers.
9629 If the bases are equal, we are set. */
9630 if ((TREE_CODE (base0) == ARRAY_REF
9631 && TREE_CODE (base1) == ARRAY_REF
9632 && (base_offset
9633 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9634 || (INDIRECT_REF_P (base0)
9635 && INDIRECT_REF_P (base1)
9636 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9637 TREE_OPERAND (base0, 0),
9638 TREE_OPERAND (base1, 0))))
9639 || operand_equal_p (base0, base1, 0))
9640 {
9641 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9642 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9643 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9644 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9645 return fold_build2_loc (loc, PLUS_EXPR, type,
9646 base_offset,
9647 fold_build2_loc (loc, MULT_EXPR, type,
9648 diff, esz));
9649 }
9650 return NULL_TREE;
9651 }
9652
9653 /* If the real or vector real constant CST of type TYPE has an exact
9654 inverse, return it, else return NULL_TREE. */
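/* For example, 4.0 has the exact inverse 0.25 and is handled,
   whereas 3.0 yields NULL_TREE because 1/3 is not exactly
   representable in binary floating point. */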
9655
9656 tree
9657 exact_inverse (tree type, tree cst)
9658 {
9659 REAL_VALUE_TYPE r;
9660 tree unit_type, *elts;
9661 machine_mode mode;
9662 unsigned vec_nelts, i;
9663
9664 switch (TREE_CODE (cst))
9665 {
9666 case REAL_CST:
9667 r = TREE_REAL_CST (cst);
9668
9669 if (exact_real_inverse (TYPE_MODE (type), &r))
9670 return build_real (type, r);
9671
9672 return NULL_TREE;
9673
9674 case VECTOR_CST:
9675 vec_nelts = VECTOR_CST_NELTS (cst);
9676 elts = XALLOCAVEC (tree, vec_nelts);
9677 unit_type = TREE_TYPE (type);
9678 mode = TYPE_MODE (unit_type);
9679
9680 for (i = 0; i < vec_nelts; i++)
9681 {
9682 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9683 if (!exact_real_inverse (mode, &r))
9684 return NULL_TREE;
9685 elts[i] = build_real (unit_type, r);
9686 }
9687
9688 return build_vector (type, elts);
9689
9690 default:
9691 return NULL_TREE;
9692 }
9693 }
9694
9695 /* Mask out the tz least significant bits of X of type TYPE where
9696 tz is the number of trailing zeroes in Y. */
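/* For example, if Y is 24 (three trailing zero bits), the result
   is X with its low three bits cleared, i.e. X & ~7. */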
9697 static wide_int
9698 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9699 {
9700 int tz = wi::ctz (y);
9701 if (tz > 0)
9702 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9703 return x;
9704 }
9705
9706 /* Return true when T is an address and is known to be nonzero.
9707 For floating point we further ensure that T is not denormal.
9708 Similar logic is present in nonzero_address_p in rtlanal.c.
9709
9710 If the return value is based on the assumption that signed overflow
9711 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9712 change *STRICT_OVERFLOW_P. */
9713
9714 static bool
9715 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9716 {
9717 tree type = TREE_TYPE (t);
9718 enum tree_code code;
9719
9720 /* Doing something useful for floating point would need more work. */
9721 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9722 return false;
9723
9724 code = TREE_CODE (t);
9725 switch (TREE_CODE_CLASS (code))
9726 {
9727 case tcc_unary:
9728 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9729 strict_overflow_p);
9730 case tcc_binary:
9731 case tcc_comparison:
9732 return tree_binary_nonzero_warnv_p (code, type,
9733 TREE_OPERAND (t, 0),
9734 TREE_OPERAND (t, 1),
9735 strict_overflow_p);
9736 case tcc_constant:
9737 case tcc_declaration:
9738 case tcc_reference:
9739 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9740
9741 default:
9742 break;
9743 }
9744
9745 switch (code)
9746 {
9747 case TRUTH_NOT_EXPR:
9748 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9749 strict_overflow_p);
9750
9751 case TRUTH_AND_EXPR:
9752 case TRUTH_OR_EXPR:
9753 case TRUTH_XOR_EXPR:
9754 return tree_binary_nonzero_warnv_p (code, type,
9755 TREE_OPERAND (t, 0),
9756 TREE_OPERAND (t, 1),
9757 strict_overflow_p);
9758
9759 case COND_EXPR:
9760 case CONSTRUCTOR:
9761 case OBJ_TYPE_REF:
9762 case ASSERT_EXPR:
9763 case ADDR_EXPR:
9764 case WITH_SIZE_EXPR:
9765 case SSA_NAME:
9766 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9767
9768 case COMPOUND_EXPR:
9769 case MODIFY_EXPR:
9770 case BIND_EXPR:
9771 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9772 strict_overflow_p);
9773
9774 case SAVE_EXPR:
9775 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9776 strict_overflow_p);
9777
9778 case CALL_EXPR:
9779 {
9780 tree fndecl = get_callee_fndecl (t);
9781 if (!fndecl) return false;
9782 if (flag_delete_null_pointer_checks && !flag_check_new
9783 && DECL_IS_OPERATOR_NEW (fndecl)
9784 && !TREE_NOTHROW (fndecl))
9785 return true;
9786 if (flag_delete_null_pointer_checks
9787 && lookup_attribute ("returns_nonnull",
9788 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9789 return true;
9790 return alloca_call_p (t);
9791 }
9792
9793 default:
9794 break;
9795 }
9796 return false;
9797 }
9798
9799 /* Return true when T is an address and is known to be nonzero.
9800 Handle warnings about undefined signed overflow. */
9801
9802 static bool
9803 tree_expr_nonzero_p (tree t)
9804 {
9805 bool ret, strict_overflow_p;
9806
9807 strict_overflow_p = false;
9808 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9809 if (strict_overflow_p)
9810 fold_overflow_warning (("assuming signed overflow does not occur when "
9811 "determining that expression is always "
9812 "non-zero"),
9813 WARN_STRICT_OVERFLOW_MISC);
9814 return ret;
9815 }
9816
9817 /* Fold a binary expression of code CODE and type TYPE with operands
9818 OP0 and OP1. LOC is the location of the resulting expression.
9819 Return the folded expression if folding is successful. Otherwise,
9820 return NULL_TREE. */
9821
9822 tree
9823 fold_binary_loc (location_t loc,
9824 enum tree_code code, tree type, tree op0, tree op1)
9825 {
9826 enum tree_code_class kind = TREE_CODE_CLASS (code);
9827 tree arg0, arg1, tem;
9828 tree t1 = NULL_TREE;
9829 bool strict_overflow_p;
9830 unsigned int prec;
9831
9832 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9833 && TREE_CODE_LENGTH (code) == 2
9834 && op0 != NULL_TREE
9835 && op1 != NULL_TREE);
9836
9837 arg0 = op0;
9838 arg1 = op1;
9839
9840 /* Strip any conversions that don't change the mode. This is
9841 safe for every expression, except for a comparison expression
9842 because its signedness is derived from its operands. So, in
9843 the latter case, only strip conversions that don't change the
9844 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9845 preserved.
9846
9847 Note that this is done as an internal manipulation within the
9848 constant folder, in order to find the simplest representation
9849 of the arguments so that their form can be studied. In any
9850 cases, the appropriate type conversions should be put back in
9851 the tree that will get out of the constant folder. */
9852
9853 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9854 {
9855 STRIP_SIGN_NOPS (arg0);
9856 STRIP_SIGN_NOPS (arg1);
9857 }
9858 else
9859 {
9860 STRIP_NOPS (arg0);
9861 STRIP_NOPS (arg1);
9862 }
9863
9864 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9865 constant but we can't do arithmetic on them. */
9866 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9867 {
9868 tem = const_binop (code, type, arg0, arg1);
9869 if (tem != NULL_TREE)
9870 {
9871 if (TREE_TYPE (tem) != type)
9872 tem = fold_convert_loc (loc, type, tem);
9873 return tem;
9874 }
9875 }
9876
9877 /* If this is a commutative operation, and ARG0 is a constant, move it
9878 to ARG1 to reduce the number of tests below. */
9879 if (commutative_tree_code (code)
9880 && tree_swap_operands_p (arg0, arg1, true))
9881 return fold_build2_loc (loc, code, type, op1, op0);
9882
9883 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9884 to ARG1 to reduce the number of tests below. */
9885 if (kind == tcc_comparison
9886 && tree_swap_operands_p (arg0, arg1, true))
9887 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9888
9889 tem = generic_simplify (loc, code, type, op0, op1);
9890 if (tem)
9891 return tem;
9892
9893 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9894
9895 First check for cases where an arithmetic operation is applied to a
9896 compound, conditional, or comparison operation. Push the arithmetic
9897 operation inside the compound or conditional to see if any folding
9898 can then be done. Convert comparison to conditional for this purpose.
9899 This also optimizes non-constant cases that used to be done in
9900 expand_expr.
9901
9902 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9903 one of the operands is a comparison and the other is a comparison, a
9904 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9905 code below would make the expression more complex. Change it to a
9906 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9907 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9908
9909 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9910 || code == EQ_EXPR || code == NE_EXPR)
9911 && TREE_CODE (type) != VECTOR_TYPE
9912 && ((truth_value_p (TREE_CODE (arg0))
9913 && (truth_value_p (TREE_CODE (arg1))
9914 || (TREE_CODE (arg1) == BIT_AND_EXPR
9915 && integer_onep (TREE_OPERAND (arg1, 1)))))
9916 || (truth_value_p (TREE_CODE (arg1))
9917 && (truth_value_p (TREE_CODE (arg0))
9918 || (TREE_CODE (arg0) == BIT_AND_EXPR
9919 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9920 {
9921 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9922 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9923 : TRUTH_XOR_EXPR,
9924 boolean_type_node,
9925 fold_convert_loc (loc, boolean_type_node, arg0),
9926 fold_convert_loc (loc, boolean_type_node, arg1));
9927
9928 if (code == EQ_EXPR)
9929 tem = invert_truthvalue_loc (loc, tem);
9930
9931 return fold_convert_loc (loc, type, tem);
9932 }
9933
9934 if (TREE_CODE_CLASS (code) == tcc_binary
9935 || TREE_CODE_CLASS (code) == tcc_comparison)
9936 {
9937 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9938 {
9939 tem = fold_build2_loc (loc, code, type,
9940 fold_convert_loc (loc, TREE_TYPE (op0),
9941 TREE_OPERAND (arg0, 1)), op1);
9942 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9943 tem);
9944 }
9945 if (TREE_CODE (arg1) == COMPOUND_EXPR
9946 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9947 {
9948 tem = fold_build2_loc (loc, code, type, op0,
9949 fold_convert_loc (loc, TREE_TYPE (op1),
9950 TREE_OPERAND (arg1, 1)));
9951 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9952 tem);
9953 }
9954
9955 if (TREE_CODE (arg0) == COND_EXPR
9956 || TREE_CODE (arg0) == VEC_COND_EXPR
9957 || COMPARISON_CLASS_P (arg0))
9958 {
9959 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9960 arg0, arg1,
9961 /*cond_first_p=*/1);
9962 if (tem != NULL_TREE)
9963 return tem;
9964 }
9965
9966 if (TREE_CODE (arg1) == COND_EXPR
9967 || TREE_CODE (arg1) == VEC_COND_EXPR
9968 || COMPARISON_CLASS_P (arg1))
9969 {
9970 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9971 arg1, arg0,
9972 /*cond_first_p=*/0);
9973 if (tem != NULL_TREE)
9974 return tem;
9975 }
9976 }
9977
9978 switch (code)
9979 {
9980 case MEM_REF:
9981 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9982 if (TREE_CODE (arg0) == ADDR_EXPR
9983 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9984 {
9985 tree iref = TREE_OPERAND (arg0, 0);
9986 return fold_build2 (MEM_REF, type,
9987 TREE_OPERAND (iref, 0),
9988 int_const_binop (PLUS_EXPR, arg1,
9989 TREE_OPERAND (iref, 1)));
9990 }
9991
9992 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9993 if (TREE_CODE (arg0) == ADDR_EXPR
9994 && handled_component_p (TREE_OPERAND (arg0, 0)))
9995 {
9996 tree base;
9997 HOST_WIDE_INT coffset;
9998 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9999 &coffset);
10000 if (!base)
10001 return NULL_TREE;
10002 return fold_build2 (MEM_REF, type,
10003 build_fold_addr_expr (base),
10004 int_const_binop (PLUS_EXPR, arg1,
10005 size_int (coffset)));
10006 }
10007
10008 return NULL_TREE;
10009
10010 case POINTER_PLUS_EXPR:
10011 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10012 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10013 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10014 return fold_convert_loc (loc, type,
10015 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10016 fold_convert_loc (loc, sizetype,
10017 arg1),
10018 fold_convert_loc (loc, sizetype,
10019 arg0)));
10020
10021 return NULL_TREE;
10022
10023 case PLUS_EXPR:
10024 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10025 {
10026 /* X + (X / CST) * -CST is X % CST. */
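/* For example, x + (x / 16) * -16 folds to x % 16; the check
   below verifies that the two constants cancel (illustrative
   value). */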
10027 if (TREE_CODE (arg1) == MULT_EXPR
10028 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10029 && operand_equal_p (arg0,
10030 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10031 {
10032 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10033 tree cst1 = TREE_OPERAND (arg1, 1);
10034 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10035 cst1, cst0);
10036 if (sum && integer_zerop (sum))
10037 return fold_convert_loc (loc, type,
10038 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10039 TREE_TYPE (arg0), arg0,
10040 cst0));
10041 }
10042 }
10043
10044 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10045 one. Make sure the type is not saturating and has the signedness of
10046 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10047 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
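/* For instance, a * 4 + a * 6 can become a * 10, and a * c + b * c
   can become (a + b) * c (illustrative; fold_plusminus_mult_expr
   decides when this is safe). */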
10048 if ((TREE_CODE (arg0) == MULT_EXPR
10049 || TREE_CODE (arg1) == MULT_EXPR)
10050 && !TYPE_SATURATING (type)
10051 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10052 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10053 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10054 {
10055 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10056 if (tem)
10057 return tem;
10058 }
10059
10060 if (! FLOAT_TYPE_P (type))
10061 {
10062 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10063 with a constant, and the two constants have no bits in common,
10064 we should treat this as a BIT_IOR_EXPR since this may produce more
10065 simplifications. */
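/* For example, (x & 0xf0) + (y & 0x0f) cannot produce carries
   between the two masked values and is handled as
   (x & 0xf0) | (y & 0x0f). */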
10066 if (TREE_CODE (arg0) == BIT_AND_EXPR
10067 && TREE_CODE (arg1) == BIT_AND_EXPR
10068 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10069 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10070 && wi::bit_and (TREE_OPERAND (arg0, 1),
10071 TREE_OPERAND (arg1, 1)) == 0)
10072 {
10073 code = BIT_IOR_EXPR;
10074 goto bit_ior;
10075 }
10076
10077 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10078 (plus (plus (mult) (mult)) (foo)) so that we can
10079 take advantage of the factoring cases below. */
10080 if (TYPE_OVERFLOW_WRAPS (type)
10081 && (((TREE_CODE (arg0) == PLUS_EXPR
10082 || TREE_CODE (arg0) == MINUS_EXPR)
10083 && TREE_CODE (arg1) == MULT_EXPR)
10084 || ((TREE_CODE (arg1) == PLUS_EXPR
10085 || TREE_CODE (arg1) == MINUS_EXPR)
10086 && TREE_CODE (arg0) == MULT_EXPR)))
10087 {
10088 tree parg0, parg1, parg, marg;
10089 enum tree_code pcode;
10090
10091 if (TREE_CODE (arg1) == MULT_EXPR)
10092 parg = arg0, marg = arg1;
10093 else
10094 parg = arg1, marg = arg0;
10095 pcode = TREE_CODE (parg);
10096 parg0 = TREE_OPERAND (parg, 0);
10097 parg1 = TREE_OPERAND (parg, 1);
10098 STRIP_NOPS (parg0);
10099 STRIP_NOPS (parg1);
10100
10101 if (TREE_CODE (parg0) == MULT_EXPR
10102 && TREE_CODE (parg1) != MULT_EXPR)
10103 return fold_build2_loc (loc, pcode, type,
10104 fold_build2_loc (loc, PLUS_EXPR, type,
10105 fold_convert_loc (loc, type,
10106 parg0),
10107 fold_convert_loc (loc, type,
10108 marg)),
10109 fold_convert_loc (loc, type, parg1));
10110 if (TREE_CODE (parg0) != MULT_EXPR
10111 && TREE_CODE (parg1) == MULT_EXPR)
10112 return
10113 fold_build2_loc (loc, PLUS_EXPR, type,
10114 fold_convert_loc (loc, type, parg0),
10115 fold_build2_loc (loc, pcode, type,
10116 fold_convert_loc (loc, type, marg),
10117 fold_convert_loc (loc, type,
10118 parg1)));
10119 }
10120 }
10121 else
10122 {
10123 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10124 to __complex__ ( x, y ). This is not the same for SNaNs or
10125 if signed zeros are involved. */
10126 if (!HONOR_SNANS (element_mode (arg0))
10127 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10128 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10129 {
10130 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10131 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10132 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10133 bool arg0rz = false, arg0iz = false;
10134 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10135 || (arg0i && (arg0iz = real_zerop (arg0i))))
10136 {
10137 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10138 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10139 if (arg0rz && arg1i && real_zerop (arg1i))
10140 {
10141 tree rp = arg1r ? arg1r
10142 : build1 (REALPART_EXPR, rtype, arg1);
10143 tree ip = arg0i ? arg0i
10144 : build1 (IMAGPART_EXPR, rtype, arg0);
10145 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10146 }
10147 else if (arg0iz && arg1r && real_zerop (arg1r))
10148 {
10149 tree rp = arg0r ? arg0r
10150 : build1 (REALPART_EXPR, rtype, arg0);
10151 tree ip = arg1i ? arg1i
10152 : build1 (IMAGPART_EXPR, rtype, arg1);
10153 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10154 }
10155 }
10156 }
10157
10158 if (flag_unsafe_math_optimizations
10159 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10160 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10161 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10162 return tem;
10163
10164 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10165 We associate floats only if the user has specified
10166 -fassociative-math. */
10167 if (flag_associative_math
10168 && TREE_CODE (arg1) == PLUS_EXPR
10169 && TREE_CODE (arg0) != MULT_EXPR)
10170 {
10171 tree tree10 = TREE_OPERAND (arg1, 0);
10172 tree tree11 = TREE_OPERAND (arg1, 1);
10173 if (TREE_CODE (tree11) == MULT_EXPR
10174 && TREE_CODE (tree10) == MULT_EXPR)
10175 {
10176 tree tree0;
10177 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10178 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10179 }
10180 }
10181 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10182 We associate floats only if the user has specified
10183 -fassociative-math. */
10184 if (flag_associative_math
10185 && TREE_CODE (arg0) == PLUS_EXPR
10186 && TREE_CODE (arg1) != MULT_EXPR)
10187 {
10188 tree tree00 = TREE_OPERAND (arg0, 0);
10189 tree tree01 = TREE_OPERAND (arg0, 1);
10190 if (TREE_CODE (tree01) == MULT_EXPR
10191 && TREE_CODE (tree00) == MULT_EXPR)
10192 {
10193 tree tree0;
10194 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10195 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10196 }
10197 }
10198 }
10199
10200 bit_rotate:
10201 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10202 is a rotate of A by C1 bits. */
10203 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10204 is a rotate of A by B bits. */
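/* For example, with 32-bit unsigned a, (a << 3) + (a >> 29) is
   recognized as a rotate left by 3, as is
   (a << b) + (a >> (32 - b)) (illustrative width). */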
10205 {
10206 enum tree_code code0, code1;
10207 tree rtype;
10208 code0 = TREE_CODE (arg0);
10209 code1 = TREE_CODE (arg1);
10210 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10211 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10212 && operand_equal_p (TREE_OPERAND (arg0, 0),
10213 TREE_OPERAND (arg1, 0), 0)
10214 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10215 TYPE_UNSIGNED (rtype))
10216 /* Only create rotates in complete modes. Other cases are not
10217 expanded properly. */
10218 && (element_precision (rtype)
10219 == element_precision (TYPE_MODE (rtype))))
10220 {
10221 tree tree01, tree11;
10222 enum tree_code code01, code11;
10223
10224 tree01 = TREE_OPERAND (arg0, 1);
10225 tree11 = TREE_OPERAND (arg1, 1);
10226 STRIP_NOPS (tree01);
10227 STRIP_NOPS (tree11);
10228 code01 = TREE_CODE (tree01);
10229 code11 = TREE_CODE (tree11);
10230 if (code01 == INTEGER_CST
10231 && code11 == INTEGER_CST
10232 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10233 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10234 {
10235 tem = build2_loc (loc, LROTATE_EXPR,
10236 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10237 TREE_OPERAND (arg0, 0),
10238 code0 == LSHIFT_EXPR ? tree01 : tree11);
10239 return fold_convert_loc (loc, type, tem);
10240 }
10241 else if (code11 == MINUS_EXPR)
10242 {
10243 tree tree110, tree111;
10244 tree110 = TREE_OPERAND (tree11, 0);
10245 tree111 = TREE_OPERAND (tree11, 1);
10246 STRIP_NOPS (tree110);
10247 STRIP_NOPS (tree111);
10248 if (TREE_CODE (tree110) == INTEGER_CST
10249 && 0 == compare_tree_int (tree110,
10250 element_precision
10251 (TREE_TYPE (TREE_OPERAND
10252 (arg0, 0))))
10253 && operand_equal_p (tree01, tree111, 0))
10254 return
10255 fold_convert_loc (loc, type,
10256 build2 ((code0 == LSHIFT_EXPR
10257 ? LROTATE_EXPR
10258 : RROTATE_EXPR),
10259 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10260 TREE_OPERAND (arg0, 0), tree01));
10261 }
10262 else if (code01 == MINUS_EXPR)
10263 {
10264 tree tree010, tree011;
10265 tree010 = TREE_OPERAND (tree01, 0);
10266 tree011 = TREE_OPERAND (tree01, 1);
10267 STRIP_NOPS (tree010);
10268 STRIP_NOPS (tree011);
10269 if (TREE_CODE (tree010) == INTEGER_CST
10270 && 0 == compare_tree_int (tree010,
10271 element_precision
10272 (TREE_TYPE (TREE_OPERAND
10273 (arg0, 0))))
10274 && operand_equal_p (tree11, tree011, 0))
10275 return fold_convert_loc
10276 (loc, type,
10277 build2 ((code0 != LSHIFT_EXPR
10278 ? LROTATE_EXPR
10279 : RROTATE_EXPR),
10280 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10281 TREE_OPERAND (arg0, 0), tree11));
10282 }
10283 }
10284 }
10285
10286 associate:
10287 /* In most languages, we can't associate operations on floats through
10288 parentheses. Rather than remember where the parentheses were, we
10289 don't associate floats at all, unless the user has specified
10290 -fassociative-math.
10291 And, we need to make sure type is not saturating. */
10292
10293 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10294 && !TYPE_SATURATING (type))
10295 {
10296 tree var0, con0, lit0, minus_lit0;
10297 tree var1, con1, lit1, minus_lit1;
10298 tree atype = type;
10299 bool ok = true;
10300
10301 /* Split both trees into variables, constants, and literals. Then
10302 associate each group together, the constants with literals,
10303 then the result with variables. This increases the chances of
10304 literals being recombined later and of generating relocatable
10305 expressions for the sum of a constant and literal. */
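      /* For instance, (x + 1) + (y + 2) splits into variables x and y
	 and literals 1 and 2; recombining yields (x + y) + 3, exposing
	 the literal sum to later folding.  */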
10306 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10307 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10308 code == MINUS_EXPR);
10309
10310 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10311 if (code == MINUS_EXPR)
10312 code = PLUS_EXPR;
10313
10314 /* With undefined overflow prefer doing association in a type
10315 which wraps on overflow, if that is one of the operand types. */
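      /* E.g. if TYPE is a signed type with undefined overflow but ARG0
	 was computed in the corresponding unsigned type of the same
	 precision, the association below is done in that wrapping type
	 and the result converted back, so no new undefined overflow is
	 introduced.  */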
10316 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10317 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10318 {
10319 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10320 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10321 atype = TREE_TYPE (arg0);
10322 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10323 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10324 atype = TREE_TYPE (arg1);
10325 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10326 }
10327
10328	      /* With undefined overflow we can associate constants with at most
10329	         one variable, and only constants whose association doesn't overflow.  */
10330 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10331 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10332 {
10333 if (var0 && var1)
10334 {
10335 tree tmp0 = var0;
10336 tree tmp1 = var1;
10337
10338 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10339 tmp0 = TREE_OPERAND (tmp0, 0);
10340 if (CONVERT_EXPR_P (tmp0)
10341 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10342 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10343 <= TYPE_PRECISION (atype)))
10344 tmp0 = TREE_OPERAND (tmp0, 0);
10345 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10346 tmp1 = TREE_OPERAND (tmp1, 0);
10347 if (CONVERT_EXPR_P (tmp1)
10348 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10349 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10350 <= TYPE_PRECISION (atype)))
10351 tmp1 = TREE_OPERAND (tmp1, 0);
10352 /* The only case we can still associate with two variables
10353 is if they are the same, modulo negation and bit-pattern
10354 preserving conversions. */
10355 if (!operand_equal_p (tmp0, tmp1, 0))
10356 ok = false;
10357 }
10358 }
10359
10360 /* Only do something if we found more than two objects. Otherwise,
10361 nothing has changed and we risk infinite recursion. */
10362 if (ok
10363 && (2 < ((var0 != 0) + (var1 != 0)
10364 + (con0 != 0) + (con1 != 0)
10365 + (lit0 != 0) + (lit1 != 0)
10366 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10367 {
10368 bool any_overflows = false;
10369 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10370 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10371 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10372 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10373 var0 = associate_trees (loc, var0, var1, code, atype);
10374 con0 = associate_trees (loc, con0, con1, code, atype);
10375 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10376 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10377 code, atype);
10378
10379	      /* Preserve the MINUS_EXPR if the negative part of the literal is
10380	         greater than the positive part.  Otherwise, the multiplicative
10381	         folding code (i.e. extract_muldiv) may be fooled when unsigned
10382	         constants are subtracted, as in the following example:
10383	         ((X*2 + 4) - 8U)/2.  */
10384 if (minus_lit0 && lit0)
10385 {
10386 if (TREE_CODE (lit0) == INTEGER_CST
10387 && TREE_CODE (minus_lit0) == INTEGER_CST
10388 && tree_int_cst_lt (lit0, minus_lit0))
10389 {
10390 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10391 MINUS_EXPR, atype);
10392 lit0 = 0;
10393 }
10394 else
10395 {
10396 lit0 = associate_trees (loc, lit0, minus_lit0,
10397 MINUS_EXPR, atype);
10398 minus_lit0 = 0;
10399 }
10400 }
10401
10402 /* Don't introduce overflows through reassociation. */
10403 if (!any_overflows
10404 && ((lit0 && TREE_OVERFLOW (lit0))
10405 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10406 return NULL_TREE;
10407
10408 if (minus_lit0)
10409 {
10410 if (con0 == 0)
10411 return
10412 fold_convert_loc (loc, type,
10413 associate_trees (loc, var0, minus_lit0,
10414 MINUS_EXPR, atype));
10415 else
10416 {
10417 con0 = associate_trees (loc, con0, minus_lit0,
10418 MINUS_EXPR, atype);
10419 return
10420 fold_convert_loc (loc, type,
10421 associate_trees (loc, var0, con0,
10422 PLUS_EXPR, atype));
10423 }
10424 }
10425
10426 con0 = associate_trees (loc, con0, lit0, code, atype);
10427 return
10428 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10429 code, atype));
10430 }
10431 }
10432
10433 return NULL_TREE;
10434
10435 case MINUS_EXPR:
10436 /* Pointer simplifications for subtraction, simple reassociations. */
10437 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10438 {
10439 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10440 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10441 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10442 {
10443 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10444 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10445 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10446 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10447 return fold_build2_loc (loc, PLUS_EXPR, type,
10448 fold_build2_loc (loc, MINUS_EXPR, type,
10449 arg00, arg10),
10450 fold_build2_loc (loc, MINUS_EXPR, type,
10451 arg01, arg11));
10452 }
10453 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10454 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10455 {
10456 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10457 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10458 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10459 fold_convert_loc (loc, type, arg1));
10460 if (tmp)
10461 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10462 }
10463 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10464 simplifies. */
10465 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10466 {
10467 tree arg10 = fold_convert_loc (loc, type,
10468 TREE_OPERAND (arg1, 0));
10469 tree arg11 = fold_convert_loc (loc, type,
10470 TREE_OPERAND (arg1, 1));
10471 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10472 fold_convert_loc (loc, type, arg0),
10473 arg10);
10474 if (tmp)
10475 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10476 }
10477 }
10478 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10479 if (TREE_CODE (arg0) == NEGATE_EXPR
10480 && negate_expr_p (arg1)
10481 && reorder_operands_p (arg0, arg1))
10482 return fold_build2_loc (loc, MINUS_EXPR, type,
10483 fold_convert_loc (loc, type,
10484 negate_expr (arg1)),
10485 fold_convert_loc (loc, type,
10486 TREE_OPERAND (arg0, 0)));
10487
10488 /* X - (X / Y) * Y is X % Y. */
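      /* E.g. with X = 7 and Y = 3: 7 - (7 / 3) * 3 == 7 - 6 == 1,
	 which is exactly 7 % 3 under truncating division.  */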
10489 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10490 && TREE_CODE (arg1) == MULT_EXPR
10491 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10492 && operand_equal_p (arg0,
10493 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10494 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10495 TREE_OPERAND (arg1, 1), 0))
10496 return
10497 fold_convert_loc (loc, type,
10498 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10499 arg0, TREE_OPERAND (arg1, 1)));
10500
10501 if (! FLOAT_TYPE_P (type))
10502 {
10503 /* Fold A - (A & B) into ~B & A. */
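	  /* E.g. A = 0b1100, B = 0b1010: A - (A & B) == 12 - 8 == 4, and
	     ~B & A == 0b0101 & 0b1100 == 0b0100 == 4; subtracting A & B
	     just clears B's bits from A.  */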
10504 if (!TREE_SIDE_EFFECTS (arg0)
10505 && TREE_CODE (arg1) == BIT_AND_EXPR)
10506 {
10507 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10508 {
10509 tree arg10 = fold_convert_loc (loc, type,
10510 TREE_OPERAND (arg1, 0));
10511 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10512 fold_build1_loc (loc, BIT_NOT_EXPR,
10513 type, arg10),
10514 fold_convert_loc (loc, type, arg0));
10515 }
10516 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10517 {
10518 tree arg11 = fold_convert_loc (loc,
10519 type, TREE_OPERAND (arg1, 1));
10520 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10521 fold_build1_loc (loc, BIT_NOT_EXPR,
10522 type, arg11),
10523 fold_convert_loc (loc, type, arg0));
10524 }
10525 }
10526
10527 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10528 any power of 2 minus 1. */
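	  /* E.g. B = 0b0011 and A = 0b0110: (A & ~B) - (A & B)
	     == 4 - 2 == 2, and (A ^ B) - B == 0b0101 - 0b0011 == 2.  */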
10529 if (TREE_CODE (arg0) == BIT_AND_EXPR
10530 && TREE_CODE (arg1) == BIT_AND_EXPR
10531 && operand_equal_p (TREE_OPERAND (arg0, 0),
10532 TREE_OPERAND (arg1, 0), 0))
10533 {
10534 tree mask0 = TREE_OPERAND (arg0, 1);
10535 tree mask1 = TREE_OPERAND (arg1, 1);
10536 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10537
10538 if (operand_equal_p (tem, mask1, 0))
10539 {
10540 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10541 TREE_OPERAND (arg0, 0), mask1);
10542 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10543 }
10544 }
10545 }
10546
10547 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10548 __complex__ ( x, -y ). This is not the same for SNaNs or if
10549 signed zeros are involved. */
10550 if (!HONOR_SNANS (element_mode (arg0))
10551 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10552 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10553 {
10554 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10555 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10556 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10557 bool arg0rz = false, arg0iz = false;
10558 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10559 || (arg0i && (arg0iz = real_zerop (arg0i))))
10560 {
10561 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10562 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10563 if (arg0rz && arg1i && real_zerop (arg1i))
10564 {
10565 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10566 arg1r ? arg1r
10567 : build1 (REALPART_EXPR, rtype, arg1));
10568 tree ip = arg0i ? arg0i
10569 : build1 (IMAGPART_EXPR, rtype, arg0);
10570 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10571 }
10572 else if (arg0iz && arg1r && real_zerop (arg1r))
10573 {
10574 tree rp = arg0r ? arg0r
10575 : build1 (REALPART_EXPR, rtype, arg0);
10576 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10577 arg1i ? arg1i
10578 : build1 (IMAGPART_EXPR, rtype, arg1));
10579 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10580 }
10581 }
10582 }
10583
10584 /* A - B -> A + (-B) if B is easily negatable. */
10585 if (negate_expr_p (arg1)
10586 && !TYPE_OVERFLOW_SANITIZED (type)
10587 && ((FLOAT_TYPE_P (type)
10588 /* Avoid this transformation if B is a positive REAL_CST. */
10589 && (TREE_CODE (arg1) != REAL_CST
10590 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10591 || INTEGRAL_TYPE_P (type)))
10592 return fold_build2_loc (loc, PLUS_EXPR, type,
10593 fold_convert_loc (loc, type, arg0),
10594 fold_convert_loc (loc, type,
10595 negate_expr (arg1)));
10596
10597 /* Try folding difference of addresses. */
10598 {
10599 HOST_WIDE_INT diff;
10600
10601 if ((TREE_CODE (arg0) == ADDR_EXPR
10602 || TREE_CODE (arg1) == ADDR_EXPR)
10603 && ptr_difference_const (arg0, arg1, &diff))
10604 return build_int_cst_type (type, diff);
10605 }
10606
10607 /* Fold &a[i] - &a[j] to i-j. */
10608 if (TREE_CODE (arg0) == ADDR_EXPR
10609 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10610 && TREE_CODE (arg1) == ADDR_EXPR
10611 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10612 {
10613 tree tem = fold_addr_of_array_ref_difference (loc, type,
10614 TREE_OPERAND (arg0, 0),
10615 TREE_OPERAND (arg1, 0));
10616 if (tem)
10617 return tem;
10618 }
10619
10620 if (FLOAT_TYPE_P (type)
10621 && flag_unsafe_math_optimizations
10622 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10623 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10624 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10625 return tem;
10626
10627 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10628 one. Make sure the type is not saturating and has the signedness of
10629 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10630 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10631 if ((TREE_CODE (arg0) == MULT_EXPR
10632 || TREE_CODE (arg1) == MULT_EXPR)
10633 && !TYPE_SATURATING (type)
10634 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10635 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10636 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10637 {
10638 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10639 if (tem)
10640 return tem;
10641 }
10642
10643 goto associate;
10644
10645 case MULT_EXPR:
10646 /* (-A) * (-B) -> A * B */
10647 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10648 return fold_build2_loc (loc, MULT_EXPR, type,
10649 fold_convert_loc (loc, type,
10650 TREE_OPERAND (arg0, 0)),
10651 fold_convert_loc (loc, type,
10652 negate_expr (arg1)));
10653 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10654 return fold_build2_loc (loc, MULT_EXPR, type,
10655 fold_convert_loc (loc, type,
10656 negate_expr (arg0)),
10657 fold_convert_loc (loc, type,
10658 TREE_OPERAND (arg1, 0)));
10659
10660 if (! FLOAT_TYPE_P (type))
10661 {
10662 /* Transform x * -C into -x * C if x is easily negatable. */
10663 if (TREE_CODE (arg1) == INTEGER_CST
10664 && tree_int_cst_sgn (arg1) == -1
10665 && negate_expr_p (arg0)
10666 && (tem = negate_expr (arg1)) != arg1
10667 && !TREE_OVERFLOW (tem))
10668 return fold_build2_loc (loc, MULT_EXPR, type,
10669 fold_convert_loc (loc, type,
10670 negate_expr (arg0)),
10671 tem);
10672
10673	      /* (a * (1 << b)) is (a << b).  */
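	  /* E.g. a * (1 << 3) == a * 8 == a << 3.  */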
10674 if (TREE_CODE (arg1) == LSHIFT_EXPR
10675 && integer_onep (TREE_OPERAND (arg1, 0)))
10676 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10677 TREE_OPERAND (arg1, 1));
10678 if (TREE_CODE (arg0) == LSHIFT_EXPR
10679 && integer_onep (TREE_OPERAND (arg0, 0)))
10680 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10681 TREE_OPERAND (arg0, 1));
10682
10683 /* (A + A) * C -> A * 2 * C */
10684 if (TREE_CODE (arg0) == PLUS_EXPR
10685 && TREE_CODE (arg1) == INTEGER_CST
10686 && operand_equal_p (TREE_OPERAND (arg0, 0),
10687 TREE_OPERAND (arg0, 1), 0))
10688 return fold_build2_loc (loc, MULT_EXPR, type,
10689 omit_one_operand_loc (loc, type,
10690 TREE_OPERAND (arg0, 0),
10691 TREE_OPERAND (arg0, 1)),
10692 fold_build2_loc (loc, MULT_EXPR, type,
10693	                                        build_int_cst (type, 2), arg1));
10694
10695 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10696 sign-changing only. */
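	  /* E.g. (unsigned) (X /[ex] 4) * 4 is just (unsigned) X: the
	     exact division guarantees X is a multiple of 4, and the
	     conversion only changes the sign.  */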
10697 if (TREE_CODE (arg1) == INTEGER_CST
10698 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10699 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10700 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10701
10702 strict_overflow_p = false;
10703 if (TREE_CODE (arg1) == INTEGER_CST
10704 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10705 &strict_overflow_p)))
10706 {
10707 if (strict_overflow_p)
10708 fold_overflow_warning (("assuming signed overflow does not "
10709 "occur when simplifying "
10710 "multiplication"),
10711 WARN_STRICT_OVERFLOW_MISC);
10712 return fold_convert_loc (loc, type, tem);
10713 }
10714
10715 /* Optimize z * conj(z) for integer complex numbers. */
10716 if (TREE_CODE (arg0) == CONJ_EXPR
10717 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10718 return fold_mult_zconjz (loc, type, arg1);
10719 if (TREE_CODE (arg1) == CONJ_EXPR
10720 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10721 return fold_mult_zconjz (loc, type, arg0);
10722 }
10723 else
10724 {
10725	      /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
10726	         the result for floating point types due to rounding so it is applied
10727	         only if -fassociative-math is specified.  */
10728 if (flag_associative_math
10729 && TREE_CODE (arg0) == RDIV_EXPR
10730 && TREE_CODE (arg1) == REAL_CST
10731 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10732 {
10733 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10734 arg1);
10735 if (tem)
10736 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10737 TREE_OPERAND (arg0, 1));
10738 }
10739
10740 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10741 if (operand_equal_p (arg0, arg1, 0))
10742 {
10743 tree tem = fold_strip_sign_ops (arg0);
10744 if (tem != NULL_TREE)
10745 {
10746 tem = fold_convert_loc (loc, type, tem);
10747 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10748 }
10749 }
10750
10751 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10752 This is not the same for NaNs or if signed zeros are
10753 involved. */
10754 if (!HONOR_NANS (element_mode (arg0))
10755 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10756 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10757 && TREE_CODE (arg1) == COMPLEX_CST
10758 && real_zerop (TREE_REALPART (arg1)))
10759 {
10760 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10761 if (real_onep (TREE_IMAGPART (arg1)))
10762 return
10763 fold_build2_loc (loc, COMPLEX_EXPR, type,
10764 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10765 rtype, arg0)),
10766 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10767 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10768 return
10769 fold_build2_loc (loc, COMPLEX_EXPR, type,
10770 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10771 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10772 rtype, arg0)));
10773 }
10774
10775 /* Optimize z * conj(z) for floating point complex numbers.
10776 Guarded by flag_unsafe_math_optimizations as non-finite
10777 imaginary components don't produce scalar results. */
10778 if (flag_unsafe_math_optimizations
10779 && TREE_CODE (arg0) == CONJ_EXPR
10780 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10781 return fold_mult_zconjz (loc, type, arg1);
10782 if (flag_unsafe_math_optimizations
10783 && TREE_CODE (arg1) == CONJ_EXPR
10784 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10785 return fold_mult_zconjz (loc, type, arg0);
10786
10787 if (flag_unsafe_math_optimizations)
10788 {
10789 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10790 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10791
10792 /* Optimizations of root(...)*root(...). */
10793 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10794 {
10795 tree rootfn, arg;
10796 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10797 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10798
10799 /* Optimize sqrt(x)*sqrt(x) as x. */
10800 if (BUILTIN_SQRT_P (fcode0)
10801 && operand_equal_p (arg00, arg10, 0)
10802 && ! HONOR_SNANS (element_mode (type)))
10803 return arg00;
10804
10805 /* Optimize root(x)*root(y) as root(x*y). */
10806 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10807 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10808 return build_call_expr_loc (loc, rootfn, 1, arg);
10809 }
10810
10811 /* Optimize expN(x)*expN(y) as expN(x+y). */
10812 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10813 {
10814 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10815 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10816 CALL_EXPR_ARG (arg0, 0),
10817 CALL_EXPR_ARG (arg1, 0));
10818 return build_call_expr_loc (loc, expfn, 1, arg);
10819 }
10820
10821 /* Optimizations of pow(...)*pow(...). */
10822 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10823 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10824 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10825 {
10826 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10827 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10828 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10829 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10830
10831 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10832 if (operand_equal_p (arg01, arg11, 0))
10833 {
10834 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10835 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10836 arg00, arg10);
10837 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10838 }
10839
10840 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10841 if (operand_equal_p (arg00, arg10, 0))
10842 {
10843 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10844 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10845 arg01, arg11);
10846 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10847 }
10848 }
10849
10850 /* Optimize tan(x)*cos(x) as sin(x). */
10851 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10852 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10853 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10854 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10855 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10856 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10857 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10858 CALL_EXPR_ARG (arg1, 0), 0))
10859 {
10860 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10861
10862 if (sinfn != NULL_TREE)
10863 return build_call_expr_loc (loc, sinfn, 1,
10864 CALL_EXPR_ARG (arg0, 0));
10865 }
10866
10867 /* Optimize x*pow(x,c) as pow(x,c+1). */
10868 if (fcode1 == BUILT_IN_POW
10869 || fcode1 == BUILT_IN_POWF
10870 || fcode1 == BUILT_IN_POWL)
10871 {
10872 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10873 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10874 if (TREE_CODE (arg11) == REAL_CST
10875 && !TREE_OVERFLOW (arg11)
10876 && operand_equal_p (arg0, arg10, 0))
10877 {
10878 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10879 REAL_VALUE_TYPE c;
10880 tree arg;
10881
10882 c = TREE_REAL_CST (arg11);
10883 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10884 arg = build_real (type, c);
10885 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10886 }
10887 }
10888
10889 /* Optimize pow(x,c)*x as pow(x,c+1). */
10890 if (fcode0 == BUILT_IN_POW
10891 || fcode0 == BUILT_IN_POWF
10892 || fcode0 == BUILT_IN_POWL)
10893 {
10894 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10895 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10896 if (TREE_CODE (arg01) == REAL_CST
10897 && !TREE_OVERFLOW (arg01)
10898 && operand_equal_p (arg1, arg00, 0))
10899 {
10900 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10901 REAL_VALUE_TYPE c;
10902 tree arg;
10903
10904 c = TREE_REAL_CST (arg01);
10905 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10906 arg = build_real (type, c);
10907 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10908 }
10909 }
10910
10911 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10912 if (!in_gimple_form
10913 && optimize
10914 && operand_equal_p (arg0, arg1, 0))
10915 {
10916 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10917
10918 if (powfn)
10919 {
10920 tree arg = build_real (type, dconst2);
10921 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10922 }
10923 }
10924 }
10925 }
10926 goto associate;
10927
10928 case BIT_IOR_EXPR:
10929 bit_ior:
10930 /* ~X | X is -1. */
10931 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10932 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10933 {
10934 t1 = build_zero_cst (type);
10935 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10936 return omit_one_operand_loc (loc, type, t1, arg1);
10937 }
10938
10939 /* X | ~X is -1. */
10940 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10941 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10942 {
10943 t1 = build_zero_cst (type);
10944 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10945 return omit_one_operand_loc (loc, type, t1, arg0);
10946 }
10947
10948 /* Canonicalize (X & C1) | C2. */
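      /* E.g. in 8 bits: (X & 0x03) | 0x0F is 0x0F regardless of X;
	 (X & 0xF0) | 0x0F is X | 0x0F since the masks cover all bits;
	 and (X & 0x3C) | 0x0F can drop C1's redundant bits to become
	 (X & 0x30) | 0x0F.  */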
10949 if (TREE_CODE (arg0) == BIT_AND_EXPR
10950 && TREE_CODE (arg1) == INTEGER_CST
10951 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10952 {
10953 int width = TYPE_PRECISION (type), w;
10954 wide_int c1 = TREE_OPERAND (arg0, 1);
10955 wide_int c2 = arg1;
10956
10957 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10958 if ((c1 & c2) == c1)
10959 return omit_one_operand_loc (loc, type, arg1,
10960 TREE_OPERAND (arg0, 0));
10961
10962 wide_int msk = wi::mask (width, false,
10963 TYPE_PRECISION (TREE_TYPE (arg1)));
10964
10965 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10966 if (msk.and_not (c1 | c2) == 0)
10967 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10968 TREE_OPERAND (arg0, 0), arg1);
10969
10970 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10971 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10972 mode which allows further optimizations. */
10973 c1 &= msk;
10974 c2 &= msk;
10975 wide_int c3 = c1.and_not (c2);
10976 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10977 {
10978 wide_int mask = wi::mask (w, false,
10979 TYPE_PRECISION (type));
10980 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10981 {
10982 c3 = mask;
10983 break;
10984 }
10985 }
10986
10987 if (c3 != c1)
10988 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10989 fold_build2_loc (loc, BIT_AND_EXPR, type,
10990 TREE_OPERAND (arg0, 0),
10991 wide_int_to_tree (type,
10992 c3)),
10993 arg1);
10994 }
10995
10996	      /* (X & ~Y) | (~X & Y) is X ^ Y.  */
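      /* A bit is set in the result exactly where X and Y differ,
	 e.g. X = 0b1100, Y = 0b1010 gives 0b0100 | 0b0010 == 0b0110
	 == X ^ Y.  */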
10997 if (TREE_CODE (arg0) == BIT_AND_EXPR
10998 && TREE_CODE (arg1) == BIT_AND_EXPR)
10999 {
11000 tree a0, a1, l0, l1, n0, n1;
11001
11002 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11003 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11004
11005 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11006 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11007
11008 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11009 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11010
11011 if ((operand_equal_p (n0, a0, 0)
11012 && operand_equal_p (n1, a1, 0))
11013 || (operand_equal_p (n0, a1, 0)
11014 && operand_equal_p (n1, a0, 0)))
11015 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11016 }
11017
11018 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11019 if (t1 != NULL_TREE)
11020 return t1;
11021
11022 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11023
11024 This results in more efficient code for machines without a NAND
11025 instruction. Combine will canonicalize to the first form
11026 which will allow use of NAND instructions provided by the
11027 backend if they exist. */
11028 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11029 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11030 {
11031 return
11032 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11033 build2 (BIT_AND_EXPR, type,
11034 fold_convert_loc (loc, type,
11035 TREE_OPERAND (arg0, 0)),
11036 fold_convert_loc (loc, type,
11037 TREE_OPERAND (arg1, 0))));
11038 }
11039
11040	      /* See if this can be simplified into a rotate first.  If that
11041	         is unsuccessful, continue in the association code.  */
11042 goto bit_rotate;
11043
11044 case BIT_XOR_EXPR:
11045 /* ~X ^ X is -1. */
11046 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11047 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11048 {
11049 t1 = build_zero_cst (type);
11050 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11051 return omit_one_operand_loc (loc, type, t1, arg1);
11052 }
11053
11054 /* X ^ ~X is -1. */
11055 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11056 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11057 {
11058 t1 = build_zero_cst (type);
11059 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11060 return omit_one_operand_loc (loc, type, t1, arg0);
11061 }
11062
11063 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11064 with a constant, and the two constants have no bits in common,
11065 we should treat this as a BIT_IOR_EXPR since this may produce more
11066 simplifications. */
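      /* E.g. (X & 0x0F) ^ (Y & 0xF0): the masked operands share no
	 set bits, so at most one of them can be 1 in any position and
	 XOR behaves exactly like IOR here.  */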
11067 if (TREE_CODE (arg0) == BIT_AND_EXPR
11068 && TREE_CODE (arg1) == BIT_AND_EXPR
11069 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11070 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11071 && wi::bit_and (TREE_OPERAND (arg0, 1),
11072 TREE_OPERAND (arg1, 1)) == 0)
11073 {
11074 code = BIT_IOR_EXPR;
11075 goto bit_ior;
11076 }
11077
11078	      /* (X | Y) ^ X -> Y & ~X.  */
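      /* Where X is 1 both sides give 0; where X is 0 both sides give
	 the corresponding bit of Y.  E.g. X = 0b1100, Y = 0b1010:
	 (X | Y) ^ X == 0b0010 == Y & ~X.  */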
11079 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11080 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11081 {
11082 tree t2 = TREE_OPERAND (arg0, 1);
11083 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11084 arg1);
11085 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11086 fold_convert_loc (loc, type, t2),
11087 fold_convert_loc (loc, type, t1));
11088 return t1;
11089 }
11090
11091	      /* (Y | X) ^ X -> Y & ~X.  */
11092 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11093 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11094 {
11095 tree t2 = TREE_OPERAND (arg0, 0);
11096 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11097 arg1);
11098 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11099 fold_convert_loc (loc, type, t2),
11100 fold_convert_loc (loc, type, t1));
11101 return t1;
11102 }
11103
11104	      /* X ^ (X | Y) -> Y & ~X.  */
11105 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11106 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11107 {
11108 tree t2 = TREE_OPERAND (arg1, 1);
11109 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11110 arg0);
11111 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11112 fold_convert_loc (loc, type, t2),
11113 fold_convert_loc (loc, type, t1));
11114 return t1;
11115 }
11116
11117	      /* X ^ (Y | X) -> Y & ~X.  */
11118 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11119 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11120 {
11121 tree t2 = TREE_OPERAND (arg1, 0);
11122 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11123 arg0);
11124 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11125 fold_convert_loc (loc, type, t2),
11126 fold_convert_loc (loc, type, t1));
11127 return t1;
11128 }
11129
11130 /* Convert ~X ^ ~Y to X ^ Y. */
11131 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11132 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11133 return fold_build2_loc (loc, code, type,
11134 fold_convert_loc (loc, type,
11135 TREE_OPERAND (arg0, 0)),
11136 fold_convert_loc (loc, type,
11137 TREE_OPERAND (arg1, 0)));
11138
11139 /* Convert ~X ^ C to X ^ ~C. */
11140 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11141 && TREE_CODE (arg1) == INTEGER_CST)
11142 return fold_build2_loc (loc, code, type,
11143 fold_convert_loc (loc, type,
11144 TREE_OPERAND (arg0, 0)),
11145 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11146
11147 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11148 if (TREE_CODE (arg0) == BIT_AND_EXPR
11149 && INTEGRAL_TYPE_P (type)
11150 && integer_onep (TREE_OPERAND (arg0, 1))
11151 && integer_onep (arg1))
11152 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11153 build_zero_cst (TREE_TYPE (arg0)));
11154
11155 /* Fold (X & Y) ^ Y as ~X & Y. */
11156 if (TREE_CODE (arg0) == BIT_AND_EXPR
11157 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11158 {
11159 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11160 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11161 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11162 fold_convert_loc (loc, type, arg1));
11163 }
11164 /* Fold (X & Y) ^ X as ~Y & X. */
11165 if (TREE_CODE (arg0) == BIT_AND_EXPR
11166 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11167 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11168 {
11169 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11170 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11171 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11172 fold_convert_loc (loc, type, arg1));
11173 }
11174 /* Fold X ^ (X & Y) as X & ~Y. */
11175 if (TREE_CODE (arg1) == BIT_AND_EXPR
11176 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11177 {
11178 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11179 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11180 fold_convert_loc (loc, type, arg0),
11181 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11182 }
11183 /* Fold X ^ (Y & X) as ~Y & X. */
11184 if (TREE_CODE (arg1) == BIT_AND_EXPR
11185 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11186 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11187 {
11188 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11189 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11190 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11191 fold_convert_loc (loc, type, arg0));
11192 }
11193
11194	      /* See if this can be simplified into a rotate first.  If that
11195	         is unsuccessful, continue in the association code.  */
11196 goto bit_rotate;
11197
11198 case BIT_AND_EXPR:
11199 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11200 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11201 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11202 || (TREE_CODE (arg0) == EQ_EXPR
11203 && integer_zerop (TREE_OPERAND (arg0, 1))))
11204 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11205 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11206
11207	      /* X & ~X, X & (X == 0), and X & !X are always zero.  */
11208 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11209 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11210 || (TREE_CODE (arg1) == EQ_EXPR
11211 && integer_zerop (TREE_OPERAND (arg1, 1))))
11212 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11213 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11214
11215 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11216 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11217 && INTEGRAL_TYPE_P (type)
11218 && integer_onep (TREE_OPERAND (arg0, 1))
11219 && integer_onep (arg1))
11220 {
11221 tree tem2;
11222 tem = TREE_OPERAND (arg0, 0);
11223 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11224 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11225 tem, tem2);
11226 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11227 build_zero_cst (TREE_TYPE (tem)));
11228 }
11229 /* Fold ~X & 1 as (X & 1) == 0. */
11230 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11231 && INTEGRAL_TYPE_P (type)
11232 && integer_onep (arg1))
11233 {
11234 tree tem2;
11235 tem = TREE_OPERAND (arg0, 0);
11236 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11237 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11238 tem, tem2);
11239 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11240 build_zero_cst (TREE_TYPE (tem)));
11241 }
11242 /* Fold !X & 1 as X == 0. */
11243 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11244 && integer_onep (arg1))
11245 {
11246 tem = TREE_OPERAND (arg0, 0);
11247 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11248 build_zero_cst (TREE_TYPE (tem)));
11249 }
11250
11251 /* Fold (X ^ Y) & Y as ~X & Y. */
11252 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11253 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11254 {
11255 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11256 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11257 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11258 fold_convert_loc (loc, type, arg1));
11259 }
11260 /* Fold (X ^ Y) & X as ~Y & X. */
11261 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11262 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11263 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11264 {
11265 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11266 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11267 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11268 fold_convert_loc (loc, type, arg1));
11269 }
11270 /* Fold X & (X ^ Y) as X & ~Y. */
11271 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11272 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11273 {
11274 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11275 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11276 fold_convert_loc (loc, type, arg0),
11277 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11278 }
11279 /* Fold X & (Y ^ X) as ~Y & X. */
11280 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11281 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11282 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11283 {
11284 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11285 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11286 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11287 fold_convert_loc (loc, type, arg0));
11288 }
11289
11290 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11291 multiple of 1 << CST. */
11292 if (TREE_CODE (arg1) == INTEGER_CST)
11293 {
11294 wide_int cst1 = arg1;
11295 wide_int ncst1 = -cst1;
11296 if ((cst1 & ncst1) == ncst1
11297 && multiple_of_p (type, arg0,
11298 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11299 return fold_convert_loc (loc, type, arg0);
11300 }
11301
11302 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11303 bits from CST2. */
11304 if (TREE_CODE (arg1) == INTEGER_CST
11305 && TREE_CODE (arg0) == MULT_EXPR
11306 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11307 {
11308 wide_int warg1 = arg1;
11309 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11310
11311 if (masked == 0)
11312 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11313 arg0, arg1);
11314 else if (masked != warg1)
11315 {
11316 /* Avoid the transform if arg1 is a mask of some
11317 mode which allows further optimizations. */
11318 int pop = wi::popcount (warg1);
11319 if (!(pop >= BITS_PER_UNIT
11320 && exact_log2 (pop) != -1
11321 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11322 return fold_build2_loc (loc, code, type, op0,
11323 wide_int_to_tree (type, masked));
11324 }
11325 }
11326
11327 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11328 ((A & N) + B) & M -> (A + B) & M
11329 Similarly if (N & M) == 0,
11330 ((A | N) + B) & M -> (A + B) & M
11331 and for - instead of + (or unary - instead of +)
11332 and/or ^ instead of |.
11333 If B is constant and (B & M) == 0, fold into A & M. */
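      /* E.g. with M == 7 == (1 << 3) - 1:
	   ((A & 15) + B) & 7  -> (A + B) & 7   since (15 & 7) == 7,
	   ((A | 8) + B) & 7   -> (A + B) & 7   since (8 & 7) == 0,
	   ((A & 15) + 16) & 7 -> A & 7         since (16 & 7) == 0.  */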
11334 if (TREE_CODE (arg1) == INTEGER_CST)
11335 {
11336 wide_int cst1 = arg1;
11337 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11338 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11339 && (TREE_CODE (arg0) == PLUS_EXPR
11340 || TREE_CODE (arg0) == MINUS_EXPR
11341 || TREE_CODE (arg0) == NEGATE_EXPR)
11342 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11343 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11344 {
11345 tree pmop[2];
11346 int which = 0;
11347 wide_int cst0;
11348
11349	      /* Now we know that arg0 is (C + D) or (C - D) or
11350	         -C and arg1 (M) == (1LL << cst) - 1.
11351	         Store C into PMOP[0] and D into PMOP[1].  */
11352 pmop[0] = TREE_OPERAND (arg0, 0);
11353 pmop[1] = NULL;
11354 if (TREE_CODE (arg0) != NEGATE_EXPR)
11355 {
11356 pmop[1] = TREE_OPERAND (arg0, 1);
11357 which = 1;
11358 }
11359
11360 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11361 which = -1;
11362
11363 for (; which >= 0; which--)
11364 switch (TREE_CODE (pmop[which]))
11365 {
11366 case BIT_AND_EXPR:
11367 case BIT_IOR_EXPR:
11368 case BIT_XOR_EXPR:
11369 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11370 != INTEGER_CST)
11371 break;
11372 cst0 = TREE_OPERAND (pmop[which], 1);
11373 cst0 &= cst1;
11374 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11375 {
11376 if (cst0 != cst1)
11377 break;
11378 }
11379 else if (cst0 != 0)
11380 break;
11381 /* If C or D is of the form (A & N) where
11382 (N & M) == M, or of the form (A | N) or
11383 (A ^ N) where (N & M) == 0, replace it with A. */
11384 pmop[which] = TREE_OPERAND (pmop[which], 0);
11385 break;
11386 case INTEGER_CST:
11387 /* If C or D is a N where (N & M) == 0, it can be
11388 omitted (assumed 0). */
11389 if ((TREE_CODE (arg0) == PLUS_EXPR
11390 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11391 && (cst1 & pmop[which]) == 0)
11392 pmop[which] = NULL;
11393 break;
11394 default:
11395 break;
11396 }
11397
11398 /* Only build anything new if we optimized one or both arguments
11399 above. */
11400 if (pmop[0] != TREE_OPERAND (arg0, 0)
11401 || (TREE_CODE (arg0) != NEGATE_EXPR
11402 && pmop[1] != TREE_OPERAND (arg0, 1)))
11403 {
11404 tree utype = TREE_TYPE (arg0);
11405 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11406 {
11407 /* Perform the operations in a type that has defined
11408 overflow behavior. */
11409 utype = unsigned_type_for (TREE_TYPE (arg0));
11410 if (pmop[0] != NULL)
11411 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11412 if (pmop[1] != NULL)
11413 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11414 }
11415
11416 if (TREE_CODE (arg0) == NEGATE_EXPR)
11417 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11418 else if (TREE_CODE (arg0) == PLUS_EXPR)
11419 {
11420 if (pmop[0] != NULL && pmop[1] != NULL)
11421 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11422 pmop[0], pmop[1]);
11423 else if (pmop[0] != NULL)
11424 tem = pmop[0];
11425 else if (pmop[1] != NULL)
11426 tem = pmop[1];
11427 else
11428 return build_int_cst (type, 0);
11429 }
11430 else if (pmop[0] == NULL)
11431 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11432 else
11433 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11434 pmop[0], pmop[1]);
11435 /* TEM is now the new binary +, - or unary - replacement. */
11436 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11437 fold_convert_loc (loc, utype, arg1));
11438 return fold_convert_loc (loc, type, tem);
11439 }
11440 }
11441 }
11442
11443 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11444 if (t1 != NULL_TREE)
11445 return t1;
11446 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11447 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11448 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11449 {
11450 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11451
11452 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11453 if (mask == -1)
11454 return
11455 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11456 }
11457
11458 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11459
11460 This results in more efficient code for machines without a NOR
11461 instruction. Combine will canonicalize to the first form
11462 which will allow use of NOR instructions provided by the
11463 backend if they exist. */
11464 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11465 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11466 {
11467 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11468 build2 (BIT_IOR_EXPR, type,
11469 fold_convert_loc (loc, type,
11470 TREE_OPERAND (arg0, 0)),
11471 fold_convert_loc (loc, type,
11472 TREE_OPERAND (arg1, 0))));
11473 }
11474
11475 /* If arg0 is derived from the address of an object or function, we may
11476 be able to fold this expression using the object or function's
11477 alignment. */
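      /* E.g. if ARG0 is the address of an object with 16-byte
	 alignment, MODULUS is 16 and RESIDUE is 0 below, so ARG0 & 15
	 folds to the constant 0.  */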
11478 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11479 {
11480 unsigned HOST_WIDE_INT modulus, residue;
11481 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11482
11483 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11484 integer_onep (arg1));
11485
11486 /* This works because modulus is a power of 2. If this weren't the
11487 case, we'd have to replace it by its greatest power-of-2
11488 divisor: modulus & -modulus. */
11489 if (low < modulus)
11490 return build_int_cst (type, residue & low);
11491 }
11492
11493 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11494 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11495 if the new mask might be further optimized. */
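      /* E.g. for a 32-bit unsigned X, (X << 8) & 0xFFFFFF00 has its
	 low 8 bits known zero from the shift, so the mask widens to
	 0xFFFFFFFF, the full-precision mask; the BIT_AND is then
	 redundant and later folding can drop it.  */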
11496 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11497 || TREE_CODE (arg0) == RSHIFT_EXPR)
11498 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11499 && TREE_CODE (arg1) == INTEGER_CST
11500 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11501 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11502 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11503 < TYPE_PRECISION (TREE_TYPE (arg0))))
11504 {
11505 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11506 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11507 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11508 tree shift_type = TREE_TYPE (arg0);
11509
11510 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11511 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11512 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11513 && TYPE_PRECISION (TREE_TYPE (arg0))
11514 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11515 {
11516 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11517 tree arg00 = TREE_OPERAND (arg0, 0);
11518 /* See if more bits can be proven as zero because of
11519 zero extension. */
11520 if (TREE_CODE (arg00) == NOP_EXPR
11521 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11522 {
11523 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11524 if (TYPE_PRECISION (inner_type)
11525 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11526 && TYPE_PRECISION (inner_type) < prec)
11527 {
11528 prec = TYPE_PRECISION (inner_type);
11529 /* See if we can shorten the right shift. */
11530 if (shiftc < prec)
11531 shift_type = inner_type;
11532 /* Otherwise X >> C1 is all zeros, so we'll optimize
11533 it into (X, 0) later on by making sure zerobits
11534 is all ones. */
11535 }
11536 }
11537 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11538 if (shiftc < prec)
11539 {
11540 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11541 zerobits <<= prec - shiftc;
11542 }
11543	      /* For an arithmetic shift, if the sign bit could be set, zerobits
11544	         can actually contain sign bits, so no transformation is
11545	         possible unless MASK masks them all away.  In that case the
11546	         shift needs to be converted into a logical shift.  */
11547 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11548 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11549 {
11550 if ((mask & zerobits) == 0)
11551 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11552 else
11553 zerobits = 0;
11554 }
11555 }
11556
11557 /* ((X << 16) & 0xff00) is (X, 0). */
11558 if ((mask & zerobits) == mask)
11559 return omit_one_operand_loc (loc, type,
11560 build_int_cst (type, 0), arg0);
11561
11562 newmask = mask | zerobits;
11563 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11564 {
11565 /* Only do the transformation if NEWMASK is some integer
11566 mode's mask. */
11567 for (prec = BITS_PER_UNIT;
11568 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11569 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11570 break;
11571 if (prec < HOST_BITS_PER_WIDE_INT
11572 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11573 {
11574 tree newmaskt;
11575
11576 if (shift_type != TREE_TYPE (arg0))
11577 {
11578 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11579 fold_convert_loc (loc, shift_type,
11580 TREE_OPERAND (arg0, 0)),
11581 TREE_OPERAND (arg0, 1));
11582 tem = fold_convert_loc (loc, type, tem);
11583 }
11584 else
11585 tem = op0;
11586 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11587 if (!tree_int_cst_equal (newmaskt, arg1))
11588 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11589 }
11590 }
11591 }
11592
11593 goto associate;
11594
11595 case RDIV_EXPR:
11596 /* Don't touch a floating-point divide by zero unless the mode
11597 of the constant can represent infinity. */
11598 if (TREE_CODE (arg1) == REAL_CST
11599 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11600 && real_zerop (arg1))
11601 return NULL_TREE;
11602
11603 /* (-A) / (-B) -> A / B */
11604 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11605 return fold_build2_loc (loc, RDIV_EXPR, type,
11606 TREE_OPERAND (arg0, 0),
11607 negate_expr (arg1));
11608 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11609 return fold_build2_loc (loc, RDIV_EXPR, type,
11610 negate_expr (arg0),
11611 TREE_OPERAND (arg1, 0));
11612
11613 /* Convert A/B/C to A/(B*C). */
11614 if (flag_reciprocal_math
11615 && TREE_CODE (arg0) == RDIV_EXPR)
11616 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11617 fold_build2_loc (loc, MULT_EXPR, type,
11618 TREE_OPERAND (arg0, 1), arg1));
11619
11620 /* Convert A/(B/C) to (A/B)*C. */
11621 if (flag_reciprocal_math
11622 && TREE_CODE (arg1) == RDIV_EXPR)
11623 return fold_build2_loc (loc, MULT_EXPR, type,
11624 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11625 TREE_OPERAND (arg1, 0)),
11626 TREE_OPERAND (arg1, 1));
11627
11628 /* Convert C1/(X*C2) into (C1/C2)/X. */
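	  /* E.g. 6.0 / (X * 2.0) becomes (6.0 / 2.0) / X == 3.0 / X.  */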
11629 if (flag_reciprocal_math
11630 && TREE_CODE (arg1) == MULT_EXPR
11631 && TREE_CODE (arg0) == REAL_CST
11632 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11633 {
11634 tree tem = const_binop (RDIV_EXPR, arg0,
11635 TREE_OPERAND (arg1, 1));
11636 if (tem)
11637 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11638 TREE_OPERAND (arg1, 0));
11639 }
11640
11641 if (flag_unsafe_math_optimizations)
11642 {
11643 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11644 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11645
11646 /* Optimize sin(x)/cos(x) as tan(x). */
11647 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11648 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11649 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11650 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11651 CALL_EXPR_ARG (arg1, 0), 0))
11652 {
11653 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11654
11655 if (tanfn != NULL_TREE)
11656 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11657 }
11658
11659 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11660 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11661 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11662 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11663 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11664 CALL_EXPR_ARG (arg1, 0), 0))
11665 {
11666 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11667
11668 if (tanfn != NULL_TREE)
11669 {
11670 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11671 CALL_EXPR_ARG (arg0, 0));
11672 return fold_build2_loc (loc, RDIV_EXPR, type,
11673 build_real (type, dconst1), tmp);
11674 }
11675 }
11676
11677 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11678 NaNs or Infinities. */
11679 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11680 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11681 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11682 {
11683 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11684 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11685
11686 if (! HONOR_NANS (element_mode (arg00))
11687 && ! HONOR_INFINITIES (element_mode (arg00))
11688 && operand_equal_p (arg00, arg01, 0))
11689 {
11690 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11691
11692 if (cosfn != NULL_TREE)
11693 return build_call_expr_loc (loc, cosfn, 1, arg00);
11694 }
11695 }
11696
11697 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11698 NaNs or Infinities. */
11699 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11700 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11701 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11702 {
11703 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11704 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11705
11706 if (! HONOR_NANS (element_mode (arg00))
11707 && ! HONOR_INFINITIES (element_mode (arg00))
11708 && operand_equal_p (arg00, arg01, 0))
11709 {
11710 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11711
11712 if (cosfn != NULL_TREE)
11713 {
11714 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11715 return fold_build2_loc (loc, RDIV_EXPR, type,
11716 build_real (type, dconst1),
11717 tmp);
11718 }
11719 }
11720 }
11721
11722 /* Optimize pow(x,c)/x as pow(x,c-1). */
11723 if (fcode0 == BUILT_IN_POW
11724 || fcode0 == BUILT_IN_POWF
11725 || fcode0 == BUILT_IN_POWL)
11726 {
11727 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11728 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11729 if (TREE_CODE (arg01) == REAL_CST
11730 && !TREE_OVERFLOW (arg01)
11731 && operand_equal_p (arg1, arg00, 0))
11732 {
11733 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11734 REAL_VALUE_TYPE c;
11735 tree arg;
11736
11737 c = TREE_REAL_CST (arg01);
11738 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11739 arg = build_real (type, c);
11740 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11741 }
11742 }
11743
11744 /* Optimize a/root(b/c) into a*root(c/b). */
11745 if (BUILTIN_ROOT_P (fcode1))
11746 {
11747 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11748
11749 if (TREE_CODE (rootarg) == RDIV_EXPR)
11750 {
11751 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11752 tree b = TREE_OPERAND (rootarg, 0);
11753 tree c = TREE_OPERAND (rootarg, 1);
11754
11755 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11756
11757 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11758 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11759 }
11760 }
11761
11762 /* Optimize x/expN(y) into x*expN(-y). */
11763 if (BUILTIN_EXPONENT_P (fcode1))
11764 {
11765 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11766 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11767 arg1 = build_call_expr_loc (loc,
11768 expfn, 1,
11769 fold_convert_loc (loc, type, arg));
11770 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11771 }
11772
11773 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11774 if (fcode1 == BUILT_IN_POW
11775 || fcode1 == BUILT_IN_POWF
11776 || fcode1 == BUILT_IN_POWL)
11777 {
11778 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11779 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11780 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11781 tree neg11 = fold_convert_loc (loc, type,
11782 negate_expr (arg11));
11783 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11784 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11785 }
11786 }
11787 return NULL_TREE;
11788
11789 case TRUNC_DIV_EXPR:
11790 /* Optimize (X & (-A)) / A where A is a power of 2,
11791	       to X >> log2(A).  */
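	  /* E.g. (X & -8) / 8 becomes X >> 3: X & -8 is a multiple of 8,
	     so the truncating division matches an arithmetic shift even
	     for negative X.  */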
11792 if (TREE_CODE (arg0) == BIT_AND_EXPR
11793 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11794 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11795 {
11796 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11797 arg1, TREE_OPERAND (arg0, 1));
11798	      if (sum && integer_zerop (sum))
	        {
11799	          tree pow2 = build_int_cst (integer_type_node,
11800	                                     wi::exact_log2 (arg1));
11801	          return fold_build2_loc (loc, RSHIFT_EXPR, type,
11802	                                  TREE_OPERAND (arg0, 0), pow2);
11803	        }
11804 }
11805
11806 /* Fall through */
11807
11808 case FLOOR_DIV_EXPR:
11809 /* Simplify A / (B << N) where A and B are positive and B is
11810 a power of 2, to A >> (N + log2(B)). */
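      /* E.g. A / (4 << N) becomes A >> (N + 2) when A is known
	 nonnegative.  */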
11811 strict_overflow_p = false;
11812 if (TREE_CODE (arg1) == LSHIFT_EXPR
11813 && (TYPE_UNSIGNED (type)
11814 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11815 {
11816 tree sval = TREE_OPERAND (arg1, 0);
11817 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11818 {
11819 tree sh_cnt = TREE_OPERAND (arg1, 1);
11820 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11821 wi::exact_log2 (sval));
11822
11823 if (strict_overflow_p)
11824 fold_overflow_warning (("assuming signed overflow does not "
11825 "occur when simplifying A / (B << N)"),
11826 WARN_STRICT_OVERFLOW_MISC);
11827
11828 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11829 sh_cnt, pow2);
11830 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11831 fold_convert_loc (loc, type, arg0), sh_cnt);
11832 }
11833 }
11834
11835 /* Fall through */
11836
11837 case ROUND_DIV_EXPR:
11838 case CEIL_DIV_EXPR:
11839 case EXACT_DIV_EXPR:
11840 if (integer_zerop (arg1))
11841 return NULL_TREE;
11842
11843 /* Convert -A / -B to A / B when the type is signed and overflow is
11844 undefined. */
11845 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11846 && TREE_CODE (arg0) == NEGATE_EXPR
11847 && negate_expr_p (arg1))
11848 {
11849 if (INTEGRAL_TYPE_P (type))
11850 fold_overflow_warning (("assuming signed overflow does not occur "
11851 "when distributing negation across "
11852 "division"),
11853 WARN_STRICT_OVERFLOW_MISC);
11854 return fold_build2_loc (loc, code, type,
11855 fold_convert_loc (loc, type,
11856 TREE_OPERAND (arg0, 0)),
11857 fold_convert_loc (loc, type,
11858 negate_expr (arg1)));
11859 }
11860 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11861 && TREE_CODE (arg1) == NEGATE_EXPR
11862 && negate_expr_p (arg0))
11863 {
11864 if (INTEGRAL_TYPE_P (type))
11865 fold_overflow_warning (("assuming signed overflow does not occur "
11866 "when distributing negation across "
11867 "division"),
11868 WARN_STRICT_OVERFLOW_MISC);
11869 return fold_build2_loc (loc, code, type,
11870 fold_convert_loc (loc, type,
11871 negate_expr (arg0)),
11872 fold_convert_loc (loc, type,
11873 TREE_OPERAND (arg1, 0)));
11874 }
11875
11876 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11877 operation, EXACT_DIV_EXPR.
11878
11879	         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11880	         At one time others generated faster code; it's not clear whether they
11881	         still do after the last round of changes to the DIV code in expmed.c.  */
11882 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11883 && multiple_of_p (type, arg0, arg1))
11884 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11885
11886 strict_overflow_p = false;
11887 if (TREE_CODE (arg1) == INTEGER_CST
11888 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11889 &strict_overflow_p)))
11890 {
11891 if (strict_overflow_p)
11892 fold_overflow_warning (("assuming signed overflow does not occur "
11893 "when simplifying division"),
11894 WARN_STRICT_OVERFLOW_MISC);
11895 return fold_convert_loc (loc, type, tem);
11896 }
11897
11898 return NULL_TREE;
11899
11900 case CEIL_MOD_EXPR:
11901 case FLOOR_MOD_EXPR:
11902 case ROUND_MOD_EXPR:
11903 case TRUNC_MOD_EXPR:
11904 /* X % -Y is the same as X % Y. */
11905 if (code == TRUNC_MOD_EXPR
11906 && !TYPE_UNSIGNED (type)
11907 && TREE_CODE (arg1) == NEGATE_EXPR
11908 && !TYPE_OVERFLOW_TRAPS (type))
11909 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11910 fold_convert_loc (loc, type,
11911 TREE_OPERAND (arg1, 0)));
11912
11913 strict_overflow_p = false;
11914 if (TREE_CODE (arg1) == INTEGER_CST
11915 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11916 &strict_overflow_p)))
11917 {
11918 if (strict_overflow_p)
11919 fold_overflow_warning (("assuming signed overflow does not occur "
11920 "when simplifying modulus"),
11921 WARN_STRICT_OVERFLOW_MISC);
11922 return fold_convert_loc (loc, type, tem);
11923 }
11924
11925 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11926 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
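/* An illustrative example: X % 8 folds to X & 7 for nonnegative X,
   and likewise X % (4 << N) folds to X & ((4 << N) - 1).  */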
11927 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11928 && (TYPE_UNSIGNED (type)
11929 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11930 {
11931 tree c = arg1;
11932 /* Also optimize A % (C << N) where C is a power of 2,
11933 to A & ((C << N) - 1). */
11934 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11935 c = TREE_OPERAND (arg1, 0);
11936
11937 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11938 {
11939 tree mask
11940 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11941 build_int_cst (TREE_TYPE (arg1), 1));
11942 if (strict_overflow_p)
11943 fold_overflow_warning (("assuming signed overflow does not "
11944 "occur when simplifying "
11945 "X % (power of two)"),
11946 WARN_STRICT_OVERFLOW_MISC);
11947 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11948 fold_convert_loc (loc, type, arg0),
11949 fold_convert_loc (loc, type, mask));
11950 }
11951 }
11952
11953 return NULL_TREE;
11954
11955 case LROTATE_EXPR:
11956 case RROTATE_EXPR:
11957 case RSHIFT_EXPR:
11958 case LSHIFT_EXPR:
11959 /* Since a negative shift count is not well-defined,
11960 don't try to compute it in the compiler. */
11961 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11962 return NULL_TREE;
11963
11964 prec = element_precision (type);
11965
11966 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
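/* An illustrative example: (a << 3) << 4 folds to a << 7, provided 7 is
   below the precision; the code below handles the case where the
   combined count is out of range.  */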
11967 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11968 && tree_to_uhwi (arg1) < prec
11969 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11970 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11971 {
11972 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11973 + tree_to_uhwi (arg1));
11974
11975 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11976 being well defined. */
11977 if (low >= prec)
11978 {
11979 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11980 low = low % prec;
11981 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11982 return omit_one_operand_loc (loc, type, build_zero_cst (type),
11983 TREE_OPERAND (arg0, 0));
11984 else
11985 low = prec - 1;
11986 }
11987
11988 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11989 build_int_cst (TREE_TYPE (arg1), low));
11990 }
11991
11992 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11993 into x & ((unsigned)-1 >> c) for unsigned types. */
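/* An illustrative example: for a 32-bit unsigned x, (x >> 4) << 4 folds
   to x & 0xfffffff0, and (x << 4) >> 4 folds to x & 0x0fffffff.  */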
11994 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11995 || (TYPE_UNSIGNED (type)
11996 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11997 && tree_fits_uhwi_p (arg1)
11998 && tree_to_uhwi (arg1) < prec
11999 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12000 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12001 {
12002 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12003 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12004 tree lshift;
12005 tree arg00;
12006
12007 if (low0 == low1)
12008 {
12009 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12010
12011 lshift = build_minus_one_cst (type);
12012 lshift = const_binop (code, lshift, arg1);
12013
12014 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12015 }
12016 }
12017
12018 /* If we have a rotate of a bit operation with the rotate count and
12019 the second operand of the bit operation both constant,
12020 permute the two operations. */
12021 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12022 && (TREE_CODE (arg0) == BIT_AND_EXPR
12023 || TREE_CODE (arg0) == BIT_IOR_EXPR
12024 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12025 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12026 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12027 fold_build2_loc (loc, code, type,
12028 TREE_OPERAND (arg0, 0), arg1),
12029 fold_build2_loc (loc, code, type,
12030 TREE_OPERAND (arg0, 1), arg1));
12031
12032 /* Two consecutive rotates adding up to some integer
12033 multiple of the precision of the type can be ignored. */
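/* An illustrative example: on a 32-bit type, rotating right by 8 and
   then by 24 is an identity, since 8 + 24 == 32 is a multiple of the
   precision.  */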
12034 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12035 && TREE_CODE (arg0) == RROTATE_EXPR
12036 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12037 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12038 prec) == 0)
12039 return TREE_OPERAND (arg0, 0);
12040
12041 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12042 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12043 if the latter can be further optimized. */
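/* An illustrative example: (X & 0xff) << 8 would become
   (X << 8) & 0xff00, but the rewrite is only kept when the resulting
   AND folds further.  */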
12044 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12045 && TREE_CODE (arg0) == BIT_AND_EXPR
12046 && TREE_CODE (arg1) == INTEGER_CST
12047 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12048 {
12049 tree mask = fold_build2_loc (loc, code, type,
12050 fold_convert_loc (loc, type,
12051 TREE_OPERAND (arg0, 1)),
12052 arg1);
12053 tree shift = fold_build2_loc (loc, code, type,
12054 fold_convert_loc (loc, type,
12055 TREE_OPERAND (arg0, 0)),
12056 arg1);
12057 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12058 if (tem)
12059 return tem;
12060 }
12061
12062 return NULL_TREE;
12063
12064 case MIN_EXPR:
12065 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12066 if (tem)
12067 return tem;
12068 goto associate;
12069
12070 case MAX_EXPR:
12071 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12072 if (tem)
12073 return tem;
12074 goto associate;
12075
12076 case TRUTH_ANDIF_EXPR:
12077 /* Note that the operands of this must be ints
12078 and their values must be 0 or 1.
12079 ("true" is a fixed value perhaps depending on the language.) */
12080 /* If first arg is constant zero, return it. */
12081 if (integer_zerop (arg0))
12082 return fold_convert_loc (loc, type, arg0);
12083 case TRUTH_AND_EXPR:
12084 /* If either arg is constant true, drop it. */
12085 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12086 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12087 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12088 /* Preserve sequence points. */
12089 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12090 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12091 /* If second arg is constant zero, result is zero, but first arg
12092 must be evaluated. */
12093 if (integer_zerop (arg1))
12094 return omit_one_operand_loc (loc, type, arg1, arg0);
12095 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12096 case will be handled here. */
12097 if (integer_zerop (arg0))
12098 return omit_one_operand_loc (loc, type, arg0, arg1);
12099
12100 /* !X && X is always false. */
12101 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12102 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12103 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12104 /* X && !X is always false. */
12105 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12106 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12107 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12108
12109 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12110 means A >= Y && A != MAX, but in this case we know that
12111 A < X <= MAX. */
12112
12113 if (!TREE_SIDE_EFFECTS (arg0)
12114 && !TREE_SIDE_EFFECTS (arg1))
12115 {
12116 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12117 if (tem && !operand_equal_p (tem, arg0, 0))
12118 return fold_build2_loc (loc, code, type, tem, arg1);
12119
12120 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12121 if (tem && !operand_equal_p (tem, arg1, 0))
12122 return fold_build2_loc (loc, code, type, arg0, tem);
12123 }
12124
12125 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12126 != NULL_TREE)
12127 return tem;
12128
12129 return NULL_TREE;
12130
12131 case TRUTH_ORIF_EXPR:
12132 /* Note that the operands of this must be ints
12133 and their values must be 0 or true.
12134 ("true" is a fixed value perhaps depending on the language.) */
12135 /* If first arg is constant true, return it. */
12136 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12137 return fold_convert_loc (loc, type, arg0);
12138 case TRUTH_OR_EXPR:
12139 /* If either arg is constant zero, drop it. */
12140 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12141 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12142 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12143 /* Preserve sequence points. */
12144 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12145 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12146 /* If second arg is constant true, result is true, but we must
12147 evaluate first arg. */
12148 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12149 return omit_one_operand_loc (loc, type, arg1, arg0);
12150 /* Likewise for first arg, but note this only occurs here for
12151 TRUTH_OR_EXPR. */
12152 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12153 return omit_one_operand_loc (loc, type, arg0, arg1);
12154
12155 /* !X || X is always true. */
12156 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12157 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12158 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12159 /* X || !X is always true. */
12160 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12161 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12162 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12163
12164 /* (X && !Y) || (!X && Y) is X ^ Y. */
12165 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12166 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12167 {
12168 tree a0, a1, l0, l1, n0, n1;
12169
12170 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12171 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12172
12173 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12174 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12175
12176 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12177 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12178
12179 if ((operand_equal_p (n0, a0, 0)
12180 && operand_equal_p (n1, a1, 0))
12181 || (operand_equal_p (n0, a1, 0)
12182 && operand_equal_p (n1, a0, 0)))
12183 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12184 }
12185
12186 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12187 != NULL_TREE)
12188 return tem;
12189
12190 return NULL_TREE;
12191
12192 case TRUTH_XOR_EXPR:
12193 /* If the second arg is constant zero, drop it. */
12194 if (integer_zerop (arg1))
12195 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12196 /* If the second arg is constant true, this is a logical inversion. */
12197 if (integer_onep (arg1))
12198 {
12199 tem = invert_truthvalue_loc (loc, arg0);
12200 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12201 }
12202 /* Identical arguments cancel to zero. */
12203 if (operand_equal_p (arg0, arg1, 0))
12204 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12205
12206 /* !X ^ X is always true. */
12207 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12208 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12209 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12210
12211 /* X ^ !X is always true. */
12212 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12213 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12214 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12215
12216 return NULL_TREE;
12217
12218 case EQ_EXPR:
12219 case NE_EXPR:
12220 STRIP_NOPS (arg0);
12221 STRIP_NOPS (arg1);
12222
12223 tem = fold_comparison (loc, code, type, op0, op1);
12224 if (tem != NULL_TREE)
12225 return tem;
12226
12227 /* bool_var != 0 becomes bool_var. */
12228 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12229 && code == NE_EXPR)
12230 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12231
12232 /* bool_var == 1 becomes bool_var. */
12233 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12234 && code == EQ_EXPR)
12235 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12236
12237 /* bool_var != 1 becomes !bool_var. */
12238 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12239 && code == NE_EXPR)
12240 return fold_convert_loc (loc, type,
12241 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12242 TREE_TYPE (arg0), arg0));
12243
12244 /* bool_var == 0 becomes !bool_var. */
12245 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12246 && code == EQ_EXPR)
12247 return fold_convert_loc (loc, type,
12248 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12249 TREE_TYPE (arg0), arg0));
12250
12251 /* !exp != 0 becomes !exp. */
12252 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12253 && code == NE_EXPR)
12254 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12255
12256 /* If this is an equality comparison of the address of two non-weak,
12257 unaliased symbols neither of which are extern (since we do not
12258 have access to attributes for externs), then we know the result. */
12259 if (TREE_CODE (arg0) == ADDR_EXPR
12260 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12261 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12262 && ! lookup_attribute ("alias",
12263 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12264 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12265 && TREE_CODE (arg1) == ADDR_EXPR
12266 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12267 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12268 && ! lookup_attribute ("alias",
12269 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12270 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12271 {
12272 /* We know that we're looking at the address of two
12273 non-weak, unaliased, static _DECL nodes.
12274
12275 It is both wasteful and incorrect to call operand_equal_p
12276 to compare the two ADDR_EXPR nodes. It is wasteful in that
12277 all we need to do is test pointer equality for the arguments
12278 to the two ADDR_EXPR nodes. It is incorrect to use
12279 operand_equal_p as that function is NOT equivalent to a
12280 C equality test. It can in fact return false for two
12281 objects which would test as equal using the C equality
12282 operator. */
12283 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12284 return constant_boolean_node (equal
12285 ? code == EQ_EXPR : code != EQ_EXPR,
12286 type);
12287 }
12288
12289 /* Similarly for a NEGATE_EXPR. */
12290 if (TREE_CODE (arg0) == NEGATE_EXPR
12291 && TREE_CODE (arg1) == INTEGER_CST
12292 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12293 arg1)))
12294 && TREE_CODE (tem) == INTEGER_CST
12295 && !TREE_OVERFLOW (tem))
12296 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12297
12298 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
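/* An illustrative example: X ^ 5 == 3 folds to X == 6,
   since 5 ^ 3 == 6.  */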
12299 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12300 && TREE_CODE (arg1) == INTEGER_CST
12301 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12302 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12303 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12304 fold_convert_loc (loc,
12305 TREE_TYPE (arg0),
12306 arg1),
12307 TREE_OPERAND (arg0, 1)));
12308
12309 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12310 if ((TREE_CODE (arg0) == PLUS_EXPR
12311 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12312 || TREE_CODE (arg0) == MINUS_EXPR)
12313 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12314 0)),
12315 arg1, 0)
12316 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12317 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12318 {
12319 tree val = TREE_OPERAND (arg0, 1);
12320 return omit_two_operands_loc (loc, type,
12321 fold_build2_loc (loc, code, type,
12322 val,
12323 build_int_cst (TREE_TYPE (val),
12324 0)),
12325 TREE_OPERAND (arg0, 0), arg1);
12326 }
12327
12328 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
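/* Reasoning: when C is odd, C - X == X would require 2*X == C, which
   has no solution even with wrapping arithmetic, since 2*X is always
   even; so the comparison folds to a constant.  */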
12329 if (TREE_CODE (arg0) == MINUS_EXPR
12330 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12331 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12332 1)),
12333 arg1, 0)
12334 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12335 {
12336 return omit_two_operands_loc (loc, type,
12337 code == NE_EXPR
12338 ? boolean_true_node : boolean_false_node,
12339 TREE_OPERAND (arg0, 1), arg1);
12340 }
12341
12342 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12343 if (TREE_CODE (arg0) == ABS_EXPR
12344 && (integer_zerop (arg1) || real_zerop (arg1)))
12345 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12346
12347 /* If this is an EQ or NE comparison with zero and ARG0 is
12348 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12349 two operations, but the latter can be done in one less insn
12350 on machines that have only two-operand insns or on which a
12351 constant cannot be the first operand. */
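/* An illustrative example: ((1 << foo) & bar) == 0 folds to
   ((bar >> foo) & 1) == 0, avoiding a constant as the shift's first
   operand.  */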
12352 if (TREE_CODE (arg0) == BIT_AND_EXPR
12353 && integer_zerop (arg1))
12354 {
12355 tree arg00 = TREE_OPERAND (arg0, 0);
12356 tree arg01 = TREE_OPERAND (arg0, 1);
12357 if (TREE_CODE (arg00) == LSHIFT_EXPR
12358 && integer_onep (TREE_OPERAND (arg00, 0)))
12359 {
12360 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12361 arg01, TREE_OPERAND (arg00, 1));
12362 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12363 build_int_cst (TREE_TYPE (arg0), 1));
12364 return fold_build2_loc (loc, code, type,
12365 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12366 arg1);
12367 }
12368 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12369 && integer_onep (TREE_OPERAND (arg01, 0)))
12370 {
12371 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12372 arg00, TREE_OPERAND (arg01, 1));
12373 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12374 build_int_cst (TREE_TYPE (arg0), 1));
12375 return fold_build2_loc (loc, code, type,
12376 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12377 arg1);
12378 }
12379 }
12380
12381 /* If this is an NE or EQ comparison of zero against the result of a
12382 signed MOD operation whose second operand is a power of 2, make
12383 the MOD operation unsigned since it is simpler and equivalent. */
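/* This is safe because for a power-of-two C, X % C == 0 only tests the
   low bits of X, which do not depend on the sign: e.g. (int) X % 8 == 0
   iff (unsigned) X % 8u == 0.  */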
12384 if (integer_zerop (arg1)
12385 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12386 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12387 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12388 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12389 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12390 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12391 {
12392 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12393 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12394 fold_convert_loc (loc, newtype,
12395 TREE_OPERAND (arg0, 0)),
12396 fold_convert_loc (loc, newtype,
12397 TREE_OPERAND (arg0, 1)));
12398
12399 return fold_build2_loc (loc, code, type, newmod,
12400 fold_convert_loc (loc, newtype, arg1));
12401 }
12402
12403 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12404 C1 is a valid shift constant, and C2 is a power of two, i.e.
12405 a single bit. */
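/* An illustrative example: ((X >> 3) & 4) != 0 folds to (X & 32) != 0,
   since 4 == 1 << 2 and (1 << 2) << 3 == 32 does not overflow the
   precision.  */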
12406 if (TREE_CODE (arg0) == BIT_AND_EXPR
12407 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12408 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12409 == INTEGER_CST
12410 && integer_pow2p (TREE_OPERAND (arg0, 1))
12411 && integer_zerop (arg1))
12412 {
12413 tree itype = TREE_TYPE (arg0);
12414 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12415 prec = TYPE_PRECISION (itype);
12416
12417 /* Check for a valid shift count. */
12418 if (wi::ltu_p (arg001, prec))
12419 {
12420 tree arg01 = TREE_OPERAND (arg0, 1);
12421 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12422 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12423 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12424 can be rewritten as (X & (C2 << C1)) != 0. */
12425 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12426 {
12427 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12428 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12429 return fold_build2_loc (loc, code, type, tem,
12430 fold_convert_loc (loc, itype, arg1));
12431 }
12432 /* Otherwise, for signed (arithmetic) shifts,
12433 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12434 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12435 else if (!TYPE_UNSIGNED (itype))
12436 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12437 arg000, build_int_cst (itype, 0));
12438 /* Otherwise, for unsigned (logical) shifts,
12439 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12440 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12441 else
12442 return omit_one_operand_loc (loc, type,
12443 code == EQ_EXPR ? integer_one_node
12444 : integer_zero_node,
12445 arg000);
12446 }
12447 }
12448
12449 /* If we have (A & C) == C where C is a power of 2, convert this into
12450 (A & C) != 0. Similarly for NE_EXPR. */
12451 if (TREE_CODE (arg0) == BIT_AND_EXPR
12452 && integer_pow2p (TREE_OPERAND (arg0, 1))
12453 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12454 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12455 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12456 integer_zero_node));
12457
12458 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12459 bit, then fold the expression into A < 0 or A >= 0. */
12460 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12461 if (tem)
12462 return tem;
12463
12464 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12465 Similarly for NE_EXPR. */
12466 if (TREE_CODE (arg0) == BIT_AND_EXPR
12467 && TREE_CODE (arg1) == INTEGER_CST
12468 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12469 {
12470 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12471 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12472 TREE_OPERAND (arg0, 1));
12473 tree dandnotc
12474 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12475 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12476 notc);
12477 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12478 if (integer_nonzerop (dandnotc))
12479 return omit_one_operand_loc (loc, type, rslt, arg0);
12480 }
12481
12482 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12483 Similarly for NE_EXPR. */
12484 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12485 && TREE_CODE (arg1) == INTEGER_CST
12486 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12487 {
12488 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12489 tree candnotd
12490 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12491 TREE_OPERAND (arg0, 1),
12492 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12493 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12494 if (integer_nonzerop (candnotd))
12495 return omit_one_operand_loc (loc, type, rslt, arg0);
12496 }
12497
12498 /* If this is a comparison of a field, we may be able to simplify it. */
12499 if ((TREE_CODE (arg0) == COMPONENT_REF
12500 || TREE_CODE (arg0) == BIT_FIELD_REF)
12501 /* Handle the constant case even without -O
12502 to make sure the warnings are given. */
12503 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12504 {
12505 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12506 if (t1)
12507 return t1;
12508 }
12509
12510 /* Optimize comparisons of strlen vs zero to a compare of the
12511 first character of the string vs zero. To wit,
12512 strlen(ptr) == 0 => *ptr == 0
12513 strlen(ptr) != 0 => *ptr != 0
12514 Other cases should reduce to one of these two (or a constant)
12515 due to the return value of strlen being unsigned. */
12516 if (TREE_CODE (arg0) == CALL_EXPR
12517 && integer_zerop (arg1))
12518 {
12519 tree fndecl = get_callee_fndecl (arg0);
12520
12521 if (fndecl
12522 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12523 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12524 && call_expr_nargs (arg0) == 1
12525 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12526 {
12527 tree iref = build_fold_indirect_ref_loc (loc,
12528 CALL_EXPR_ARG (arg0, 0));
12529 return fold_build2_loc (loc, code, type, iref,
12530 build_int_cst (TREE_TYPE (iref), 0));
12531 }
12532 }
12533
12534 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12535 of X. Similarly fold (X >> C) == 0 into X >= 0. */
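/* An illustrative example: for a 32-bit int X, (X >> 31) != 0 folds to
   X < 0 and (X >> 31) == 0 folds to X >= 0.  */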
12536 if (TREE_CODE (arg0) == RSHIFT_EXPR
12537 && integer_zerop (arg1)
12538 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12539 {
12540 tree arg00 = TREE_OPERAND (arg0, 0);
12541 tree arg01 = TREE_OPERAND (arg0, 1);
12542 tree itype = TREE_TYPE (arg00);
12543 if (wi::eq_p (arg01, element_precision (itype) - 1))
12544 {
12545 if (TYPE_UNSIGNED (itype))
12546 {
12547 itype = signed_type_for (itype);
12548 arg00 = fold_convert_loc (loc, itype, arg00);
12549 }
12550 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12551 type, arg00, build_zero_cst (itype));
12552 }
12553 }
12554
12555 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12556 if (integer_zerop (arg1)
12557 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12558 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12559 TREE_OPERAND (arg0, 1));
12560
12561 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12562 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12563 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12564 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12565 build_zero_cst (TREE_TYPE (arg0)));
12566 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12567 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12568 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12569 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12570 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12571 build_zero_cst (TREE_TYPE (arg0)));
12572
12573 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12574 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12575 && TREE_CODE (arg1) == INTEGER_CST
12576 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12577 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12578 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12579 TREE_OPERAND (arg0, 1), arg1));
12580
12581 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12582 (X & C) == 0 when C is a single bit. */
12583 if (TREE_CODE (arg0) == BIT_AND_EXPR
12584 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12585 && integer_zerop (arg1)
12586 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12587 {
12588 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12589 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12590 TREE_OPERAND (arg0, 1));
12591 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12592 type, tem,
12593 fold_convert_loc (loc, TREE_TYPE (arg0),
12594 arg1));
12595 }
12596
12597 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12598 constant C is a power of two, i.e. a single bit. */
12599 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12600 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12601 && integer_zerop (arg1)
12602 && integer_pow2p (TREE_OPERAND (arg0, 1))
12603 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12604 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12605 {
12606 tree arg00 = TREE_OPERAND (arg0, 0);
12607 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12608 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12609 }
12610
12611 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12612 when C is a power of two, i.e. a single bit. */
12613 if (TREE_CODE (arg0) == BIT_AND_EXPR
12614 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12615 && integer_zerop (arg1)
12616 && integer_pow2p (TREE_OPERAND (arg0, 1))
12617 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12618 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12619 {
12620 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12621 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12622 arg000, TREE_OPERAND (arg0, 1));
12623 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12624 tem, build_int_cst (TREE_TYPE (tem), 0));
12625 }
12626
12627 if (integer_zerop (arg1)
12628 && tree_expr_nonzero_p (arg0))
12629 {
12630 tree res = constant_boolean_node (code == NE_EXPR, type);
12631 return omit_one_operand_loc (loc, type, res, arg0);
12632 }
12633
12634 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12635 if (TREE_CODE (arg0) == NEGATE_EXPR
12636 && TREE_CODE (arg1) == NEGATE_EXPR)
12637 return fold_build2_loc (loc, code, type,
12638 TREE_OPERAND (arg0, 0),
12639 fold_convert_loc (loc, TREE_TYPE (arg0),
12640 TREE_OPERAND (arg1, 0)));
12641
12642 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12643 if (TREE_CODE (arg0) == BIT_AND_EXPR
12644 && TREE_CODE (arg1) == BIT_AND_EXPR)
12645 {
12646 tree arg00 = TREE_OPERAND (arg0, 0);
12647 tree arg01 = TREE_OPERAND (arg0, 1);
12648 tree arg10 = TREE_OPERAND (arg1, 0);
12649 tree arg11 = TREE_OPERAND (arg1, 1);
12650 tree itype = TREE_TYPE (arg0);
12651
12652 if (operand_equal_p (arg01, arg11, 0))
12653 return fold_build2_loc (loc, code, type,
12654 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12655 fold_build2_loc (loc,
12656 BIT_XOR_EXPR, itype,
12657 arg00, arg10),
12658 arg01),
12659 build_zero_cst (itype));
12660
12661 if (operand_equal_p (arg01, arg10, 0))
12662 return fold_build2_loc (loc, code, type,
12663 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12664 fold_build2_loc (loc,
12665 BIT_XOR_EXPR, itype,
12666 arg00, arg11),
12667 arg01),
12668 build_zero_cst (itype));
12669
12670 if (operand_equal_p (arg00, arg11, 0))
12671 return fold_build2_loc (loc, code, type,
12672 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12673 fold_build2_loc (loc,
12674 BIT_XOR_EXPR, itype,
12675 arg01, arg10),
12676 arg00),
12677 build_zero_cst (itype));
12678
12679 if (operand_equal_p (arg00, arg10, 0))
12680 return fold_build2_loc (loc, code, type,
12681 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12682 fold_build2_loc (loc,
12683 BIT_XOR_EXPR, itype,
12684 arg01, arg11),
12685 arg00),
12686 build_zero_cst (itype));
12687 }
12688
12689 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12690 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12691 {
12692 tree arg00 = TREE_OPERAND (arg0, 0);
12693 tree arg01 = TREE_OPERAND (arg0, 1);
12694 tree arg10 = TREE_OPERAND (arg1, 0);
12695 tree arg11 = TREE_OPERAND (arg1, 1);
12696 tree itype = TREE_TYPE (arg0);
12697
12698 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12699 operand_equal_p guarantees no side-effects so we don't need
12700 to use omit_one_operand on Z. */
12701 if (operand_equal_p (arg01, arg11, 0))
12702 return fold_build2_loc (loc, code, type, arg00,
12703 fold_convert_loc (loc, TREE_TYPE (arg00),
12704 arg10));
12705 if (operand_equal_p (arg01, arg10, 0))
12706 return fold_build2_loc (loc, code, type, arg00,
12707 fold_convert_loc (loc, TREE_TYPE (arg00),
12708 arg11));
12709 if (operand_equal_p (arg00, arg11, 0))
12710 return fold_build2_loc (loc, code, type, arg01,
12711 fold_convert_loc (loc, TREE_TYPE (arg01),
12712 arg10));
12713 if (operand_equal_p (arg00, arg10, 0))
12714 return fold_build2_loc (loc, code, type, arg01,
12715 fold_convert_loc (loc, TREE_TYPE (arg01),
12716 arg11));
12717
12718 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12719 if (TREE_CODE (arg01) == INTEGER_CST
12720 && TREE_CODE (arg11) == INTEGER_CST)
12721 {
12722 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12723 fold_convert_loc (loc, itype, arg11));
12724 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12725 return fold_build2_loc (loc, code, type, tem,
12726 fold_convert_loc (loc, itype, arg10));
12727 }
12728 }
12729
12730 /* Attempt to simplify equality/inequality comparisons of complex
12731 values. Only lower the comparison if the result is known or
12732 can be simplified to a single scalar comparison. */
12733 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12734 || TREE_CODE (arg0) == COMPLEX_CST)
12735 && (TREE_CODE (arg1) == COMPLEX_EXPR
12736 || TREE_CODE (arg1) == COMPLEX_CST))
12737 {
12738 tree real0, imag0, real1, imag1;
12739 tree rcond, icond;
12740
12741 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12742 {
12743 real0 = TREE_OPERAND (arg0, 0);
12744 imag0 = TREE_OPERAND (arg0, 1);
12745 }
12746 else
12747 {
12748 real0 = TREE_REALPART (arg0);
12749 imag0 = TREE_IMAGPART (arg0);
12750 }
12751
12752 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12753 {
12754 real1 = TREE_OPERAND (arg1, 0);
12755 imag1 = TREE_OPERAND (arg1, 1);
12756 }
12757 else
12758 {
12759 real1 = TREE_REALPART (arg1);
12760 imag1 = TREE_IMAGPART (arg1);
12761 }
12762
12763 rcond = fold_binary_loc (loc, code, type, real0, real1);
12764 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12765 {
12766 if (integer_zerop (rcond))
12767 {
12768 if (code == EQ_EXPR)
12769 return omit_two_operands_loc (loc, type, boolean_false_node,
12770 imag0, imag1);
12771 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12772 }
12773 else
12774 {
12775 if (code == NE_EXPR)
12776 return omit_two_operands_loc (loc, type, boolean_true_node,
12777 imag0, imag1);
12778 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12779 }
12780 }
12781
12782 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12783 if (icond && TREE_CODE (icond) == INTEGER_CST)
12784 {
12785 if (integer_zerop (icond))
12786 {
12787 if (code == EQ_EXPR)
12788 return omit_two_operands_loc (loc, type, boolean_false_node,
12789 real0, real1);
12790 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12791 }
12792 else
12793 {
12794 if (code == NE_EXPR)
12795 return omit_two_operands_loc (loc, type, boolean_true_node,
12796 real0, real1);
12797 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12798 }
12799 }
12800 }
12801
12802 return NULL_TREE;
12803
12804 case LT_EXPR:
12805 case GT_EXPR:
12806 case LE_EXPR:
12807 case GE_EXPR:
12808 tem = fold_comparison (loc, code, type, op0, op1);
12809 if (tem != NULL_TREE)
12810 return tem;
12811
12812 /* Transform comparisons of the form X +- C CMP X. */
12813 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12814 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12815 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12816 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12817 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12818 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12819 {
12820 tree arg01 = TREE_OPERAND (arg0, 1);
12821 enum tree_code code0 = TREE_CODE (arg0);
12822 int is_positive;
12823
12824 if (TREE_CODE (arg01) == REAL_CST)
12825 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12826 else
12827 is_positive = tree_int_cst_sgn (arg01);
12828
12829 /* (X - c) > X becomes false. */
12830 if (code == GT_EXPR
12831 && ((code0 == MINUS_EXPR && is_positive >= 0)
12832 || (code0 == PLUS_EXPR && is_positive <= 0)))
12833 {
12834 if (TREE_CODE (arg01) == INTEGER_CST
12835 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12836 fold_overflow_warning (("assuming signed overflow does not "
12837 "occur when assuming that (X - c) > X "
12838 "is always false"),
12839 WARN_STRICT_OVERFLOW_ALL);
12840 return constant_boolean_node (0, type);
12841 }
12842
12843 /* Likewise (X + c) < X becomes false. */
12844 if (code == LT_EXPR
12845 && ((code0 == PLUS_EXPR && is_positive >= 0)
12846 || (code0 == MINUS_EXPR && is_positive <= 0)))
12847 {
12848 if (TREE_CODE (arg01) == INTEGER_CST
12849 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12850 fold_overflow_warning (("assuming signed overflow does not "
12851 "occur when assuming that "
12852 "(X + c) < X is always false"),
12853 WARN_STRICT_OVERFLOW_ALL);
12854 return constant_boolean_node (0, type);
12855 }
12856
12857 /* Convert (X - c) <= X to true. */
12858 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12859 && code == LE_EXPR
12860 && ((code0 == MINUS_EXPR && is_positive >= 0)
12861 || (code0 == PLUS_EXPR && is_positive <= 0)))
12862 {
12863 if (TREE_CODE (arg01) == INTEGER_CST
12864 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12865 fold_overflow_warning (("assuming signed overflow does not "
12866 "occur when assuming that "
12867 "(X - c) <= X is always true"),
12868 WARN_STRICT_OVERFLOW_ALL);
12869 return constant_boolean_node (1, type);
12870 }
12871
12872 /* Convert (X + c) >= X to true. */
12873 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12874 && code == GE_EXPR
12875 && ((code0 == PLUS_EXPR && is_positive >= 0)
12876 || (code0 == MINUS_EXPR && is_positive <= 0)))
12877 {
12878 if (TREE_CODE (arg01) == INTEGER_CST
12879 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12880 fold_overflow_warning (("assuming signed overflow does not "
12881 "occur when assuming that "
12882 "(X + c) >= X is always true"),
12883 WARN_STRICT_OVERFLOW_ALL);
12884 return constant_boolean_node (1, type);
12885 }
12886
12887 if (TREE_CODE (arg01) == INTEGER_CST)
12888 {
12889 /* Convert X + c > X and X - c < X to true for integers. */
12890 if (code == GT_EXPR
12891 && ((code0 == PLUS_EXPR && is_positive > 0)
12892 || (code0 == MINUS_EXPR && is_positive < 0)))
12893 {
12894 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12895 fold_overflow_warning (("assuming signed overflow does "
12896 "not occur when assuming that "
12897 "(X + c) > X is always true"),
12898 WARN_STRICT_OVERFLOW_ALL);
12899 return constant_boolean_node (1, type);
12900 }
12901
12902 if (code == LT_EXPR
12903 && ((code0 == MINUS_EXPR && is_positive > 0)
12904 || (code0 == PLUS_EXPR && is_positive < 0)))
12905 {
12906 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12907 fold_overflow_warning (("assuming signed overflow does "
12908 "not occur when assuming that "
12909 "(X - c) < X is always true"),
12910 WARN_STRICT_OVERFLOW_ALL);
12911 return constant_boolean_node (1, type);
12912 }
12913
12914 /* Convert X + c <= X and X - c >= X to false for integers. */
12915 if (code == LE_EXPR
12916 && ((code0 == PLUS_EXPR && is_positive > 0)
12917 || (code0 == MINUS_EXPR && is_positive < 0)))
12918 {
12919 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12920 fold_overflow_warning (("assuming signed overflow does "
12921 "not occur when assuming that "
12922 "(X + c) <= X is always false"),
12923 WARN_STRICT_OVERFLOW_ALL);
12924 return constant_boolean_node (0, type);
12925 }
12926
12927 if (code == GE_EXPR
12928 && ((code0 == MINUS_EXPR && is_positive > 0)
12929 || (code0 == PLUS_EXPR && is_positive < 0)))
12930 {
12931 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12932 fold_overflow_warning (("assuming signed overflow does "
12933 "not occur when assuming that "
12934 "(X - c) >= X is always false"),
12935 WARN_STRICT_OVERFLOW_ALL);
12936 return constant_boolean_node (0, type);
12937 }
12938 }
12939 }
12940
12941 /* Comparisons with the highest or lowest possible integer of
12942 the specified precision will have known values. */
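/* An illustrative example: for a 16-bit unsigned short X, X > 65535
   folds to false, X <= 65535 folds to true, and X >= 65535 becomes
   X == 65535.  */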
12943 {
12944 tree arg1_type = TREE_TYPE (arg1);
12945 unsigned int prec = TYPE_PRECISION (arg1_type);
12946
12947 if (TREE_CODE (arg1) == INTEGER_CST
12948 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12949 {
12950 wide_int max = wi::max_value (arg1_type);
12951 wide_int signed_max = wi::max_value (prec, SIGNED);
12952 wide_int min = wi::min_value (arg1_type);
12953
12954 if (wi::eq_p (arg1, max))
12955 switch (code)
12956 {
12957 case GT_EXPR:
12958 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12959
12960 case GE_EXPR:
12961 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12962
12963 case LE_EXPR:
12964 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12965
12966 case LT_EXPR:
12967 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12968
12969 /* The GE_EXPR and LT_EXPR cases above are not normally
12970 reached because of previous transformations. */
12971
12972 default:
12973 break;
12974 }
12975 else if (wi::eq_p (arg1, max - 1))
12976 switch (code)
12977 {
12978 case GT_EXPR:
12979 arg1 = const_binop (PLUS_EXPR, arg1,
12980 build_int_cst (TREE_TYPE (arg1), 1));
12981 return fold_build2_loc (loc, EQ_EXPR, type,
12982 fold_convert_loc (loc,
12983 TREE_TYPE (arg1), arg0),
12984 arg1);
12985 case LE_EXPR:
12986 arg1 = const_binop (PLUS_EXPR, arg1,
12987 build_int_cst (TREE_TYPE (arg1), 1));
12988 return fold_build2_loc (loc, NE_EXPR, type,
12989 fold_convert_loc (loc, TREE_TYPE (arg1),
12990 arg0),
12991 arg1);
12992 default:
12993 break;
12994 }
12995 else if (wi::eq_p (arg1, min))
12996 switch (code)
12997 {
12998 case LT_EXPR:
12999 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13000
13001 case LE_EXPR:
13002 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13003
13004 case GE_EXPR:
13005 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13006
13007 case GT_EXPR:
13008 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13009
13010 default:
13011 break;
13012 }
13013 else if (wi::eq_p (arg1, min + 1))
13014 switch (code)
13015 {
13016 case GE_EXPR:
13017 arg1 = const_binop (MINUS_EXPR, arg1,
13018 build_int_cst (TREE_TYPE (arg1), 1));
13019 return fold_build2_loc (loc, NE_EXPR, type,
13020 fold_convert_loc (loc,
13021 TREE_TYPE (arg1), arg0),
13022 arg1);
13023 case LT_EXPR:
13024 arg1 = const_binop (MINUS_EXPR, arg1,
13025 build_int_cst (TREE_TYPE (arg1), 1));
13026 return fold_build2_loc (loc, EQ_EXPR, type,
13027 fold_convert_loc (loc, TREE_TYPE (arg1),
13028 arg0),
13029 arg1);
13030 default:
13031 break;
13032 }
13033
13034 else if (wi::eq_p (arg1, signed_max)
13035 && TYPE_UNSIGNED (arg1_type)
13036 /* We will flip the signedness of the comparison operator
13037 associated with the mode of arg1, so the sign bit is
13038 specified by this mode. Check that arg1 is the signed
13039 max associated with this sign bit. */
13040 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13041 /* signed_type does not work on pointer types. */
13042 && INTEGRAL_TYPE_P (arg1_type))
13043 {
13044 /* The following case also applies to X < signed_max+1
13045 and X >= signed_max+1 because of previous transformations. */
13046 if (code == LE_EXPR || code == GT_EXPR)
13047 {
13048 tree st = signed_type_for (arg1_type);
13049 return fold_build2_loc (loc,
13050 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13051 type, fold_convert_loc (loc, st, arg0),
13052 build_int_cst (st, 0));
13053 }
13054 }
13055 }
13056 }
13057
13058 /* If we are comparing an ABS_EXPR with a constant, we can
13059 convert all the cases into explicit comparisons, but they may
13060 well not be faster than doing the ABS and one comparison.
13061 But ABS (X) <= C is a range comparison, which becomes a subtraction
13062 and a comparison, and is probably faster. */
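/* An illustrative example: ABS (X) <= 5 becomes X >= -5 && X <= 5,
   a range check instead of computing the absolute value.  */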
13063 if (code == LE_EXPR
13064 && TREE_CODE (arg1) == INTEGER_CST
13065 && TREE_CODE (arg0) == ABS_EXPR
13066 && ! TREE_SIDE_EFFECTS (arg0)
13067 && (0 != (tem = negate_expr (arg1)))
13068 && TREE_CODE (tem) == INTEGER_CST
13069 && !TREE_OVERFLOW (tem))
13070 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13071 build2 (GE_EXPR, type,
13072 TREE_OPERAND (arg0, 0), tem),
13073 build2 (LE_EXPR, type,
13074 TREE_OPERAND (arg0, 0), arg1));
13075
13076 /* Convert ABS_EXPR<x> >= 0 to true. */
13077 strict_overflow_p = false;
13078 if (code == GE_EXPR
13079 && (integer_zerop (arg1)
13080 || (! HONOR_NANS (element_mode (arg0))
13081 && real_zerop (arg1)))
13082 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13083 {
13084 if (strict_overflow_p)
13085 fold_overflow_warning (("assuming signed overflow does not occur "
13086 "when simplifying comparison of "
13087 "absolute value and zero"),
13088 WARN_STRICT_OVERFLOW_CONDITIONAL);
13089 return omit_one_operand_loc (loc, type,
13090 constant_boolean_node (true, type),
13091 arg0);
13092 }
13093
13094 /* Convert ABS_EXPR<x> < 0 to false. */
13095 strict_overflow_p = false;
13096 if (code == LT_EXPR
13097 && (integer_zerop (arg1) || real_zerop (arg1))
13098 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13099 {
13100 if (strict_overflow_p)
13101 fold_overflow_warning (("assuming signed overflow does not occur "
13102 "when simplifying comparison of "
13103 "absolute value and zero"),
13104 WARN_STRICT_OVERFLOW_CONDITIONAL);
13105 return omit_one_operand_loc (loc, type,
13106 constant_boolean_node (false, type),
13107 arg0);
13108 }
13109
13110 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13111 and similarly for >= into !=. */
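/* An illustrative example: for unsigned X, X < (1U << Y) folds to
   (X >> Y) == 0 and X >= (1U << Y) folds to (X >> Y) != 0.  */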
13112 if ((code == LT_EXPR || code == GE_EXPR)
13113 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13114 && TREE_CODE (arg1) == LSHIFT_EXPR
13115 && integer_onep (TREE_OPERAND (arg1, 0)))
13116 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13117 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13118 TREE_OPERAND (arg1, 1)),
13119 build_zero_cst (TREE_TYPE (arg0)));
13120
13121 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13122 otherwise Y might be >= # of bits in X's type and thus e.g.
13123 (unsigned char) (1 << Y) for Y == 15 might be 0.
13124 If the cast is widening, then 1 << Y should have unsigned type,
13125 otherwise if Y is number of bits in the signed shift type minus 1,
13126 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13127 Y == 31 might be 0xffffffff80000000. */
13128 if ((code == LT_EXPR || code == GE_EXPR)
13129 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13130 && CONVERT_EXPR_P (arg1)
13131 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13132 && (element_precision (TREE_TYPE (arg1))
13133 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13134 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13135 || (element_precision (TREE_TYPE (arg1))
13136 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13137 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13138 {
13139 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13140 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13141 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13142 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13143 build_zero_cst (TREE_TYPE (arg0)));
13144 }
13145
13146 return NULL_TREE;
13147
13148 case UNORDERED_EXPR:
13149 case ORDERED_EXPR:
13150 case UNLT_EXPR:
13151 case UNLE_EXPR:
13152 case UNGT_EXPR:
13153 case UNGE_EXPR:
13154 case UNEQ_EXPR:
13155 case LTGT_EXPR:
13156 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13157 {
13158 t1 = fold_relational_const (code, type, arg0, arg1);
13159 if (t1 != NULL_TREE)
13160 return t1;
13161 }
13162
13163 /* If the first operand is NaN, the result is constant. */
13164 if (TREE_CODE (arg0) == REAL_CST
13165 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13166 && (code != LTGT_EXPR || ! flag_trapping_math))
13167 {
13168 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13169 ? integer_zero_node
13170 : integer_one_node;
13171 return omit_one_operand_loc (loc, type, t1, arg1);
13172 }
13173
13174 /* If the second operand is NaN, the result is constant. */
13175 if (TREE_CODE (arg1) == REAL_CST
13176 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13177 && (code != LTGT_EXPR || ! flag_trapping_math))
13178 {
13179 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13180 ? integer_zero_node
13181 : integer_one_node;
13182 return omit_one_operand_loc (loc, type, t1, arg0);
13183 }
13184
13185 /* Simplify unordered comparison of something with itself. */
13186 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13187 && operand_equal_p (arg0, arg1, 0))
13188 return constant_boolean_node (1, type);
13189
13190 if (code == LTGT_EXPR
13191 && !flag_trapping_math
13192 && operand_equal_p (arg0, arg1, 0))
13193 return constant_boolean_node (0, type);
13194
13195 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13196 {
13197 tree targ0 = strip_float_extensions (arg0);
13198 tree targ1 = strip_float_extensions (arg1);
13199 tree newtype = TREE_TYPE (targ0);
13200
13201 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13202 newtype = TREE_TYPE (targ1);
13203
13204 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13205 return fold_build2_loc (loc, code, type,
13206 fold_convert_loc (loc, newtype, targ0),
13207 fold_convert_loc (loc, newtype, targ1));
13208 }
13209
13210 return NULL_TREE;
13211
13212 case COMPOUND_EXPR:
13213 /* When pedantic, a compound expression can be neither an lvalue
13214 nor an integer constant expression. */
13215 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13216 return NULL_TREE;
13217 /* Don't let (0, 0) be a null pointer constant. */
13218 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13219 : fold_convert_loc (loc, type, arg1);
13220 return pedantic_non_lvalue_loc (loc, tem);
13221
13222 case ASSERT_EXPR:
13223 /* An ASSERT_EXPR should never be passed to fold_binary. */
13224 gcc_unreachable ();
13225
13226 default:
13227 return NULL_TREE;
13228 } /* switch (code) */
13229 }
13230
13231 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13232 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13233 of GOTO_EXPR. */
13234
13235 static tree
13236 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13237 {
13238 switch (TREE_CODE (*tp))
13239 {
13240 case LABEL_EXPR:
13241 return *tp;
13242
13243 case GOTO_EXPR:
13244 *walk_subtrees = 0;
13245
13246 /* ... fall through ... */
13247
13248 default:
13249 return NULL_TREE;
13250 }
13251 }
13252
13253 /* Return whether the sub-tree ST contains a label which is accessible from
13254 outside the sub-tree. */
13255
13256 static bool
13257 contains_label_p (tree st)
13258 {
13259 return
13260 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13261 }
13262
13263 /* Fold a ternary expression of code CODE and type TYPE with operands
13264 OP0, OP1, and OP2. Return the folded expression if folding is
13265 successful. Otherwise, return NULL_TREE. */
13266
13267 tree
13268 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13269 tree op0, tree op1, tree op2)
13270 {
13271 tree tem;
13272 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13273 enum tree_code_class kind = TREE_CODE_CLASS (code);
13274
13275 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13276 && TREE_CODE_LENGTH (code) == 3);
13277
13278 /* If this is a commutative operation, and OP0 is a constant, move it
13279 to OP1 to reduce the number of tests below. */
13280 if (commutative_ternary_tree_code (code)
13281 && tree_swap_operands_p (op0, op1, true))
13282 return fold_build3_loc (loc, code, type, op1, op0, op2);
13283
13284 tem = generic_simplify (loc, code, type, op0, op1, op2);
13285 if (tem)
13286 return tem;
13287
13288 /* Strip any conversions that don't change the mode. This is safe
13289 for every expression, except for a comparison expression because
13290 its signedness is derived from its operands. So, in the latter
13291 case, only strip conversions that don't change the signedness.
13292
13293 Note that this is done as an internal manipulation within the
13294 constant folder, in order to find the simplest representation of
13295 the arguments so that their form can be studied. In any case,
13296 the appropriate type conversions should be put back in the tree
13297 that will get out of the constant folder. */
13298 if (op0)
13299 {
13300 arg0 = op0;
13301 STRIP_NOPS (arg0);
13302 }
13303
13304 if (op1)
13305 {
13306 arg1 = op1;
13307 STRIP_NOPS (arg1);
13308 }
13309
13310 if (op2)
13311 {
13312 arg2 = op2;
13313 STRIP_NOPS (arg2);
13314 }
13315
13316 switch (code)
13317 {
13318 case COMPONENT_REF:
13319 if (TREE_CODE (arg0) == CONSTRUCTOR
13320 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13321 {
13322 unsigned HOST_WIDE_INT idx;
13323 tree field, value;
13324 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13325 if (field == arg1)
13326 return value;
13327 }
13328 return NULL_TREE;
13329
13330 case COND_EXPR:
13331 case VEC_COND_EXPR:
13332 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13333 so all simple results must be passed through pedantic_non_lvalue. */
13334 if (TREE_CODE (arg0) == INTEGER_CST)
13335 {
13336 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13337 tem = integer_zerop (arg0) ? op2 : op1;
13338 /* Only optimize constant conditions when the selected branch
13339 has the same type as the COND_EXPR. This avoids optimizing
13340 away "c ? x : throw", where the throw has a void type.
13341 Avoid throwing away that operand if it contains a label. */
13342 if ((!TREE_SIDE_EFFECTS (unused_op)
13343 || !contains_label_p (unused_op))
13344 && (! VOID_TYPE_P (TREE_TYPE (tem))
13345 || VOID_TYPE_P (type)))
13346 return pedantic_non_lvalue_loc (loc, tem);
13347 return NULL_TREE;
13348 }
13349 else if (TREE_CODE (arg0) == VECTOR_CST)
13350 {
13351 if ((TREE_CODE (arg1) == VECTOR_CST
13352 || TREE_CODE (arg1) == CONSTRUCTOR)
13353 && (TREE_CODE (arg2) == VECTOR_CST
13354 || TREE_CODE (arg2) == CONSTRUCTOR))
13355 {
13356 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13357 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13358 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13359 for (i = 0; i < nelts; i++)
13360 {
13361 tree val = VECTOR_CST_ELT (arg0, i);
13362 if (integer_all_onesp (val))
13363 sel[i] = i;
13364 else if (integer_zerop (val))
13365 sel[i] = nelts + i;
13366 else /* Currently unreachable. */
13367 return NULL_TREE;
13368 }
13369 tree t = fold_vec_perm (type, arg1, arg2, sel);
13370 if (t != NULL_TREE)
13371 return t;
13372 }
13373 }
13374
13375 /* If we have A op B ? A : C, we may be able to convert this to a
13376 simpler expression, depending on the operation and the values
13377 of B and C. Signed zeros prevent all of these transformations,
13378 for reasons given above each one.
13379
13380 Also try swapping the arguments and inverting the conditional. */
13381 if (COMPARISON_CLASS_P (arg0)
13382 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13383 arg1, TREE_OPERAND (arg0, 1))
13384 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13385 {
13386 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13387 if (tem)
13388 return tem;
13389 }
13390
13391 if (COMPARISON_CLASS_P (arg0)
13392 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13393 op2,
13394 TREE_OPERAND (arg0, 1))
13395 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13396 {
13397 location_t loc0 = expr_location_or (arg0, loc);
13398 tem = fold_invert_truthvalue (loc0, arg0);
13399 if (tem && COMPARISON_CLASS_P (tem))
13400 {
13401 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13402 if (tem)
13403 return tem;
13404 }
13405 }
13406
13407 /* If the second operand is simpler than the third, swap them
13408 since that produces better jump optimization results. */
13409 if (truth_value_p (TREE_CODE (arg0))
13410 && tree_swap_operands_p (op1, op2, false))
13411 {
13412 location_t loc0 = expr_location_or (arg0, loc);
13413 /* See if this can be inverted. If it can't, possibly because
13414 it was a floating-point inequality comparison, don't do
13415 anything. */
13416 tem = fold_invert_truthvalue (loc0, arg0);
13417 if (tem)
13418 return fold_build3_loc (loc, code, type, tem, op2, op1);
13419 }
13420
13421 /* Convert A ? 1 : 0 to simply A. */
13422 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13423 : (integer_onep (op1)
13424 && !VECTOR_TYPE_P (type)))
13425 && integer_zerop (op2)
13426 /* If we try to convert OP0 to our type, the
13427 call to fold will try to move the conversion inside
13428 a COND, which will recurse. In that case, the COND_EXPR
13429 is probably the best choice, so leave it alone. */
13430 && type == TREE_TYPE (arg0))
13431 return pedantic_non_lvalue_loc (loc, arg0);
13432
13433 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13434 over COND_EXPR in cases such as floating point comparisons. */
13435 if (integer_zerop (op1)
13436 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13437 : (integer_onep (op2)
13438 && !VECTOR_TYPE_P (type)))
13439 && truth_value_p (TREE_CODE (arg0)))
13440 return pedantic_non_lvalue_loc (loc,
13441 fold_convert_loc (loc, type,
13442 invert_truthvalue_loc (loc,
13443 arg0)));
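/* Editor's note (not part of the original source): a minimal,
   hypothetical source-level illustration of the two conversions
   above, assuming an int-typed truth condition:

       int f (int a, int b) { return a < b ? 1 : 0; }
       int g (int a, int b) { return a < b ? 0 : 1; }

   The body of f folds to "a < b" and the body of g to the inverted
   truth value "!(a < b)", provided the types match as required
   above.  */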
13444
13445 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13446 if (TREE_CODE (arg0) == LT_EXPR
13447 && integer_zerop (TREE_OPERAND (arg0, 1))
13448 && integer_zerop (op2)
13449 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13450 {
13451 /* sign_bit_p looks through both zero and sign extensions,
13452 but for this optimization only sign extensions are
13453 usable. */
13454 tree tem2 = TREE_OPERAND (arg0, 0);
13455 while (tem != tem2)
13456 {
13457 if (TREE_CODE (tem2) != NOP_EXPR
13458 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13459 {
13460 tem = NULL_TREE;
13461 break;
13462 }
13463 tem2 = TREE_OPERAND (tem2, 0);
13464 }
13465 /* sign_bit_p only checks ARG1 bits within A's precision.
13466 If <sign bit of A> has wider type than A, bits outside
13467 of A's precision in <sign bit of A> need to be checked.
13468 If they are all 0, this optimization needs to be done
13469 in unsigned A's type; if they are all 1, in signed A's type;
13470 otherwise this can't be done. */
13471 if (tem
13472 && TYPE_PRECISION (TREE_TYPE (tem))
13473 < TYPE_PRECISION (TREE_TYPE (arg1))
13474 && TYPE_PRECISION (TREE_TYPE (tem))
13475 < TYPE_PRECISION (type))
13476 {
13477 int inner_width, outer_width;
13478 tree tem_type;
13479
13480 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13481 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13482 if (outer_width > TYPE_PRECISION (type))
13483 outer_width = TYPE_PRECISION (type);
13484
13485 wide_int mask = wi::shifted_mask
13486 (inner_width, outer_width - inner_width, false,
13487 TYPE_PRECISION (TREE_TYPE (arg1)));
13488
13489 wide_int common = mask & arg1;
13490 if (common == mask)
13491 {
13492 tem_type = signed_type_for (TREE_TYPE (tem));
13493 tem = fold_convert_loc (loc, tem_type, tem);
13494 }
13495 else if (common == 0)
13496 {
13497 tem_type = unsigned_type_for (TREE_TYPE (tem));
13498 tem = fold_convert_loc (loc, tem_type, tem);
13499 }
13500 else
13501 tem = NULL;
13502 }
13503
13504 if (tem)
13505 return
13506 fold_convert_loc (loc, type,
13507 fold_build2_loc (loc, BIT_AND_EXPR,
13508 TREE_TYPE (tem), tem,
13509 fold_convert_loc (loc,
13510 TREE_TYPE (tem),
13511 arg1)));
13512 }
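/* Editor's note (not part of the original source): on a 32-bit int
   this sign-bit transformation rewrites

       int f (int a) { return a < 0 ? INT_MIN : 0; }

   into "a & INT_MIN", since INT_MIN is exactly the sign bit of A and
   no branch is needed.  */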
13513
13514 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13515 already handled above. */
13516 if (TREE_CODE (arg0) == BIT_AND_EXPR
13517 && integer_onep (TREE_OPERAND (arg0, 1))
13518 && integer_zerop (op2)
13519 && integer_pow2p (arg1))
13520 {
13521 tree tem = TREE_OPERAND (arg0, 0);
13522 STRIP_NOPS (tem);
13523 if (TREE_CODE (tem) == RSHIFT_EXPR
13524 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13525 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13526 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13527 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13528 TREE_OPERAND (tem, 0), arg1);
13529 }
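/* Editor's note (not part of the original source): a hypothetical
   instance of the bit-test transformation above with N == 3:

       int f (int a) { return ((a >> 3) & 1) ? 8 : 0; }

   Here 8 == 1 << 3 matches the shift count, so the body folds to
   "a & 8".  */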
13530
13531 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13532 is probably obsolete because the first operand should be a
13533 truth value (that's why we have the two cases above), but let's
13534 leave it in until we can confirm this for all front-ends. */
13535 if (integer_zerop (op2)
13536 && TREE_CODE (arg0) == NE_EXPR
13537 && integer_zerop (TREE_OPERAND (arg0, 1))
13538 && integer_pow2p (arg1)
13539 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13540 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13541 arg1, OEP_ONLY_CONST))
13542 return pedantic_non_lvalue_loc (loc,
13543 fold_convert_loc (loc, type,
13544 TREE_OPERAND (arg0, 0)));
13545
13546 /* Disable the transformations below for vectors, since
13547 fold_binary_op_with_conditional_arg may undo them immediately,
13548 yielding an infinite loop. */
13549 if (code == VEC_COND_EXPR)
13550 return NULL_TREE;
13551
13552 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13553 if (integer_zerop (op2)
13554 && truth_value_p (TREE_CODE (arg0))
13555 && truth_value_p (TREE_CODE (arg1))
13556 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13557 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13558 : TRUTH_ANDIF_EXPR,
13559 type, fold_convert_loc (loc, type, arg0), arg1);
13560
13561 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13562 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13563 && truth_value_p (TREE_CODE (arg0))
13564 && truth_value_p (TREE_CODE (arg1))
13565 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13566 {
13567 location_t loc0 = expr_location_or (arg0, loc);
13568 /* Only perform transformation if ARG0 is easily inverted. */
13569 tem = fold_invert_truthvalue (loc0, arg0);
13570 if (tem)
13571 return fold_build2_loc (loc, code == VEC_COND_EXPR
13572 ? BIT_IOR_EXPR
13573 : TRUTH_ORIF_EXPR,
13574 type, fold_convert_loc (loc, type, tem),
13575 arg1);
13576 }
13577
13578 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13579 if (integer_zerop (arg1)
13580 && truth_value_p (TREE_CODE (arg0))
13581 && truth_value_p (TREE_CODE (op2))
13582 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13583 {
13584 location_t loc0 = expr_location_or (arg0, loc);
13585 /* Only perform transformation if ARG0 is easily inverted. */
13586 tem = fold_invert_truthvalue (loc0, arg0);
13587 if (tem)
13588 return fold_build2_loc (loc, code == VEC_COND_EXPR
13589 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13590 type, fold_convert_loc (loc, type, tem),
13591 op2);
13592 }
13593
13594 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13595 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13596 && truth_value_p (TREE_CODE (arg0))
13597 && truth_value_p (TREE_CODE (op2))
13598 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13599 return fold_build2_loc (loc, code == VEC_COND_EXPR
13600 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13601 type, fold_convert_loc (loc, type, arg0), op2);
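/* Editor's note (not part of the original source): the four
   truth-value conversions above, summarized at the source level for
   truth-valued a and b:

       a ? b : 0   becomes   a && b
       a ? b : 1   becomes   !a || b
       a ? 0 : b   becomes   !a && b
       a ? 1 : b   becomes   a || b

   For VEC_COND_EXPR the same shapes use BIT_AND_EXPR and
   BIT_IOR_EXPR, with all-ones element masks playing the role of 1.  */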
13602
13603 return NULL_TREE;
13604
13605 case CALL_EXPR:
13606 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13607 of fold_ternary on them. */
13608 gcc_unreachable ();
13609
13610 case BIT_FIELD_REF:
13611 if ((TREE_CODE (arg0) == VECTOR_CST
13612 || (TREE_CODE (arg0) == CONSTRUCTOR
13613 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13614 && (type == TREE_TYPE (TREE_TYPE (arg0))
13615 || (TREE_CODE (type) == VECTOR_TYPE
13616 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13617 {
13618 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13619 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13620 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13621 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13622
13623 if (n != 0
13624 && (idx % width) == 0
13625 && (n % width) == 0
13626 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13627 {
13628 idx = idx / width;
13629 n = n / width;
13630
13631 if (TREE_CODE (arg0) == VECTOR_CST)
13632 {
13633 if (n == 1)
13634 return VECTOR_CST_ELT (arg0, idx);
13635
13636 tree *vals = XALLOCAVEC (tree, n);
13637 for (unsigned i = 0; i < n; ++i)
13638 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13639 return build_vector (type, vals);
13640 }
13641
13642 /* Constructor elements can be subvectors. */
13643 unsigned HOST_WIDE_INT k = 1;
13644 if (CONSTRUCTOR_NELTS (arg0) != 0)
13645 {
13646 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13647 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13648 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13649 }
13650
13651 /* We keep an exact subset of the constructor elements. */
13652 if ((idx % k) == 0 && (n % k) == 0)
13653 {
13654 if (CONSTRUCTOR_NELTS (arg0) == 0)
13655 return build_constructor (type, NULL);
13656 idx /= k;
13657 n /= k;
13658 if (n == 1)
13659 {
13660 if (idx < CONSTRUCTOR_NELTS (arg0))
13661 return CONSTRUCTOR_ELT (arg0, idx)->value;
13662 return build_zero_cst (type);
13663 }
13664
13665 vec<constructor_elt, va_gc> *vals;
13666 vec_alloc (vals, n);
13667 for (unsigned i = 0;
13668 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13669 ++i)
13670 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13671 CONSTRUCTOR_ELT
13672 (arg0, idx + i)->value);
13673 return build_constructor (type, vals);
13674 }
13675 /* The bitfield references a single constructor element. */
13676 else if (idx + n <= (idx / k + 1) * k)
13677 {
13678 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13679 return build_zero_cst (type);
13680 else if (n == k)
13681 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13682 else
13683 return fold_build3_loc (loc, code, type,
13684 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13685 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13686 }
13687 }
13688 }
13689
13690 /* A bit-field-ref that references the full argument can be stripped. */
13691 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13692 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13693 && integer_zerop (op2))
13694 return fold_convert_loc (loc, type, arg0);
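/* Editor's note (not part of the original source): for example,
   BIT_FIELD_REF <x, 32, 0> on a 32-bit integer x covers every bit of
   x, so it folds to a plain conversion of x to TYPE.  */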
13695
13696 /* On constants we can use native encode/interpret to constant
13697 fold (nearly) all BIT_FIELD_REFs. */
13698 if (CONSTANT_CLASS_P (arg0)
13699 && can_native_interpret_type_p (type)
13700 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13701 /* This limitation should not be necessary; we just need to
13702 round this up to mode size. */
13703 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13704 /* Need bit-shifting of the buffer to relax the following. */
13705 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13706 {
13707 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13708 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13709 unsigned HOST_WIDE_INT clen;
13710 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13711 /* ??? We cannot tell native_encode_expr to start at
13712 some random byte only. So limit ourselves to a reasonable amount
13713 of work. */
13714 if (clen <= 4096)
13715 {
13716 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13717 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13718 if (len > 0
13719 && len * BITS_PER_UNIT >= bitpos + bitsize)
13720 {
13721 tree v = native_interpret_expr (type,
13722 b + bitpos / BITS_PER_UNIT,
13723 bitsize / BITS_PER_UNIT);
13724 if (v)
13725 return v;
13726 }
13727 }
13728 }
13729
13730 return NULL_TREE;
13731
13732 case FMA_EXPR:
13733 /* For integers we can decompose the FMA if possible. */
13734 if (TREE_CODE (arg0) == INTEGER_CST
13735 && TREE_CODE (arg1) == INTEGER_CST)
13736 return fold_build2_loc (loc, PLUS_EXPR, type,
13737 const_binop (MULT_EXPR, arg0, arg1), arg2);
13738 if (integer_zerop (arg2))
13739 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13740
13741 return fold_fma (loc, type, arg0, arg1, arg2);
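/* Editor's note (not part of the original source): for example,
   FMA_EXPR <4, 5, c> decomposes to the PLUS_EXPR "20 + c", and
   FMA_EXPR <a, b, 0> becomes the MULT_EXPR "a * b".  */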
13742
13743 case VEC_PERM_EXPR:
13744 if (TREE_CODE (arg2) == VECTOR_CST)
13745 {
13746 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13747 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13748 unsigned char *sel2 = sel + nelts;
13749 bool need_mask_canon = false;
13750 bool need_mask_canon2 = false;
13751 bool all_in_vec0 = true;
13752 bool all_in_vec1 = true;
13753 bool maybe_identity = true;
13754 bool single_arg = (op0 == op1);
13755 bool changed = false;
13756
13757 mask2 = 2 * nelts - 1;
13758 mask = single_arg ? (nelts - 1) : mask2;
13759 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13760 for (i = 0; i < nelts; i++)
13761 {
13762 tree val = VECTOR_CST_ELT (arg2, i);
13763 if (TREE_CODE (val) != INTEGER_CST)
13764 return NULL_TREE;
13765
13766 /* Make sure that the perm value is in an acceptable
13767 range. */
13768 wide_int t = val;
13769 need_mask_canon |= wi::gtu_p (t, mask);
13770 need_mask_canon2 |= wi::gtu_p (t, mask2);
13771 sel[i] = t.to_uhwi () & mask;
13772 sel2[i] = t.to_uhwi () & mask2;
13773
13774 if (sel[i] < nelts)
13775 all_in_vec1 = false;
13776 else
13777 all_in_vec0 = false;
13778
13779 if ((sel[i] & (nelts-1)) != i)
13780 maybe_identity = false;
13781 }
13782
13783 if (maybe_identity)
13784 {
13785 if (all_in_vec0)
13786 return op0;
13787 if (all_in_vec1)
13788 return op1;
13789 }
13790
13791 if (all_in_vec0)
13792 op1 = op0;
13793 else if (all_in_vec1)
13794 {
13795 op0 = op1;
13796 for (i = 0; i < nelts; i++)
13797 sel[i] -= nelts;
13798 need_mask_canon = true;
13799 }
13800
13801 if ((TREE_CODE (op0) == VECTOR_CST
13802 || TREE_CODE (op0) == CONSTRUCTOR)
13803 && (TREE_CODE (op1) == VECTOR_CST
13804 || TREE_CODE (op1) == CONSTRUCTOR))
13805 {
13806 tree t = fold_vec_perm (type, op0, op1, sel);
13807 if (t != NULL_TREE)
13808 return t;
13809 }
13810
13811 if (op0 == op1 && !single_arg)
13812 changed = true;
13813
13814 /* Some targets are deficient and fail to expand a single
13815 argument permutation while still allowing an equivalent
13816 2-argument version. */
13817 if (need_mask_canon && arg2 == op2
13818 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13819 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13820 {
13821 need_mask_canon = need_mask_canon2;
13822 sel = sel2;
13823 }
13824
13825 if (need_mask_canon && arg2 == op2)
13826 {
13827 tree *tsel = XALLOCAVEC (tree, nelts);
13828 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13829 for (i = 0; i < nelts; i++)
13830 tsel[i] = build_int_cst (eltype, sel[i]);
13831 op2 = build_vector (TREE_TYPE (arg2), tsel);
13832 changed = true;
13833 }
13834
13835 if (changed)
13836 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13837 }
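/* Editor's note (not part of the original source): for a V4SI
   permutation VEC_PERM_EXPR <a, b, {0, 1, 2, 3}> the selector is an
   in-order reference to the first operand, so all_in_vec0 and
   maybe_identity hold above and the result is simply "a"; likewise
   {4, 5, 6, 7} yields "b" unchanged.  */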
13838 return NULL_TREE;
13839
13840 default:
13841 return NULL_TREE;
13842 } /* switch (code) */
13843 }
13844
13845 /* Perform constant folding and related simplification of EXPR.
13846 The related simplifications include x*1 => x, x*0 => 0, etc.,
13847 and application of the associative law.
13848 NOP_EXPR conversions may be removed freely (as long as we
13849 are careful not to change the type of the overall expression).
13850 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13851 but we can constant-fold them if they have constant operands. */
13852
13853 #ifdef ENABLE_FOLD_CHECKING
13854 # define fold(x) fold_1 (x)
13855 static tree fold_1 (tree);
13856 static
13857 #endif
13858 tree
13859 fold (tree expr)
13860 {
13861 const tree t = expr;
13862 enum tree_code code = TREE_CODE (t);
13863 enum tree_code_class kind = TREE_CODE_CLASS (code);
13864 tree tem;
13865 location_t loc = EXPR_LOCATION (expr);
13866
13867 /* Return right away if a constant. */
13868 if (kind == tcc_constant)
13869 return t;
13870
13871 /* CALL_EXPR-like objects with variable numbers of operands are
13872 treated specially. */
13873 if (kind == tcc_vl_exp)
13874 {
13875 if (code == CALL_EXPR)
13876 {
13877 tem = fold_call_expr (loc, expr, false);
13878 return tem ? tem : expr;
13879 }
13880 return expr;
13881 }
13882
13883 if (IS_EXPR_CODE_CLASS (kind))
13884 {
13885 tree type = TREE_TYPE (t);
13886 tree op0, op1, op2;
13887
13888 switch (TREE_CODE_LENGTH (code))
13889 {
13890 case 1:
13891 op0 = TREE_OPERAND (t, 0);
13892 tem = fold_unary_loc (loc, code, type, op0);
13893 return tem ? tem : expr;
13894 case 2:
13895 op0 = TREE_OPERAND (t, 0);
13896 op1 = TREE_OPERAND (t, 1);
13897 tem = fold_binary_loc (loc, code, type, op0, op1);
13898 return tem ? tem : expr;
13899 case 3:
13900 op0 = TREE_OPERAND (t, 0);
13901 op1 = TREE_OPERAND (t, 1);
13902 op2 = TREE_OPERAND (t, 2);
13903 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13904 return tem ? tem : expr;
13905 default:
13906 break;
13907 }
13908 }
13909
13910 switch (code)
13911 {
13912 case ARRAY_REF:
13913 {
13914 tree op0 = TREE_OPERAND (t, 0);
13915 tree op1 = TREE_OPERAND (t, 1);
13916
13917 if (TREE_CODE (op1) == INTEGER_CST
13918 && TREE_CODE (op0) == CONSTRUCTOR
13919 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13920 {
13921 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13922 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13923 unsigned HOST_WIDE_INT begin = 0;
13924
13925 /* Find a matching index by means of a binary search. */
13926 while (begin != end)
13927 {
13928 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13929 tree index = (*elts)[middle].index;
13930
13931 if (TREE_CODE (index) == INTEGER_CST
13932 && tree_int_cst_lt (index, op1))
13933 begin = middle + 1;
13934 else if (TREE_CODE (index) == INTEGER_CST
13935 && tree_int_cst_lt (op1, index))
13936 end = middle;
13937 else if (TREE_CODE (index) == RANGE_EXPR
13938 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13939 begin = middle + 1;
13940 else if (TREE_CODE (index) == RANGE_EXPR
13941 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13942 end = middle;
13943 else
13944 return (*elts)[middle].value;
13945 }
13946 }
13947
13948 return t;
13949 }
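/* Editor's note (not part of the original source): when OP0 is the
   CONSTRUCTOR {10, 20, 30} and OP1 is 1, the binary search above
   locates the (index, value) pair for index 1 and yields 20;
   RANGE_EXPR indexes arise from designators such as "[2 ... 7] = 0".  */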
13950
13951 /* Return a VECTOR_CST if possible. */
13952 case CONSTRUCTOR:
13953 {
13954 tree type = TREE_TYPE (t);
13955 if (TREE_CODE (type) != VECTOR_TYPE)
13956 return t;
13957
13958 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13959 unsigned HOST_WIDE_INT idx, pos = 0;
13960 tree value;
13961
13962 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13963 {
13964 if (!CONSTANT_CLASS_P (value))
13965 return t;
13966 if (TREE_CODE (value) == VECTOR_CST)
13967 {
13968 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13969 vec[pos++] = VECTOR_CST_ELT (value, i);
13970 }
13971 else
13972 vec[pos++] = value;
13973 }
13974 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13975 vec[pos] = build_zero_cst (TREE_TYPE (type));
13976
13977 return build_vector (type, vec);
13978 }
13979
13980 case CONST_DECL:
13981 return fold (DECL_INITIAL (t));
13982
13983 default:
13984 return t;
13985 } /* switch (code) */
13986 }
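/* Editor's note (not part of the original source): a minimal,
   hypothetical GCC-internal use of the folding entry points.
   fold_build2_loc resolves to fold_build2_stat_loc, defined further
   below; the actual simplification is done by fold_binary_loc.

       tree sum = fold_build2_loc (UNKNOWN_LOCATION, PLUS_EXPR,
                                   integer_type_node,
                                   build_int_cst (integer_type_node, 2),
                                   build_int_cst (integer_type_node, 3));

   SUM is then the INTEGER_CST 5 rather than a PLUS_EXPR.  */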
13987
13988 #ifdef ENABLE_FOLD_CHECKING
13989 #undef fold
13990
13991 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13992 hash_table<pointer_hash<const tree_node> > *);
13993 static void fold_check_failed (const_tree, const_tree);
13994 void print_fold_checksum (const_tree);
13995
13996 /* When --enable-checking=fold, compute a digest of expr before
13997 and after the actual fold call to see whether fold accidentally
13998 changed the original expr. */
13999
14000 tree
14001 fold (tree expr)
14002 {
14003 tree ret;
14004 struct md5_ctx ctx;
14005 unsigned char checksum_before[16], checksum_after[16];
14006 hash_table<pointer_hash<const tree_node> > ht (32);
14007
14008 md5_init_ctx (&ctx);
14009 fold_checksum_tree (expr, &ctx, &ht);
14010 md5_finish_ctx (&ctx, checksum_before);
14011 ht.empty ();
14012
14013 ret = fold_1 (expr);
14014
14015 md5_init_ctx (&ctx);
14016 fold_checksum_tree (expr, &ctx, &ht);
14017 md5_finish_ctx (&ctx, checksum_after);
14018
14019 if (memcmp (checksum_before, checksum_after, 16))
14020 fold_check_failed (expr, ret);
14021
14022 return ret;
14023 }
14024
14025 void
14026 print_fold_checksum (const_tree expr)
14027 {
14028 struct md5_ctx ctx;
14029 unsigned char checksum[16], cnt;
14030 hash_table<pointer_hash<const tree_node> > ht (32);
14031
14032 md5_init_ctx (&ctx);
14033 fold_checksum_tree (expr, &ctx, &ht);
14034 md5_finish_ctx (&ctx, checksum);
14035 for (cnt = 0; cnt < 16; ++cnt)
14036 fprintf (stderr, "%02x", checksum[cnt]);
14037 putc ('\n', stderr);
14038 }
14039
14040 static void
14041 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14042 {
14043 internal_error ("fold check: original tree changed by fold");
14044 }
14045
14046 static void
14047 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14048 hash_table<pointer_hash <const tree_node> > *ht)
14049 {
14050 const tree_node **slot;
14051 enum tree_code code;
14052 union tree_node buf;
14053 int i, len;
14054
14055 recursive_label:
14056 if (expr == NULL)
14057 return;
14058 slot = ht->find_slot (expr, INSERT);
14059 if (*slot != NULL)
14060 return;
14061 *slot = expr;
14062 code = TREE_CODE (expr);
14063 if (TREE_CODE_CLASS (code) == tcc_declaration
14064 && DECL_ASSEMBLER_NAME_SET_P (expr))
14065 {
14066 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14067 memcpy ((char *) &buf, expr, tree_size (expr));
14068 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14069 expr = (tree) &buf;
14070 }
14071 else if (TREE_CODE_CLASS (code) == tcc_type
14072 && (TYPE_POINTER_TO (expr)
14073 || TYPE_REFERENCE_TO (expr)
14074 || TYPE_CACHED_VALUES_P (expr)
14075 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14076 || TYPE_NEXT_VARIANT (expr)))
14077 {
14078 /* Allow these fields to be modified. */
14079 tree tmp;
14080 memcpy ((char *) &buf, expr, tree_size (expr));
14081 expr = tmp = (tree) &buf;
14082 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14083 TYPE_POINTER_TO (tmp) = NULL;
14084 TYPE_REFERENCE_TO (tmp) = NULL;
14085 TYPE_NEXT_VARIANT (tmp) = NULL;
14086 if (TYPE_CACHED_VALUES_P (tmp))
14087 {
14088 TYPE_CACHED_VALUES_P (tmp) = 0;
14089 TYPE_CACHED_VALUES (tmp) = NULL;
14090 }
14091 }
14092 md5_process_bytes (expr, tree_size (expr), ctx);
14093 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14094 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14095 if (TREE_CODE_CLASS (code) != tcc_type
14096 && TREE_CODE_CLASS (code) != tcc_declaration
14097 && code != TREE_LIST
14098 && code != SSA_NAME
14099 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14100 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14101 switch (TREE_CODE_CLASS (code))
14102 {
14103 case tcc_constant:
14104 switch (code)
14105 {
14106 case STRING_CST:
14107 md5_process_bytes (TREE_STRING_POINTER (expr),
14108 TREE_STRING_LENGTH (expr), ctx);
14109 break;
14110 case COMPLEX_CST:
14111 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14112 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14113 break;
14114 case VECTOR_CST:
14115 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14116 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14117 break;
14118 default:
14119 break;
14120 }
14121 break;
14122 case tcc_exceptional:
14123 switch (code)
14124 {
14125 case TREE_LIST:
14126 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14127 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14128 expr = TREE_CHAIN (expr);
14129 goto recursive_label;
14130 break;
14131 case TREE_VEC:
14132 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14133 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14134 break;
14135 default:
14136 break;
14137 }
14138 break;
14139 case tcc_expression:
14140 case tcc_reference:
14141 case tcc_comparison:
14142 case tcc_unary:
14143 case tcc_binary:
14144 case tcc_statement:
14145 case tcc_vl_exp:
14146 len = TREE_OPERAND_LENGTH (expr);
14147 for (i = 0; i < len; ++i)
14148 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14149 break;
14150 case tcc_declaration:
14151 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14152 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14153 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14154 {
14155 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14156 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14157 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14158 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14159 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14160 }
14161
14162 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14163 {
14164 if (TREE_CODE (expr) == FUNCTION_DECL)
14165 {
14166 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14167 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14168 }
14169 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14170 }
14171 break;
14172 case tcc_type:
14173 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14174 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14175 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14176 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14177 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14178 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14179 if (INTEGRAL_TYPE_P (expr)
14180 || SCALAR_FLOAT_TYPE_P (expr))
14181 {
14182 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14183 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14184 }
14185 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14186 if (TREE_CODE (expr) == RECORD_TYPE
14187 || TREE_CODE (expr) == UNION_TYPE
14188 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14189 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14190 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14191 break;
14192 default:
14193 break;
14194 }
14195 }
14196
14197 /* Helper function for outputting the checksum of a tree T. When
14198 debugging with gdb, you can "define mynext" to be "next" followed
14199 by "call debug_fold_checksum (op0)", then just trace down till the
14200 outputs differ. */
14201
14202 DEBUG_FUNCTION void
14203 debug_fold_checksum (const_tree t)
14204 {
14205 int i;
14206 unsigned char checksum[16];
14207 struct md5_ctx ctx;
14208 hash_table<pointer_hash<const tree_node> > ht (32);
14209
14210 md5_init_ctx (&ctx);
14211 fold_checksum_tree (t, &ctx, &ht);
14212 md5_finish_ctx (&ctx, checksum);
14213 ht.empty ();
14214
14215 for (i = 0; i < 16; i++)
14216 fprintf (stderr, "%d ", checksum[i]);
14217
14218 fprintf (stderr, "\n");
14219 }
14220
14221 #endif
14222
14223 /* Fold a unary tree expression with code CODE of type TYPE with an
14224 operand OP0. LOC is the location of the resulting expression.
14225 Return a folded expression if successful. Otherwise, return a tree
14226 expression with code CODE of type TYPE with an operand OP0. */
14227
14228 tree
14229 fold_build1_stat_loc (location_t loc,
14230 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14231 {
14232 tree tem;
14233 #ifdef ENABLE_FOLD_CHECKING
14234 unsigned char checksum_before[16], checksum_after[16];
14235 struct md5_ctx ctx;
14236 hash_table<pointer_hash<const tree_node> > ht (32);
14237
14238 md5_init_ctx (&ctx);
14239 fold_checksum_tree (op0, &ctx, &ht);
14240 md5_finish_ctx (&ctx, checksum_before);
14241 ht.empty ();
14242 #endif
14243
14244 tem = fold_unary_loc (loc, code, type, op0);
14245 if (!tem)
14246 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14247
14248 #ifdef ENABLE_FOLD_CHECKING
14249 md5_init_ctx (&ctx);
14250 fold_checksum_tree (op0, &ctx, &ht);
14251 md5_finish_ctx (&ctx, checksum_after);
14252
14253 if (memcmp (checksum_before, checksum_after, 16))
14254 fold_check_failed (op0, tem);
14255 #endif
14256 return tem;
14257 }
14258
14259 /* Fold a binary tree expression with code CODE of type TYPE with
14260 operands OP0 and OP1. LOC is the location of the resulting
14261 expression. Return a folded expression if successful. Otherwise,
14262 return a tree expression with code CODE of type TYPE with operands
14263 OP0 and OP1. */
14264
14265 tree
14266 fold_build2_stat_loc (location_t loc,
14267 enum tree_code code, tree type, tree op0, tree op1
14268 MEM_STAT_DECL)
14269 {
14270 tree tem;
14271 #ifdef ENABLE_FOLD_CHECKING
14272 unsigned char checksum_before_op0[16],
14273 checksum_before_op1[16],
14274 checksum_after_op0[16],
14275 checksum_after_op1[16];
14276 struct md5_ctx ctx;
14277 hash_table<pointer_hash<const tree_node> > ht (32);
14278
14279 md5_init_ctx (&ctx);
14280 fold_checksum_tree (op0, &ctx, &ht);
14281 md5_finish_ctx (&ctx, checksum_before_op0);
14282 ht.empty ();
14283
14284 md5_init_ctx (&ctx);
14285 fold_checksum_tree (op1, &ctx, &ht);
14286 md5_finish_ctx (&ctx, checksum_before_op1);
14287 ht.empty ();
14288 #endif
14289
14290 tem = fold_binary_loc (loc, code, type, op0, op1);
14291 if (!tem)
14292 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14293
14294 #ifdef ENABLE_FOLD_CHECKING
14295 md5_init_ctx (&ctx);
14296 fold_checksum_tree (op0, &ctx, &ht);
14297 md5_finish_ctx (&ctx, checksum_after_op0);
14298 ht.empty ();
14299
14300 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14301 fold_check_failed (op0, tem);
14302
14303 md5_init_ctx (&ctx);
14304 fold_checksum_tree (op1, &ctx, &ht);
14305 md5_finish_ctx (&ctx, checksum_after_op1);
14306
14307 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14308 fold_check_failed (op1, tem);
14309 #endif
14310 return tem;
14311 }
14312
14313 /* Fold a ternary tree expression with code CODE of type TYPE with
14314 operands OP0, OP1, and OP2. Return a folded expression if
14315 successful. Otherwise, return a tree expression with code CODE of
14316 type TYPE with operands OP0, OP1, and OP2. */
14317
14318 tree
14319 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14320 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14321 {
14322 tree tem;
14323 #ifdef ENABLE_FOLD_CHECKING
14324 unsigned char checksum_before_op0[16],
14325 checksum_before_op1[16],
14326 checksum_before_op2[16],
14327 checksum_after_op0[16],
14328 checksum_after_op1[16],
14329 checksum_after_op2[16];
14330 struct md5_ctx ctx;
14331 hash_table<pointer_hash<const tree_node> > ht (32);
14332
14333 md5_init_ctx (&ctx);
14334 fold_checksum_tree (op0, &ctx, &ht);
14335 md5_finish_ctx (&ctx, checksum_before_op0);
14336 ht.empty ();
14337
14338 md5_init_ctx (&ctx);
14339 fold_checksum_tree (op1, &ctx, &ht);
14340 md5_finish_ctx (&ctx, checksum_before_op1);
14341 ht.empty ();
14342
14343 md5_init_ctx (&ctx);
14344 fold_checksum_tree (op2, &ctx, &ht);
14345 md5_finish_ctx (&ctx, checksum_before_op2);
14346 ht.empty ();
14347 #endif
14348
14349 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14350 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14351 if (!tem)
14352 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14353
14354 #ifdef ENABLE_FOLD_CHECKING
14355 md5_init_ctx (&ctx);
14356 fold_checksum_tree (op0, &ctx, &ht);
14357 md5_finish_ctx (&ctx, checksum_after_op0);
14358 ht.empty ();
14359
14360 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14361 fold_check_failed (op0, tem);
14362
14363 md5_init_ctx (&ctx);
14364 fold_checksum_tree (op1, &ctx, &ht);
14365 md5_finish_ctx (&ctx, checksum_after_op1);
14366 ht.empty ();
14367
14368 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14369 fold_check_failed (op1, tem);
14370
14371 md5_init_ctx (&ctx);
14372 fold_checksum_tree (op2, &ctx, &ht);
14373 md5_finish_ctx (&ctx, checksum_after_op2);
14374
14375 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14376 fold_check_failed (op2, tem);
14377 #endif
14378 return tem;
14379 }
14380
14381 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14382 arguments in ARGARRAY, and a null static chain.
14383 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14384 of type TYPE from the given operands as constructed by build_call_array. */
14385
14386 tree
14387 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14388 int nargs, tree *argarray)
14389 {
14390 tree tem;
14391 #ifdef ENABLE_FOLD_CHECKING
14392 unsigned char checksum_before_fn[16],
14393 checksum_before_arglist[16],
14394 checksum_after_fn[16],
14395 checksum_after_arglist[16];
14396 struct md5_ctx ctx;
14397 hash_table<pointer_hash<const tree_node> > ht (32);
14398 int i;
14399
14400 md5_init_ctx (&ctx);
14401 fold_checksum_tree (fn, &ctx, &ht);
14402 md5_finish_ctx (&ctx, checksum_before_fn);
14403 ht.empty ();
14404
14405 md5_init_ctx (&ctx);
14406 for (i = 0; i < nargs; i++)
14407 fold_checksum_tree (argarray[i], &ctx, &ht);
14408 md5_finish_ctx (&ctx, checksum_before_arglist);
14409 ht.empty ();
14410 #endif
14411
14412 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14413 if (!tem)
14414 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14415
14416 #ifdef ENABLE_FOLD_CHECKING
14417 md5_init_ctx (&ctx);
14418 fold_checksum_tree (fn, &ctx, &ht);
14419 md5_finish_ctx (&ctx, checksum_after_fn);
14420 ht.empty ();
14421
14422 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14423 fold_check_failed (fn, tem);
14424
14425 md5_init_ctx (&ctx);
14426 for (i = 0; i < nargs; i++)
14427 fold_checksum_tree (argarray[i], &ctx, &ht);
14428 md5_finish_ctx (&ctx, checksum_after_arglist);
14429
14430 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14431 fold_check_failed (NULL_TREE, tem);
14432 #endif
14433 return tem;
14434 }
14435
14436 /* Perform constant folding and related simplification of initializer
14437 expression EXPR. These behave identically to "fold_buildN" but ignore
14438 potential run-time traps and exceptions that fold must preserve. */
14439
14440 #define START_FOLD_INIT \
14441 int saved_signaling_nans = flag_signaling_nans;\
14442 int saved_trapping_math = flag_trapping_math;\
14443 int saved_rounding_math = flag_rounding_math;\
14444 int saved_trapv = flag_trapv;\
14445 int saved_folding_initializer = folding_initializer;\
14446 flag_signaling_nans = 0;\
14447 flag_trapping_math = 0;\
14448 flag_rounding_math = 0;\
14449 flag_trapv = 0;\
14450 folding_initializer = 1;
14451
14452 #define END_FOLD_INIT \
14453 flag_signaling_nans = saved_signaling_nans;\
14454 flag_trapping_math = saved_trapping_math;\
14455 flag_rounding_math = saved_rounding_math;\
14456 flag_trapv = saved_trapv;\
14457 folding_initializer = saved_folding_initializer;
14458
14459 tree
14460 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14461 tree type, tree op)
14462 {
14463 tree result;
14464 START_FOLD_INIT;
14465
14466 result = fold_build1_loc (loc, code, type, op);
14467
14468 END_FOLD_INIT;
14469 return result;
14470 }
14471
14472 tree
14473 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14474 tree type, tree op0, tree op1)
14475 {
14476 tree result;
14477 START_FOLD_INIT;
14478
14479 result = fold_build2_loc (loc, code, type, op0, op1);
14480
14481 END_FOLD_INIT;
14482 return result;
14483 }
14484
14485 tree
14486 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14487 int nargs, tree *argarray)
14488 {
14489 tree result;
14490 START_FOLD_INIT;
14491
14492 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14493
14494 END_FOLD_INIT;
14495 return result;
14496 }
14497
14498 #undef START_FOLD_INIT
14499 #undef END_FOLD_INIT
14500
14501 /* Determine if first argument is a multiple of second argument. Return 0 if
14502 it is not, or we cannot easily determine that it is.
14503
14504 An example of the sort of thing we care about (at this point; this routine
14505 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14506 fold cases do now) is discovering that
14507
14508 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14509
14510 is a multiple of
14511
14512 SAVE_EXPR (J * 8)
14513
14514 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14515
14516 This code also handles discovering that
14517
14518 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14519
14520 is a multiple of 8 so we don't have to worry about dealing with a
14521 possible remainder.
14522
14523 Note that we *look* inside a SAVE_EXPR only to determine how it was
14524 calculated; it is not safe for fold to do much of anything else with the
14525 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14526 at run time. For example, the latter example above *cannot* be implemented
14527 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14528 evaluation time of the original SAVE_EXPR is not necessarily the same at
14529 the time the new expression is evaluated. The only optimization of this
14530 sort that would be valid is changing
14531
14532 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14533
14534 divided by 8 to
14535
14536 SAVE_EXPR (I) * SAVE_EXPR (J)
14537
14538 (where the same SAVE_EXPR (J) is used in the original and the
14539 transformed version). */
14540
14541 int
14542 multiple_of_p (tree type, const_tree top, const_tree bottom)
14543 {
14544 if (operand_equal_p (top, bottom, 0))
14545 return 1;
14546
14547 if (TREE_CODE (type) != INTEGER_TYPE)
14548 return 0;
14549
14550 switch (TREE_CODE (top))
14551 {
14552 case BIT_AND_EXPR:
14553 /* Bitwise and provides a power of two multiple. If the mask is
14554 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14555 if (!integer_pow2p (bottom))
14556 return 0;
14557 /* FALLTHRU */
14558
14559 case MULT_EXPR:
14560 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14561 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14562
14563 case PLUS_EXPR:
14564 case MINUS_EXPR:
14565 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14566 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14567
14568 case LSHIFT_EXPR:
14569 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14570 {
14571 tree op1, t1;
14572
14573 op1 = TREE_OPERAND (top, 1);
14574 /* const_binop may not detect overflow correctly,
14575 so check for it explicitly here. */
14576 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14577 && 0 != (t1 = fold_convert (type,
14578 const_binop (LSHIFT_EXPR,
14579 size_one_node,
14580 op1)))
14581 && !TREE_OVERFLOW (t1))
14582 return multiple_of_p (type, t1, bottom);
14583 }
14584 return 0;
14585
14586 case NOP_EXPR:
14587 /* Can't handle conversions from non-integral or wider integral type. */
14588 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14589 || (TYPE_PRECISION (type)
14590 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14591 return 0;
14592
14593 /* .. fall through ... */
14594
14595 case SAVE_EXPR:
14596 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14597
14598 case COND_EXPR:
14599 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14600 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14601
14602 case INTEGER_CST:
14603 if (TREE_CODE (bottom) != INTEGER_CST
14604 || integer_zerop (bottom)
14605 || (TYPE_UNSIGNED (type)
14606 && (tree_int_cst_sgn (top) < 0
14607 || tree_int_cst_sgn (bottom) < 0)))
14608 return 0;
14609 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14610 SIGNED);
14611
14612 default:
14613 return 0;
14614 }
14615 }
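/* Editor's note (not part of the original source): hypothetical calls
   illustrating multiple_of_p on sizetype constants:

       multiple_of_p (sizetype, size_int (24), size_int (8))   returns 1
       multiple_of_p (sizetype, size_int (20), size_int (8))   returns 0

   and SAVE_EXPR (I) * SAVE_EXPR (J * 8) is recognized as a multiple
   of SAVE_EXPR (J * 8) through the MULT_EXPR case, because one factor
   is operand-equal to BOTTOM.  */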
14616
14617 /* Return true if CODE or TYPE is known to be non-negative. */
14618
14619 static bool
14620 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14621 {
14622 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14623 && truth_value_p (code))
14624 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14625 have a signed:1 type (where the values are -1 and 0). */
14626 return true;
14627 return false;
14628 }
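/* Editor's note (not part of the original source): a comparison such
   as "a < b" has a truth-valued code, so it is non-negative in any
   type of precision greater than 1.  The signed:1 exception arises
   for a bit-field such as

       struct s { signed int f : 1; };

   whose only values are 0 and -1.  */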
14629
14630 /* Return true if (CODE OP0) is known to be non-negative. If the return
14631 value is based on the assumption that signed overflow is undefined,
14632 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14633 *STRICT_OVERFLOW_P. */
14634
14635 bool
14636 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14637 bool *strict_overflow_p)
14638 {
14639 if (TYPE_UNSIGNED (type))
14640 return true;
14641
14642 switch (code)
14643 {
14644 case ABS_EXPR:
14645 /* We can't return 1 if flag_wrapv is set because
14646 ABS_EXPR<INT_MIN> = INT_MIN. */
14647 if (!INTEGRAL_TYPE_P (type))
14648 return true;
14649 if (TYPE_OVERFLOW_UNDEFINED (type))
14650 {
14651 *strict_overflow_p = true;
14652 return true;
14653 }
14654 break;
14655
14656 case NON_LVALUE_EXPR:
14657 case FLOAT_EXPR:
14658 case FIX_TRUNC_EXPR:
14659 return tree_expr_nonnegative_warnv_p (op0,
14660 strict_overflow_p);
14661
14662 CASE_CONVERT:
14663 {
14664 tree inner_type = TREE_TYPE (op0);
14665 tree outer_type = type;
14666
14667 if (TREE_CODE (outer_type) == REAL_TYPE)
14668 {
14669 if (TREE_CODE (inner_type) == REAL_TYPE)
14670 return tree_expr_nonnegative_warnv_p (op0,
14671 strict_overflow_p);
14672 if (INTEGRAL_TYPE_P (inner_type))
14673 {
14674 if (TYPE_UNSIGNED (inner_type))
14675 return true;
14676 return tree_expr_nonnegative_warnv_p (op0,
14677 strict_overflow_p);
14678 }
14679 }
14680 else if (INTEGRAL_TYPE_P (outer_type))
14681 {
14682 if (TREE_CODE (inner_type) == REAL_TYPE)
14683 return tree_expr_nonnegative_warnv_p (op0,
14684 strict_overflow_p);
14685 if (INTEGRAL_TYPE_P (inner_type))
14686 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14687 && TYPE_UNSIGNED (inner_type);
14688 }
14689 }
14690 break;
14691
14692 default:
14693 return tree_simple_nonnegative_warnv_p (code, type);
14694 }
14695
14696 /* We don't know the sign of `t', so be conservative and return false. */
14697 return false;
14698 }
14699
14700 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14701 value is based on the assumption that signed overflow is undefined,
14702 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14703 *STRICT_OVERFLOW_P. */
14704
14705 bool
14706 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14707 tree op1, bool *strict_overflow_p)
14708 {
14709 if (TYPE_UNSIGNED (type))
14710 return true;
14711
14712 switch (code)
14713 {
14714 case POINTER_PLUS_EXPR:
14715 case PLUS_EXPR:
14716 if (FLOAT_TYPE_P (type))
14717 return (tree_expr_nonnegative_warnv_p (op0,
14718 strict_overflow_p)
14719 && tree_expr_nonnegative_warnv_p (op1,
14720 strict_overflow_p));
14721
14722 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14723 both unsigned and at least 2 bits shorter than the result. */
14724 if (TREE_CODE (type) == INTEGER_TYPE
14725 && TREE_CODE (op0) == NOP_EXPR
14726 && TREE_CODE (op1) == NOP_EXPR)
14727 {
14728 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14729 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14730 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14731 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14732 {
14733 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14734 TYPE_PRECISION (inner2)) + 1;
14735 return prec < TYPE_PRECISION (type);
14736 }
14737 }
14738 break;
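/* Editor's note (not part of the original source): with 16-bit
   unsigned short and 32-bit int, (int) x + (int) y for unsigned short
   x, y fits in MAX (16, 16) + 1 == 17 bits, so the check above
   reports the sum non-negative.  */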
14739
14740 case MULT_EXPR:
14741 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14742 {
14743 /* x * x is always non-negative for floating point x
14744 or without overflow. */
14745 if (operand_equal_p (op0, op1, 0)
14746 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14747 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14748 {
14749 if (TYPE_OVERFLOW_UNDEFINED (type))
14750 *strict_overflow_p = true;
14751 return true;
14752 }
14753 }
14754
14755 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14756 both unsigned and their combined precision is less than the result's. */
14757 if (TREE_CODE (type) == INTEGER_TYPE
14758 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14759 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14760 {
14761 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14762 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14763 : TREE_TYPE (op0);
14764 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14765 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14766 : TREE_TYPE (op1);
14767
14768 bool unsigned0 = TYPE_UNSIGNED (inner0);
14769 bool unsigned1 = TYPE_UNSIGNED (inner1);
14770
14771 if (TREE_CODE (op0) == INTEGER_CST)
14772 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14773
14774 if (TREE_CODE (op1) == INTEGER_CST)
14775 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14776
14777 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14778 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14779 {
14780 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14781 ? tree_int_cst_min_precision (op0, UNSIGNED)
14782 : TYPE_PRECISION (inner0);
14783
14784 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14785 ? tree_int_cst_min_precision (op1, UNSIGNED)
14786 : TYPE_PRECISION (inner1);
14787
14788 return precision0 + precision1 < TYPE_PRECISION (type);
14789 }
14790 }
14791 return false;
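/* Editor's note (not part of the original source): with 8-bit
   unsigned char and 32-bit int, (int) x * (int) y for unsigned char
   x, y needs at most 8 + 8 == 16 bits, so the product is known
   non-negative without any overflow assumption.  */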
14792
14793 case BIT_AND_EXPR:
14794 case MAX_EXPR:
14795 return (tree_expr_nonnegative_warnv_p (op0,
14796 strict_overflow_p)
14797 || tree_expr_nonnegative_warnv_p (op1,
14798 strict_overflow_p));
14799
14800 case BIT_IOR_EXPR:
14801 case BIT_XOR_EXPR:
14802 case MIN_EXPR:
14803 case RDIV_EXPR:
14804 case TRUNC_DIV_EXPR:
14805 case CEIL_DIV_EXPR:
14806 case FLOOR_DIV_EXPR:
14807 case ROUND_DIV_EXPR:
14808 return (tree_expr_nonnegative_warnv_p (op0,
14809 strict_overflow_p)
14810 && tree_expr_nonnegative_warnv_p (op1,
14811 strict_overflow_p));
14812
14813 case TRUNC_MOD_EXPR:
14814 case CEIL_MOD_EXPR:
14815 case FLOOR_MOD_EXPR:
14816 case ROUND_MOD_EXPR:
14817 return tree_expr_nonnegative_warnv_p (op0,
14818 strict_overflow_p);
14819 default:
14820 return tree_simple_nonnegative_warnv_p (code, type);
14821 }
14822
14823 /* We don't know the sign of `t', so be conservative and return false. */
14824 return false;
14825 }
14826
14827 /* Return true if T is known to be non-negative. If the return
14828 value is based on the assumption that signed overflow is undefined,
14829 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14830 *STRICT_OVERFLOW_P. */
14831
14832 bool
14833 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14834 {
14835 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14836 return true;
14837
14838 switch (TREE_CODE (t))
14839 {
14840 case INTEGER_CST:
14841 return tree_int_cst_sgn (t) >= 0;
14842
14843 case REAL_CST:
14844 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14845
14846 case FIXED_CST:
14847 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14848
14849 case COND_EXPR:
14850 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14851 strict_overflow_p)
14852 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14853 strict_overflow_p));
14854 default:
14855 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14856 TREE_TYPE (t));
14857 }
14858 /* We don't know the sign of `t', so be conservative and return false. */
14859 return false;
14860 }
14861
14862 /* Return true if T is known to be non-negative. If the return
14863 value is based on the assumption that signed overflow is undefined,
14864 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14865 *STRICT_OVERFLOW_P. */
14866
14867 bool
14868 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14869 tree arg0, tree arg1, bool *strict_overflow_p)
14870 {
14871 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14872 switch (DECL_FUNCTION_CODE (fndecl))
14873 {
14874 CASE_FLT_FN (BUILT_IN_ACOS):
14875 CASE_FLT_FN (BUILT_IN_ACOSH):
14876 CASE_FLT_FN (BUILT_IN_CABS):
14877 CASE_FLT_FN (BUILT_IN_COSH):
14878 CASE_FLT_FN (BUILT_IN_ERFC):
14879 CASE_FLT_FN (BUILT_IN_EXP):
14880 CASE_FLT_FN (BUILT_IN_EXP10):
14881 CASE_FLT_FN (BUILT_IN_EXP2):
14882 CASE_FLT_FN (BUILT_IN_FABS):
14883 CASE_FLT_FN (BUILT_IN_FDIM):
14884 CASE_FLT_FN (BUILT_IN_HYPOT):
14885 CASE_FLT_FN (BUILT_IN_POW10):
14886 CASE_INT_FN (BUILT_IN_FFS):
14887 CASE_INT_FN (BUILT_IN_PARITY):
14888 CASE_INT_FN (BUILT_IN_POPCOUNT):
14889 CASE_INT_FN (BUILT_IN_CLZ):
14890 CASE_INT_FN (BUILT_IN_CLRSB):
14891 case BUILT_IN_BSWAP32:
14892 case BUILT_IN_BSWAP64:
14893 /* Always true. */
14894 return true;
14895
14896 CASE_FLT_FN (BUILT_IN_SQRT):
14897 /* sqrt(-0.0) is -0.0. */
14898 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14899 return true;
14900 return tree_expr_nonnegative_warnv_p (arg0,
14901 strict_overflow_p);
14902
14903 CASE_FLT_FN (BUILT_IN_ASINH):
14904 CASE_FLT_FN (BUILT_IN_ATAN):
14905 CASE_FLT_FN (BUILT_IN_ATANH):
14906 CASE_FLT_FN (BUILT_IN_CBRT):
14907 CASE_FLT_FN (BUILT_IN_CEIL):
14908 CASE_FLT_FN (BUILT_IN_ERF):
14909 CASE_FLT_FN (BUILT_IN_EXPM1):
14910 CASE_FLT_FN (BUILT_IN_FLOOR):
14911 CASE_FLT_FN (BUILT_IN_FMOD):
14912 CASE_FLT_FN (BUILT_IN_FREXP):
14913 CASE_FLT_FN (BUILT_IN_ICEIL):
14914 CASE_FLT_FN (BUILT_IN_IFLOOR):
14915 CASE_FLT_FN (BUILT_IN_IRINT):
14916 CASE_FLT_FN (BUILT_IN_IROUND):
14917 CASE_FLT_FN (BUILT_IN_LCEIL):
14918 CASE_FLT_FN (BUILT_IN_LDEXP):
14919 CASE_FLT_FN (BUILT_IN_LFLOOR):
14920 CASE_FLT_FN (BUILT_IN_LLCEIL):
14921 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14922 CASE_FLT_FN (BUILT_IN_LLRINT):
14923 CASE_FLT_FN (BUILT_IN_LLROUND):
14924 CASE_FLT_FN (BUILT_IN_LRINT):
14925 CASE_FLT_FN (BUILT_IN_LROUND):
14926 CASE_FLT_FN (BUILT_IN_MODF):
14927 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14928 CASE_FLT_FN (BUILT_IN_RINT):
14929 CASE_FLT_FN (BUILT_IN_ROUND):
14930 CASE_FLT_FN (BUILT_IN_SCALB):
14931 CASE_FLT_FN (BUILT_IN_SCALBLN):
14932 CASE_FLT_FN (BUILT_IN_SCALBN):
14933 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14934 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14935 CASE_FLT_FN (BUILT_IN_SINH):
14936 CASE_FLT_FN (BUILT_IN_TANH):
14937 CASE_FLT_FN (BUILT_IN_TRUNC):
14938 /* True if the 1st argument is nonnegative. */
14939 return tree_expr_nonnegative_warnv_p (arg0,
14940 strict_overflow_p);
14941
14942 CASE_FLT_FN (BUILT_IN_FMAX):
14943 /* True if the 1st OR 2nd arguments are nonnegative. */
14944 return (tree_expr_nonnegative_warnv_p (arg0,
14945 strict_overflow_p)
14946 || (tree_expr_nonnegative_warnv_p (arg1,
14947 strict_overflow_p)));
14948
14949 CASE_FLT_FN (BUILT_IN_FMIN):
14950 /* True if the 1st AND 2nd arguments are nonnegative. */
14951 return (tree_expr_nonnegative_warnv_p (arg0,
14952 strict_overflow_p)
14953 && (tree_expr_nonnegative_warnv_p (arg1,
14954 strict_overflow_p)));
14955
14956 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14957 /* True if the 2nd argument is nonnegative. */
14958 return tree_expr_nonnegative_warnv_p (arg1,
14959 strict_overflow_p);
14960
14961 CASE_FLT_FN (BUILT_IN_POWI):
14962 /* True if the 1st argument is nonnegative or the second
14963 argument is an even integer. */
14964 if (TREE_CODE (arg1) == INTEGER_CST
14965 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14966 return true;
14967 return tree_expr_nonnegative_warnv_p (arg0,
14968 strict_overflow_p);
14969
14970 CASE_FLT_FN (BUILT_IN_POW):
14971 /* True if the 1st argument is nonnegative or the second
14972 argument is an even integer valued real. */
14973 if (TREE_CODE (arg1) == REAL_CST)
14974 {
14975 REAL_VALUE_TYPE c;
14976 HOST_WIDE_INT n;
14977
14978 c = TREE_REAL_CST (arg1);
14979 n = real_to_integer (&c);
14980 if ((n & 1) == 0)
14981 {
14982 REAL_VALUE_TYPE cint;
14983 real_from_integer (&cint, VOIDmode, n, SIGNED);
14984 if (real_identical (&c, &cint))
14985 return true;
14986 }
14987 }
14988 return tree_expr_nonnegative_warnv_p (arg0,
14989 strict_overflow_p);
14990
14991 default:
14992 break;
14993 }
14994 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14995 type);
14996 }
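/* Editor's note (not part of the original source): for example,
   pow (x, 2.0) is reported non-negative by the BUILT_IN_POW case
   above because 2.0 is an even integer-valued REAL_CST, regardless of
   the sign of x, and copysign (x, y) is non-negative exactly when
   y is.  */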
14997
14998 /* Return true if T is known to be non-negative. If the return
14999 value is based on the assumption that signed overflow is undefined,
15000 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15001 *STRICT_OVERFLOW_P. */
15002
15003 static bool
15004 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15005 {
15006 enum tree_code code = TREE_CODE (t);
15007 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15008 return true;
15009
15010 switch (code)
15011 {
15012 case TARGET_EXPR:
15013 {
15014 tree temp = TARGET_EXPR_SLOT (t);
15015 t = TARGET_EXPR_INITIAL (t);
15016
15017 /* If the initializer is non-void, then it's a normal expression
15018 that will be assigned to the slot. */
15019 if (!VOID_TYPE_P (t))
15020 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15021
15022 /* Otherwise, the initializer sets the slot in some way. One common
15023 way is an assignment statement at the end of the initializer. */
15024 while (1)
15025 {
15026 if (TREE_CODE (t) == BIND_EXPR)
15027 t = expr_last (BIND_EXPR_BODY (t));
15028 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15029 || TREE_CODE (t) == TRY_CATCH_EXPR)
15030 t = expr_last (TREE_OPERAND (t, 0));
15031 else if (TREE_CODE (t) == STATEMENT_LIST)
15032 t = expr_last (t);
15033 else
15034 break;
15035 }
15036 if (TREE_CODE (t) == MODIFY_EXPR
15037 && TREE_OPERAND (t, 0) == temp)
15038 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15039 strict_overflow_p);
15040
15041 return false;
15042 }
15043
15044 case CALL_EXPR:
15045 {
15046 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15047 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15048
15049 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15050 get_callee_fndecl (t),
15051 arg0,
15052 arg1,
15053 strict_overflow_p);
15054 }
15055 case COMPOUND_EXPR:
15056 case MODIFY_EXPR:
15057 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15058 strict_overflow_p);
15059 case BIND_EXPR:
15060 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15061 strict_overflow_p);
15062 case SAVE_EXPR:
15063 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15064 strict_overflow_p);
15065
15066 default:
15067 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15068 TREE_TYPE (t));
15069 }
15070
15071 /* We don't know the sign of `t', so be conservative and return false. */
15072 return false;
15073 }
15074
15075 /* Return true if T is known to be non-negative. If the return
15076 value is based on the assumption that signed overflow is undefined,
15077 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15078 *STRICT_OVERFLOW_P. */
15079
15080 bool
15081 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15082 {
15083 enum tree_code code;
15084 if (t == error_mark_node)
15085 return false;
15086
15087 code = TREE_CODE (t);
15088 switch (TREE_CODE_CLASS (code))
15089 {
15090 case tcc_binary:
15091 case tcc_comparison:
15092 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15093 TREE_TYPE (t),
15094 TREE_OPERAND (t, 0),
15095 TREE_OPERAND (t, 1),
15096 strict_overflow_p);
15097
15098 case tcc_unary:
15099 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15100 TREE_TYPE (t),
15101 TREE_OPERAND (t, 0),
15102 strict_overflow_p);
15103
15104 case tcc_constant:
15105 case tcc_declaration:
15106 case tcc_reference:
15107 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15108
15109 default:
15110 break;
15111 }
15112
15113 switch (code)
15114 {
15115 case TRUTH_AND_EXPR:
15116 case TRUTH_OR_EXPR:
15117 case TRUTH_XOR_EXPR:
15118 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15119 TREE_TYPE (t),
15120 TREE_OPERAND (t, 0),
15121 TREE_OPERAND (t, 1),
15122 strict_overflow_p);
15123 case TRUTH_NOT_EXPR:
15124 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15125 TREE_TYPE (t),
15126 TREE_OPERAND (t, 0),
15127 strict_overflow_p);
15128
15129 case COND_EXPR:
15130 case CONSTRUCTOR:
15131 case OBJ_TYPE_REF:
15132 case ASSERT_EXPR:
15133 case ADDR_EXPR:
15134 case WITH_SIZE_EXPR:
15135 case SSA_NAME:
15136 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15137
15138 default:
15139 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15140 }
15141 }
15142
15143 /* Return true if `t' is known to be non-negative. Handle warnings
15144 about undefined signed overflow. */
15145
15146 bool
15147 tree_expr_nonnegative_p (tree t)
15148 {
15149 bool ret, strict_overflow_p;
15150
15151 strict_overflow_p = false;
15152 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15153 if (strict_overflow_p)
15154 fold_overflow_warning (("assuming signed overflow does not occur when "
15155 "determining that expression is always "
15156 "non-negative"),
15157 WARN_STRICT_OVERFLOW_MISC);
15158 return ret;
15159 }
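/* Editor's note (not part of the original source): for a signed int
   i, the expression i * i is non-negative only on the assumption that
   signed overflow is undefined, so a call such as

       tree_expr_nonnegative_p (expr)

   may emit an "assuming signed overflow does not occur" note under
   -Wstrict-overflow before returning true.  */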
15160
15161
15162 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15163 For floating point we further ensure that T is not denormal.
15164 Similar logic is present in nonzero_address_p in rtlanal.c.
15165
15166 If the return value is based on the assumption that signed overflow
15167 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15168 change *STRICT_OVERFLOW_P. */
15169
15170 bool
15171 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15172 bool *strict_overflow_p)
15173 {
15174 switch (code)
15175 {
15176 case ABS_EXPR:
15177 return tree_expr_nonzero_warnv_p (op0,
15178 strict_overflow_p);
15179
15180 case NOP_EXPR:
15181 {
15182 tree inner_type = TREE_TYPE (op0);
15183 tree outer_type = type;
15184
15185 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15186 && tree_expr_nonzero_warnv_p (op0,
15187 strict_overflow_p));
15188 }
15189 break;
15190
15191 case NON_LVALUE_EXPR:
15192 return tree_expr_nonzero_warnv_p (op0,
15193 strict_overflow_p);
15194
15195 default:
15196 break;
15197 }
15198
15199 return false;
15200 }
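
/* An illustrative sketch (hypothetical helper): a non-narrowing
   NOP_EXPR cast preserves nonzero-ness, so this returns true whenever
   OP itself is known nonzero, e.g. a nonzero INTEGER_CST.  */

static bool
example_widening_cast_nonzero (tree op)
{
  bool ovf = false;
  return tree_unary_nonzero_warnv_p (NOP_EXPR, long_integer_type_node,
				     op, &ovf);
}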
15201
15202 /* Return true when the binary expression (CODE OP0 OP1) is known to be
15203 nonzero. For floating point we further ensure that the result is not
15204 denormal. Similar logic is present in nonzero_address in rtlanal.h.
15205
15206 If the return value is based on the assumption that signed overflow
15207 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15208 change *STRICT_OVERFLOW_P. */
15209
15210 bool
15211 tree_binary_nonzero_warnv_p (enum tree_code code,
15212 tree type,
15213 tree op0,
15214 tree op1, bool *strict_overflow_p)
15215 {
15216 bool sub_strict_overflow_p;
15217 switch (code)
15218 {
15219 case POINTER_PLUS_EXPR:
15220 case PLUS_EXPR:
15221 if (TYPE_OVERFLOW_UNDEFINED (type))
15222 {
15223 /* In the presence of negative values it is hard
15224 to say anything. */
15225 sub_strict_overflow_p = false;
15226 if (!tree_expr_nonnegative_warnv_p (op0,
15227 &sub_strict_overflow_p)
15228 || !tree_expr_nonnegative_warnv_p (op1,
15229 &sub_strict_overflow_p))
15230 return false;
15231 /* One of the operands must be positive and the other non-negative. */
15232 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15233 overflows, on a twos-complement machine the sum of two
15234 nonnegative numbers can never be zero. */
15235 return (tree_expr_nonzero_warnv_p (op0,
15236 strict_overflow_p)
15237 || tree_expr_nonzero_warnv_p (op1,
15238 strict_overflow_p));
15239 }
15240 break;
15241
15242 case MULT_EXPR:
15243 if (TYPE_OVERFLOW_UNDEFINED (type))
15244 {
15245 if (tree_expr_nonzero_warnv_p (op0,
15246 strict_overflow_p)
15247 && tree_expr_nonzero_warnv_p (op1,
15248 strict_overflow_p))
15249 {
15250 *strict_overflow_p = true;
15251 return true;
15252 }
15253 }
15254 break;
15255
15256 case MIN_EXPR:
15257 sub_strict_overflow_p = false;
15258 if (tree_expr_nonzero_warnv_p (op0,
15259 &sub_strict_overflow_p)
15260 && tree_expr_nonzero_warnv_p (op1,
15261 &sub_strict_overflow_p))
15262 {
15263 if (sub_strict_overflow_p) *strict_overflow_p = true;
15264 return true; /* MIN of two nonzero operands is itself nonzero. */
15265 }
15266 break;
15267
15268 case MAX_EXPR:
15269 sub_strict_overflow_p = false;
15270 if (tree_expr_nonzero_warnv_p (op0,
15271 &sub_strict_overflow_p))
15272 {
15273 if (sub_strict_overflow_p)
15274 *strict_overflow_p = true;
15275
15276 /* When both operands are nonzero, then MAX must be too. */
15277 if (tree_expr_nonzero_warnv_p (op1,
15278 strict_overflow_p))
15279 return true;
15280
15281 /* MAX where operand 0 is positive is positive. */
15282 return tree_expr_nonnegative_warnv_p (op0,
15283 strict_overflow_p);
15284 }
15285 /* MAX where operand 1 is positive is positive. */
15286 else if (tree_expr_nonzero_warnv_p (op1,
15287 &sub_strict_overflow_p)
15288 && tree_expr_nonnegative_warnv_p (op1,
15289 &sub_strict_overflow_p))
15290 {
15291 if (sub_strict_overflow_p)
15292 *strict_overflow_p = true;
15293 return true;
15294 }
15295 break;
15296
15297 case BIT_IOR_EXPR:
15298 return (tree_expr_nonzero_warnv_p (op1,
15299 strict_overflow_p)
15300 || tree_expr_nonzero_warnv_p (op0,
15301 strict_overflow_p));
15302
15303 default:
15304 break;
15305 }
15306
15307 return false;
15308 }
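
/* A sketch (hypothetical helper) of the PLUS_EXPR rule above: for a
   signed type where overflow is undefined, if A and B are both known
   non-negative and at least one is known nonzero, A + B is known
   nonzero, because the sum of two non-negative values cannot wrap to
   zero on a twos-complement machine.  */

static bool
example_sum_known_nonzero (tree a, tree b)
{
  bool strict_ovf = false;
  return tree_binary_nonzero_warnv_p (PLUS_EXPR, TREE_TYPE (a),
				      a, b, &strict_ovf);
}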
15309
15310 /* Return true when T is known to be nonzero. For floating point we
15311 further ensure that T is not denormal.
15312 Similar logic is present in nonzero_address in rtlanal.h.
15313
15314 If the return value is based on the assumption that signed overflow
15315 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15316 change *STRICT_OVERFLOW_P. */
15317
15318 bool
15319 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15320 {
15321 bool sub_strict_overflow_p;
15322 switch (TREE_CODE (t))
15323 {
15324 case INTEGER_CST:
15325 return !integer_zerop (t);
15326
15327 case ADDR_EXPR:
15328 {
15329 tree base = TREE_OPERAND (t, 0);
15330
15331 if (!DECL_P (base))
15332 base = get_base_address (base);
15333
15334 if (!base)
15335 return false;
15336
15337 /* For objects in the symbol table, check whether we know they are non-zero.
15338 Don't do anything for variables and functions before the symtab is built;
15339 it is quite possible that they will be declared weak later. */
15340 if (DECL_P (base) && decl_in_symtab_p (base))
15341 {
15342 struct symtab_node *symbol;
15343
15344 symbol = symtab_node::get_create (base);
15345 if (symbol)
15346 return symbol->nonzero_address ();
15347 else
15348 return false;
15349 }
15350
15351 /* Function local objects are never NULL. */
15352 if (DECL_P (base)
15353 && (DECL_CONTEXT (base)
15354 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15355 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15356 return true;
15357
15358 /* Constants are never weak. */
15359 if (CONSTANT_CLASS_P (base))
15360 return true;
15361
15362 return false;
15363 }
15364
15365 case COND_EXPR:
15366 sub_strict_overflow_p = false;
15367 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15368 &sub_strict_overflow_p)
15369 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15370 &sub_strict_overflow_p))
15371 {
15372 if (sub_strict_overflow_p)
15373 *strict_overflow_p = true;
15374 return true;
15375 }
15376 break;
15377
15378 default:
15379 break;
15380 }
15381 return false;
15382 }
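
/* A sketch (hypothetical helper): for LOCAL_VAR an auto variable of
   the current function, &LOCAL_VAR is known nonzero, since
   function-local objects are never placed at address zero.  */

static bool
example_local_address_nonzero (tree local_var)
{
  bool ovf = false;
  return tree_single_nonzero_warnv_p (build_fold_addr_expr (local_var),
				      &ovf);
}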
15383
15384 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15385 attempt to fold the expression to a constant without modifying TYPE,
15386 OP0 or OP1.
15387
15388 If the expression could be simplified to a constant, then return
15389 the constant. If the expression would not be simplified to a
15390 constant, then return NULL_TREE. */
15391
15392 tree
15393 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15394 {
15395 tree tem = fold_binary (code, type, op0, op1);
15396 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15397 }
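
/* A sketch (hypothetical helper): folding 2 + 3 in int yields the
   INTEGER_CST 5; fold_binary_to_constant returns NULL_TREE whenever
   the result does not fold to a constant.  */

static tree
example_fold_two_plus_three (void)
{
  tree op0 = build_int_cst (integer_type_node, 2);
  tree op1 = build_int_cst (integer_type_node, 3);
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, op0, op1);
}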
15398
15399 /* Given the components of a unary expression CODE, TYPE and OP0,
15400 attempt to fold the expression to a constant without modifying
15401 TYPE or OP0.
15402
15403 If the expression could be simplified to a constant, then return
15404 the constant. If the expression would not be simplified to a
15405 constant, then return NULL_TREE. */
15406
15407 tree
15408 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15409 {
15410 tree tem = fold_unary (code, type, op0);
15411 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15412 }
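
/* Likewise for the unary case (hypothetical helper): -(7) folds to
   the INTEGER_CST -7.  */

static tree
example_fold_negate_seven (void)
{
  return fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
				 build_int_cst (integer_type_node, 7));
}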
15413
15414 /* If EXP represents referencing an element in a constant string
15415 (either via pointer arithmetic or array indexing), return the
15416 tree representing the value accessed, otherwise return NULL. */
15417
15418 tree
15419 fold_read_from_constant_string (tree exp)
15420 {
15421 if ((TREE_CODE (exp) == INDIRECT_REF
15422 || TREE_CODE (exp) == ARRAY_REF)
15423 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15424 {
15425 tree exp1 = TREE_OPERAND (exp, 0);
15426 tree index;
15427 tree string;
15428 location_t loc = EXPR_LOCATION (exp);
15429
15430 if (TREE_CODE (exp) == INDIRECT_REF)
15431 string = string_constant (exp1, &index);
15432 else
15433 {
15434 tree low_bound = array_ref_low_bound (exp);
15435 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15436
15437 /* Optimize the special case of a zero lower bound.
15438
15439 We convert the low_bound to sizetype to avoid some problems
15440 with constant folding. (E.g. suppose the lower bound is 1,
15441 and its mode is QI. Without the conversion, (ARRAY
15442 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15443 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15444 if (! integer_zerop (low_bound))
15445 index = size_diffop_loc (loc, index,
15446 fold_convert_loc (loc, sizetype, low_bound));
15447
15448 string = exp1;
15449 }
15450
15451 if (string
15452 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15453 && TREE_CODE (string) == STRING_CST
15454 && TREE_CODE (index) == INTEGER_CST
15455 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15456 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15457 == MODE_INT)
15458 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15459 return build_int_cst_type (TREE_TYPE (exp),
15460 (TREE_STRING_POINTER (string)
15461 [TREE_INT_CST_LOW (index)]));
15462 }
15463 return NULL;
15464 }
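
/* A sketch (hypothetical helper, assuming the usual char-array
   representation of string literals): build the STRING_CST "abc" with
   type char[4], then fold the read "abc"[1] to the character constant
   'b'.  */

static tree
example_read_from_string (void)
{
  tree str = build_string (4, "abc");
  TREE_TYPE (str) = build_array_type (char_type_node,
				      build_index_type (size_int (3)));
  tree ref = build4 (ARRAY_REF, char_type_node, str, size_int (1),
		     NULL_TREE, NULL_TREE);
  return fold_read_from_constant_string (ref);
}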
15465
15466 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15467 an integer constant, real, or fixed-point constant.
15468
15469 TYPE is the type of the result. */
15470
15471 static tree
15472 fold_negate_const (tree arg0, tree type)
15473 {
15474 tree t = NULL_TREE;
15475
15476 switch (TREE_CODE (arg0))
15477 {
15478 case INTEGER_CST:
15479 {
15480 bool overflow;
15481 wide_int val = wi::neg (arg0, &overflow);
15482 t = force_fit_type (type, val, 1,
15483 (overflow | TREE_OVERFLOW (arg0))
15484 && !TYPE_UNSIGNED (type));
15485 break;
15486 }
15487
15488 case REAL_CST:
15489 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15490 break;
15491
15492 case FIXED_CST:
15493 {
15494 FIXED_VALUE_TYPE f;
15495 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15496 &(TREE_FIXED_CST (arg0)), NULL,
15497 TYPE_SATURATING (type));
15498 t = build_fixed (type, f);
15499 /* Propagate overflow flags. */
15500 if (overflow_p | TREE_OVERFLOW (arg0))
15501 TREE_OVERFLOW (t) = 1;
15502 break;
15503 }
15504
15505 default:
15506 gcc_unreachable ();
15507 }
15508
15509 return t;
15510 }
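
/* A sketch (hypothetical helper): negating the most negative int
   overflows, so force_fit_type sets TREE_OVERFLOW on the resulting
   INTEGER_CST, which is the bookkeeping later overflow diagnostics
   rely on.  */

static tree
example_negate_int_min (void)
{
  return fold_negate_const (TYPE_MIN_VALUE (integer_type_node),
			    integer_type_node);
}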
15511
15512 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15513 an integer constant or real constant.
15514
15515 TYPE is the type of the result. */
15516
15517 tree
15518 fold_abs_const (tree arg0, tree type)
15519 {
15520 tree t = NULL_TREE;
15521
15522 switch (TREE_CODE (arg0))
15523 {
15524 case INTEGER_CST:
15525 {
15526 /* If the value is unsigned or non-negative, then the absolute value
15527 is the same as the ordinary value. */
15528 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15529 t = arg0;
15530
15531 /* If the value is negative, then the absolute value is
15532 its negation. */
15533 else
15534 {
15535 bool overflow;
15536 wide_int val = wi::neg (arg0, &overflow);
15537 t = force_fit_type (type, val, -1,
15538 overflow | TREE_OVERFLOW (arg0));
15539 }
15540 }
15541 break;
15542
15543 case REAL_CST:
15544 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15545 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15546 else
15547 t = arg0;
15548 break;
15549
15550 default:
15551 gcc_unreachable ();
15552 }
15553
15554 return t;
15555 }
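
/* A sketch (hypothetical helper): abs (-5) folds to the INTEGER_CST 5
   via the negation branch above.  */

static tree
example_abs_of_minus_five (void)
{
  return fold_abs_const (build_int_cst (integer_type_node, -5),
			 integer_type_node);
}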
15556
15557 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15558 constant. TYPE is the type of the result. */
15559
15560 static tree
15561 fold_not_const (const_tree arg0, tree type)
15562 {
15563 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15564
15565 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15566 }
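
/* A sketch (hypothetical helper): ~0 in a signed type folds to the
   INTEGER_CST -1.  */

static tree
example_fold_not_zero (void)
{
  return fold_not_const (build_int_cst (integer_type_node, 0),
			 integer_type_node);
}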
15567
15568 /* Given CODE, a relational operator, the target type, TYPE and two
15569 constant operands OP0 and OP1, return the result of the
15570 relational operation. If the result is not a compile time
15571 constant, then return NULL_TREE. */
15572
15573 static tree
15574 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15575 {
15576 int result, invert;
15577
15578 /* From here on, the only cases we handle are when the result is
15579 known to be a constant. */
15580
15581 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15582 {
15583 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15584 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15585
15586 /* Handle the cases where either operand is a NaN. */
15587 if (real_isnan (c0) || real_isnan (c1))
15588 {
15589 switch (code)
15590 {
15591 case EQ_EXPR:
15592 case ORDERED_EXPR:
15593 result = 0;
15594 break;
15595
15596 case NE_EXPR:
15597 case UNORDERED_EXPR:
15598 case UNLT_EXPR:
15599 case UNLE_EXPR:
15600 case UNGT_EXPR:
15601 case UNGE_EXPR:
15602 case UNEQ_EXPR:
15603 result = 1;
15604 break;
15605
15606 case LT_EXPR:
15607 case LE_EXPR:
15608 case GT_EXPR:
15609 case GE_EXPR:
15610 case LTGT_EXPR:
15611 if (flag_trapping_math)
15612 return NULL_TREE;
15613 result = 0;
15614 break;
15615
15616 default:
15617 gcc_unreachable ();
15618 }
15619
15620 return constant_boolean_node (result, type);
15621 }
15622
15623 return constant_boolean_node (real_compare (code, c0, c1), type);
15624 }
15625
15626 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15627 {
15628 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15629 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15630 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15631 }
15632
15633 /* Handle equality/inequality of complex constants. */
15634 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15635 {
15636 tree rcond = fold_relational_const (code, type,
15637 TREE_REALPART (op0),
15638 TREE_REALPART (op1));
15639 tree icond = fold_relational_const (code, type,
15640 TREE_IMAGPART (op0),
15641 TREE_IMAGPART (op1));
15642 if (code == EQ_EXPR)
15643 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15644 else if (code == NE_EXPR)
15645 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15646 else
15647 return NULL_TREE;
15648 }
15649
15650 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15651 {
15652 unsigned count = VECTOR_CST_NELTS (op0);
15653 tree *elts = XALLOCAVEC (tree, count);
15654 gcc_assert (VECTOR_CST_NELTS (op1) == count
15655 && TYPE_VECTOR_SUBPARTS (type) == count);
15656
15657 for (unsigned i = 0; i < count; i++)
15658 {
15659 tree elem_type = TREE_TYPE (type);
15660 tree elem0 = VECTOR_CST_ELT (op0, i);
15661 tree elem1 = VECTOR_CST_ELT (op1, i);
15662
15663 tree tem = fold_relational_const (code, elem_type,
15664 elem0, elem1);
15665
15666 if (tem == NULL_TREE)
15667 return NULL_TREE;
15668
15669 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15670 }
15671
15672 return build_vector (type, elts);
15673 }
15674
15675 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15676
15677 To compute GT, swap the arguments and do LT.
15678 To compute GE, do LT and invert the result.
15679 To compute LE, swap the arguments, do LT and invert the result.
15680 To compute NE, do EQ and invert the result.
15681
15682 Therefore, the code below must handle only EQ and LT. */
15683
15684 if (code == LE_EXPR || code == GT_EXPR)
15685 {
15686 tree tem = op0;
15687 op0 = op1;
15688 op1 = tem;
15689 code = swap_tree_comparison (code);
15690 }
15691
15692 /* Note that it is safe to invert for real values here because we
15693 have already handled the one case where it matters. */
15694
15695 invert = 0;
15696 if (code == NE_EXPR || code == GE_EXPR)
15697 {
15698 invert = 1;
15699 code = invert_tree_comparison (code, false);
15700 }
15701
15702 /* Compute a result for LT or EQ if args permit;
15703 otherwise return NULL_TREE. */
15704 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15705 {
15706 if (code == EQ_EXPR)
15707 result = tree_int_cst_equal (op0, op1);
15708 else
15709 result = tree_int_cst_lt (op0, op1);
15710 }
15711 else
15712 return NULL_TREE;
15713
15714 if (invert)
15715 result ^= 1;
15716 return constant_boolean_node (result, type);
15717 }
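
/* A sketch (hypothetical helper): 2 < 3 on INTEGER_CSTs folds to
   boolean_true_node through the tree_int_cst_lt path above.  */

static tree
example_fold_two_lt_three (void)
{
  tree op0 = build_int_cst (integer_type_node, 2);
  tree op1 = build_int_cst (integer_type_node, 3);
  return fold_relational_const (LT_EXPR, boolean_type_node, op0, op1);
}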
15718
15719 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15720 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15721 itself. */
15722
15723 tree
15724 fold_build_cleanup_point_expr (tree type, tree expr)
15725 {
15726 /* If the expression does not have side effects then we don't have to wrap
15727 it with a cleanup point expression. */
15728 if (!TREE_SIDE_EFFECTS (expr))
15729 return expr;
15730
15731 /* If the expression is a return, check whether the expression inside the
15732 return, or the right-hand side of the modify expression inside the
15733 return, has side effects. If either has none, we don't need to
15734 wrap the expression in a cleanup point expression. Note we don't check the
15735 left-hand side of the modify because it should always be a return decl. */
15736 if (TREE_CODE (expr) == RETURN_EXPR)
15737 {
15738 tree op = TREE_OPERAND (expr, 0);
15739 if (!op || !TREE_SIDE_EFFECTS (op))
15740 return expr;
15741 op = TREE_OPERAND (op, 1);
15742 if (!TREE_SIDE_EFFECTS (op))
15743 return expr;
15744 }
15745
15746 return build1 (CLEANUP_POINT_EXPR, type, expr);
15747 }
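
/* A sketch (hypothetical helper): a CALL_EXPR has side effects, so it
   gets wrapped in a CLEANUP_POINT_EXPR here; a side-effect-free
   expression would be returned unchanged.  */

static tree
example_wrap_cleanup_point (tree call)
{
  return fold_build_cleanup_point_expr (TREE_TYPE (call), call);
}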
15748
15749 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15750 of an indirection through OP0, or NULL_TREE if no simplification is
15751 possible. */
15752
15753 tree
15754 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15755 {
15756 tree sub = op0;
15757 tree subtype;
15758
15759 STRIP_NOPS (sub);
15760 subtype = TREE_TYPE (sub);
15761 if (!POINTER_TYPE_P (subtype))
15762 return NULL_TREE;
15763
15764 if (TREE_CODE (sub) == ADDR_EXPR)
15765 {
15766 tree op = TREE_OPERAND (sub, 0);
15767 tree optype = TREE_TYPE (op);
15768 /* *&CONST_DECL -> the value of the const decl. */
15769 if (TREE_CODE (op) == CONST_DECL)
15770 return DECL_INITIAL (op);
15771 /* *&p => p; make sure to handle *&"str"[cst] here. */
15772 if (type == optype)
15773 {
15774 tree fop = fold_read_from_constant_string (op);
15775 if (fop)
15776 return fop;
15777 else
15778 return op;
15779 }
15780 /* *(foo *)&fooarray => fooarray[0] */
15781 else if (TREE_CODE (optype) == ARRAY_TYPE
15782 && type == TREE_TYPE (optype)
15783 && (!in_gimple_form
15784 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15785 {
15786 tree type_domain = TYPE_DOMAIN (optype);
15787 tree min_val = size_zero_node;
15788 if (type_domain && TYPE_MIN_VALUE (type_domain))
15789 min_val = TYPE_MIN_VALUE (type_domain);
15790 if (in_gimple_form
15791 && TREE_CODE (min_val) != INTEGER_CST)
15792 return NULL_TREE;
15793 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15794 NULL_TREE, NULL_TREE);
15795 }
15796 /* *(foo *)&complexfoo => __real__ complexfoo */
15797 else if (TREE_CODE (optype) == COMPLEX_TYPE
15798 && type == TREE_TYPE (optype))
15799 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15800 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15801 else if (TREE_CODE (optype) == VECTOR_TYPE
15802 && type == TREE_TYPE (optype))
15803 {
15804 tree part_width = TYPE_SIZE (type);
15805 tree index = bitsize_int (0);
15806 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15807 }
15808 }
15809
15810 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15811 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15812 {
15813 tree op00 = TREE_OPERAND (sub, 0);
15814 tree op01 = TREE_OPERAND (sub, 1);
15815
15816 STRIP_NOPS (op00);
15817 if (TREE_CODE (op00) == ADDR_EXPR)
15818 {
15819 tree op00type;
15820 op00 = TREE_OPERAND (op00, 0);
15821 op00type = TREE_TYPE (op00);
15822
15823 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15824 if (TREE_CODE (op00type) == VECTOR_TYPE
15825 && type == TREE_TYPE (op00type))
15826 {
15827 HOST_WIDE_INT offset = tree_to_shwi (op01);
15828 tree part_width = TYPE_SIZE (type);
15829 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
15830 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15831 tree index = bitsize_int (indexi);
15832
15833 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15834 return fold_build3_loc (loc,
15835 BIT_FIELD_REF, type, op00,
15836 part_width, index);
15837
15838 }
15839 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15840 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15841 && type == TREE_TYPE (op00type))
15842 {
15843 tree size = TYPE_SIZE_UNIT (type);
15844 if (tree_int_cst_equal (size, op01))
15845 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15846 }
15847 /* ((foo *)&fooarray)[1] => fooarray[1] */
15848 else if (TREE_CODE (op00type) == ARRAY_TYPE
15849 && type == TREE_TYPE (op00type))
15850 {
15851 tree type_domain = TYPE_DOMAIN (op00type);
15852 tree min_val = size_zero_node;
15853 if (type_domain && TYPE_MIN_VALUE (type_domain))
15854 min_val = TYPE_MIN_VALUE (type_domain);
15855 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15856 TYPE_SIZE_UNIT (type));
15857 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15858 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15859 NULL_TREE, NULL_TREE);
15860 }
15861 }
15862 }
15863
15864 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15865 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15866 && type == TREE_TYPE (TREE_TYPE (subtype))
15867 && (!in_gimple_form
15868 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15869 {
15870 tree type_domain;
15871 tree min_val = size_zero_node;
15872 sub = build_fold_indirect_ref_loc (loc, sub);
15873 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15874 if (type_domain && TYPE_MIN_VALUE (type_domain))
15875 min_val = TYPE_MIN_VALUE (type_domain);
15876 if (in_gimple_form
15877 && TREE_CODE (min_val) != INTEGER_CST)
15878 return NULL_TREE;
15879 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15880 NULL_TREE);
15881 }
15882
15883 return NULL_TREE;
15884 }
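
/* A sketch (hypothetical helper) of the "*&p => p" case above: for any
   variable VAR, dereferencing &VAR simplifies back to VAR.  */

static tree
example_fold_deref_addr (tree var)
{
  return fold_indirect_ref_1 (UNKNOWN_LOCATION, TREE_TYPE (var),
			      build_fold_addr_expr (var));
}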
15885
15886 /* Builds an expression for an indirection through T, simplifying some
15887 cases. */
15888
15889 tree
15890 build_fold_indirect_ref_loc (location_t loc, tree t)
15891 {
15892 tree type = TREE_TYPE (TREE_TYPE (t));
15893 tree sub = fold_indirect_ref_1 (loc, type, t);
15894
15895 if (sub)
15896 return sub;
15897
15898 return build1_loc (loc, INDIRECT_REF, type, t);
15899 }
15900
15901 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15902
15903 tree
15904 fold_indirect_ref_loc (location_t loc, tree t)
15905 {
15906 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15907
15908 if (sub)
15909 return sub;
15910 else
15911 return t;
15912 }
15913
15914 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15915 whose result is ignored. The type of the returned tree need not be
15916 the same as the original expression. */
15917
15918 tree
15919 fold_ignored_result (tree t)
15920 {
15921 if (!TREE_SIDE_EFFECTS (t))
15922 return integer_zero_node;
15923
15924 for (;;)
15925 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15926 {
15927 case tcc_unary:
15928 t = TREE_OPERAND (t, 0);
15929 break;
15930
15931 case tcc_binary:
15932 case tcc_comparison:
15933 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15934 t = TREE_OPERAND (t, 0);
15935 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15936 t = TREE_OPERAND (t, 1);
15937 else
15938 return t;
15939 break;
15940
15941 case tcc_expression:
15942 switch (TREE_CODE (t))
15943 {
15944 case COMPOUND_EXPR:
15945 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15946 return t;
15947 t = TREE_OPERAND (t, 0);
15948 break;
15949
15950 case COND_EXPR:
15951 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15952 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15953 return t;
15954 t = TREE_OPERAND (t, 0);
15955 break;
15956
15957 default:
15958 return t;
15959 }
15960 break;
15961
15962 default:
15963 return t;
15964 }
15965 }
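
/* A sketch (hypothetical helper, assuming C is a side-effect-free
   constant of CALL's type): in CALL + C with the result ignored, the
   addition is stripped and only the side-effecting CALL is kept.  */

static tree
example_ignore_result (tree call, tree c)
{
  tree sum = fold_build2 (PLUS_EXPR, TREE_TYPE (call), call, c);
  return fold_ignored_result (sum);
}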
15966
15967 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15968
15969 tree
15970 round_up_loc (location_t loc, tree value, unsigned int divisor)
15971 {
15972 tree div = NULL_TREE;
15973
15974 if (divisor == 1)
15975 return value;
15976
15977 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15978 have to do anything. Only do this when we are not given a constant,
15979 because in that case this check is more expensive than just
15980 doing the rounding. */
15981 if (TREE_CODE (value) != INTEGER_CST)
15982 {
15983 div = build_int_cst (TREE_TYPE (value), divisor);
15984
15985 if (multiple_of_p (TREE_TYPE (value), value, div))
15986 return value;
15987 }
15988
15989 /* If divisor is a power of two, simplify this to bit manipulation. */
15990 if (divisor == (divisor & -divisor))
15991 {
15992 if (TREE_CODE (value) == INTEGER_CST)
15993 {
15994 wide_int val = value;
15995 bool overflow_p;
15996
15997 if ((val & (divisor - 1)) == 0)
15998 return value;
15999
16000 overflow_p = TREE_OVERFLOW (value);
16001 val &= ~(divisor - 1);
16002 val += divisor;
16003 if (val == 0)
16004 overflow_p = true;
16005
16006 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16007 }
16008 else
16009 {
16010 tree t;
16011
16012 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16013 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16014 t = build_int_cst (TREE_TYPE (value), -divisor);
16015 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16016 }
16017 }
16018 else
16019 {
16020 if (!div)
16021 div = build_int_cst (TREE_TYPE (value), divisor);
16022 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16023 value = size_binop_loc (loc, MULT_EXPR, value, div);
16024 }
16025
16026 return value;
16027 }
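
/* A sketch (hypothetical helper): 8 is a power of two, so rounding the
   constant 23 up to a multiple of 8 folds to (23 + 7) & -8 == 24 via
   the bit-manipulation path above.  */

static tree
example_round_up_23_to_8 (void)
{
  return round_up_loc (UNKNOWN_LOCATION, size_int (23), 8);
}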
16028
16029 /* Likewise, but round down. */
16030
16031 tree
16032 round_down_loc (location_t loc, tree value, int divisor)
16033 {
16034 tree div = NULL_TREE;
16035
16036 gcc_assert (divisor > 0);
16037 if (divisor == 1)
16038 return value;
16039
16040 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16041 have to do anything. Only do this when we are not given a constant,
16042 because in that case this check is more expensive than just
16043 doing the rounding. */
16044 if (TREE_CODE (value) != INTEGER_CST)
16045 {
16046 div = build_int_cst (TREE_TYPE (value), divisor);
16047
16048 if (multiple_of_p (TREE_TYPE (value), value, div))
16049 return value;
16050 }
16051
16052 /* If divisor is a power of two, simplify this to bit manipulation. */
16053 if (divisor == (divisor & -divisor))
16054 {
16055 tree t;
16056
16057 t = build_int_cst (TREE_TYPE (value), -divisor);
16058 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16059 }
16060 else
16061 {
16062 if (!div)
16063 div = build_int_cst (TREE_TYPE (value), divisor);
16064 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16065 value = size_binop_loc (loc, MULT_EXPR, value, div);
16066 }
16067
16068 return value;
16069 }
16070
16071 /* Returns a pointer to the base of the object addressed by EXP and
16072 extracts information about the offset of the access, storing it
16073 in *PBITPOS and *POFFSET. */
16074
16075 static tree
16076 split_address_to_core_and_offset (tree exp,
16077 HOST_WIDE_INT *pbitpos, tree *poffset)
16078 {
16079 tree core;
16080 machine_mode mode;
16081 int unsignedp, volatilep;
16082 HOST_WIDE_INT bitsize;
16083 location_t loc = EXPR_LOCATION (exp);
16084
16085 if (TREE_CODE (exp) == ADDR_EXPR)
16086 {
16087 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16088 poffset, &mode, &unsignedp, &volatilep,
16089 false);
16090 core = build_fold_addr_expr_loc (loc, core);
16091 }
16092 else
16093 {
16094 core = exp;
16095 *pbitpos = 0;
16096 *poffset = NULL_TREE;
16097 }
16098
16099 return core;
16100 }
16101
16102 /* Returns true if addresses of E1 and E2 differ by a constant, false
16103 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16104
16105 bool
16106 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16107 {
16108 tree core1, core2;
16109 HOST_WIDE_INT bitpos1, bitpos2;
16110 tree toffset1, toffset2, tdiff, type;
16111
16112 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16113 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16114
16115 if (bitpos1 % BITS_PER_UNIT != 0
16116 || bitpos2 % BITS_PER_UNIT != 0
16117 || !operand_equal_p (core1, core2, 0))
16118 return false;
16119
16120 if (toffset1 && toffset2)
16121 {
16122 type = TREE_TYPE (toffset1);
16123 if (type != TREE_TYPE (toffset2))
16124 toffset2 = fold_convert (type, toffset2);
16125
16126 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16127 if (!cst_and_fits_in_hwi (tdiff))
16128 return false;
16129
16130 *diff = int_cst_value (tdiff);
16131 }
16132 else if (toffset1 || toffset2)
16133 {
16134 /* If only one of the offsets is non-constant, the difference cannot
16135 be a constant. */
16136 return false;
16137 }
16138 else
16139 *diff = 0;
16140
16141 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16142 return true;
16143 }
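
/* A sketch (hypothetical helper, assuming E1 and E2 are ADDR_EXPRs
   such as &a[3] and &a[1] with one-byte elements): the difference is
   then the constant 2.  */

static bool
example_addresses_differ_by_two (tree e1, tree e2)
{
  HOST_WIDE_INT diff;
  return ptr_difference_const (e1, e2, &diff) && diff == 2;
}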
16144
16145 /* Simplify the floating point expression EXP when the sign of the
16146 result is not significant. Return NULL_TREE if no simplification
16147 is possible. */
16148
16149 tree
16150 fold_strip_sign_ops (tree exp)
16151 {
16152 tree arg0, arg1;
16153 location_t loc = EXPR_LOCATION (exp);
16154
16155 switch (TREE_CODE (exp))
16156 {
16157 case ABS_EXPR:
16158 case NEGATE_EXPR:
16159 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16160 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16161
16162 case MULT_EXPR:
16163 case RDIV_EXPR:
16164 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
16165 return NULL_TREE;
16166 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16167 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16168 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16169 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16170 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16171 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16172 break;
16173
16174 case COMPOUND_EXPR:
16175 arg0 = TREE_OPERAND (exp, 0);
16176 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16177 if (arg1)
16178 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16179 break;
16180
16181 case COND_EXPR:
16182 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16183 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16184 if (arg0 || arg1)
16185 return fold_build3_loc (loc,
16186 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16187 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16188 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16189 break;
16190
16191 case CALL_EXPR:
16192 {
16193 const enum built_in_function fcode = builtin_mathfn_code (exp);
16194 switch (fcode)
16195 {
16196 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16197 /* Strip copysign function call, return the 1st argument. */
16198 arg0 = CALL_EXPR_ARG (exp, 0);
16199 arg1 = CALL_EXPR_ARG (exp, 1);
16200 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16201
16202 default:
16203 /* Strip sign ops from the argument of "odd" math functions. */
16204 if (negate_mathfn_p (fcode))
16205 {
16206 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16207 if (arg0)
16208 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16209 }
16210 break;
16211 }
16212 }
16213 break;
16214
16215 default:
16216 break;
16217 }
16218 return NULL_TREE;
16219 }
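
/* A sketch (hypothetical helper): when the sign of the result is
   insignificant, e.g. inside fabs, a NEGATE_EXPR can be stripped, so
   for a non-constant X this returns X itself.  */

static tree
example_strip_negation (tree x)
{
  return fold_strip_sign_ops (fold_build1 (NEGATE_EXPR, TREE_TYPE (x), x));
}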