/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "params.h"
75 #include "tree-into-ssa.h"
76 #include "md5.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
79 #include "tree-vrp.h"
80 #include "tree-ssanames.h"
81 #include "selftest.h"
82
83 /* Nonzero if we are folding constants inside an initializer; zero
84 otherwise. */
85 int folding_initializer = 0;
86
87 /* The following constants represent a bit based encoding of GCC's
88 comparison operators. This encoding simplifies transformations
89 on relational comparison operators, such as AND and OR. */
90 enum comparison_code {
91 COMPCODE_FALSE = 0,
92 COMPCODE_LT = 1,
93 COMPCODE_EQ = 2,
94 COMPCODE_LE = 3,
95 COMPCODE_GT = 4,
96 COMPCODE_LTGT = 5,
97 COMPCODE_GE = 6,
98 COMPCODE_ORD = 7,
99 COMPCODE_UNORD = 8,
100 COMPCODE_UNLT = 9,
101 COMPCODE_UNEQ = 10,
102 COMPCODE_UNLE = 11,
103 COMPCODE_UNGT = 12,
104 COMPCODE_NE = 13,
105 COMPCODE_UNGE = 14,
106 COMPCODE_TRUE = 15
107 };
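
/* The bits are assigned as follows: bit 0 is LT, bit 1 is EQ, bit 2 is
   GT and bit 3 is UNORD, so every compound code above is the bitwise OR
   of its primitive parts, e.g.

     COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ                   (1|2 == 3)
     COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD  (1|4|8 == 13)

   and the logical negation of a predicate is its bitwise complement
   within these four bits, e.g. ~COMPCODE_LE & COMPCODE_TRUE is
   COMPCODE_UNGT.  */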

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (location_t, tree, tree, enum tree_code,
			tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
static tree fold_negate_expr (location_t, tree);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
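
/* For example, for INTEGER_CST operands ARG1 == 12 and ARG2 == 4 this
   returns the INTEGER_CST 3, while for ARG1 == 12 and ARG2 == 5 it
   returns NULL_TREE because the division leaves a remainder.  */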
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
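
/* A sketch (not a quotation from any caller) of how the deferral
   machinery above is typically used; RES and RES_IS_USED are
   hypothetical locals:

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, op0, op1);
     ...
     fold_undefer_overflow_warnings (res != NULL_TREE && res_is_used,
				     stmt, WARN_STRICT_OVERFLOW_MISC);  */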

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
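
/* For a 32-bit signed type the only value rejected above is INT_MIN
   (bit pattern 0x80000000), since -INT_MIN is not representable;
   wi::only_sign_bit_p detects exactly that pattern.  */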

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, while negating one of its
	 operands does, if n is a (possibly negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && wi::popcount (wi::abs (TREE_OPERAND (t, 0))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && wi::popcount (wi::abs (TREE_OPERAND (t, 1))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
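
/* An illustration of the HONOR_SIGNED_ZEROS checks above: with IEEE
   signed zeros, -(A - B) is not equivalent to B - A, because for
   A == B == 0.0 the former is -0.0 while the latter is +0.0.  */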

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
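
/* For example, negate_expr on the INTEGER_CST 5 yields the constant -5
   directly, while for an arbitrary variable reference X, for which
   fold_negate_expr finds no simplification, it falls back to building
   the tree -X as an explicit NEGATE_EXPR.  */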
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "Constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except for a literal,
   for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
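
/* A worked example: splitting IN == X + 3 with CODE == PLUS_EXPR stores
   3 in *LITP and returns X, whereas IN == X - 3 stores 3 in *MINUS_LITP
   instead; a TREE_CONSTANT but non-literal operand, such as an address
   constant, would be stored in *CONP.  */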

static tree
split_tree (location_t loc, tree in, tree type, enum tree_code code,
	    tree *conp, tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	{
	  /* Convert to TYPE before negating.  */
	  *conp = fold_convert_loc (loc, type, *conp);
	  *conp = negate_expr (*conp);
	}
      if (neg_var_p && var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  Convert to TYPE before negating.  */
      *minus_litp = build_one_cst (type);
      var = negate_expr (fold_convert_loc (loc, type, TREE_OPERAND (in, 0)));
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	{
	  /* Convert to TYPE before negating.  */
	  *conp = fold_convert_loc (loc, type, *conp);
	  *conp = negate_expr (*conp);
	}
      if (var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
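
/* For example, when T2 is the NEGATE_EXPR -Y and folding must be
   avoided, the result is built directly as T1 - Y rather than
   T1 + -Y.  */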
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
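
/* For example, int_const_binop (PLUS_EXPR, c1, c2) on 32-bit signed
   INTEGER_CSTs with values INT_MAX and 1 wraps around to INT_MIN, and
   because the addition overflowed in a signed type, force_fit_type
   marks the returned constant with TREE_OVERFLOW.  */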

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN value be a qNaN when
	     flag_signaling_nans is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN value be a qNaN when
	     flag_signaling_nans is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar shift count for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument, put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE, with ARG0 being constant.
   Return NULL_TREE if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
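
/* The convenience macros size_int, ssize_int, bitsize_int and
   sbitsize_int (see tree.h) wrap this function with the corresponding
   size_type_kind.  */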
\f
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
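
/* For example, for sizetype constants ARG0 == 4 and ARG1 == 7 the code
   above computes 0 - (7 - 4) in ssizetype, i.e. -3, rather than letting
   the unsigned subtraction wrap around first.  */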
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
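
  /* For example, converting the REAL_CST 1.0e30 to a 32-bit int yields
     INT_MAX, and converting a NaN yields 0; in both cases the result
     carries TREE_OVERFLOW, set via force_fit_type below.  */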
1889
1890 wide_int val;
1891 REAL_VALUE_TYPE r;
1892 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1893
1894 switch (code)
1895 {
1896 case FIX_TRUNC_EXPR:
1897 real_trunc (&r, VOIDmode, &x);
1898 break;
1899
1900 default:
1901 gcc_unreachable ();
1902 }
1903
1904 /* If R is NaN, return zero and show we have an overflow. */
1905 if (REAL_VALUE_ISNAN (r))
1906 {
1907 overflow = true;
1908 val = wi::zero (TYPE_PRECISION (type));
1909 }
1910
1911 /* See if R is less than the lower bound or greater than the
1912 upper bound. */
1913
1914 if (! overflow)
1915 {
1916 tree lt = TYPE_MIN_VALUE (type);
1917 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1918 if (real_less (&r, &l))
1919 {
1920 overflow = true;
1921 val = lt;
1922 }
1923 }
1924
1925 if (! overflow)
1926 {
1927 tree ut = TYPE_MAX_VALUE (type);
1928 if (ut)
1929 {
1930 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1931 if (real_less (&u, &r))
1932 {
1933 overflow = true;
1934 val = ut;
1935 }
1936 }
1937 }
1938
1939 if (! overflow)
1940 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1941
1942 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1943 return t;
1944 }
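/* An illustrative sketch of the saturation described above
   (hypothetical example under #if 0; assumes a 32-bit int): a REAL_CST
   beyond the target range clamps to the type's extreme value and sets
   TREE_OVERFLOW.  */
#if 0
static void
int_from_real_example (void)
{
  REAL_VALUE_TYPE r;
  real_from_string (&r, "1e30");
  tree t = fold_build1 (FIX_TRUNC_EXPR, integer_type_node,
                        build_real (double_type_node, r));
  /* 1e30 exceeds INT_MAX, so the result saturates to TYPE_MAX_VALUE.  */
  gcc_checking_assert (TREE_OVERFLOW (t));
  gcc_checking_assert (tree_int_cst_equal (t,
                                           TYPE_MAX_VALUE (integer_type_node)));
}
#endif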
1945
1946 /* A subroutine of fold_convert_const handling conversions of a
1947 FIXED_CST to an integer type. */
1948
1949 static tree
1950 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1951 {
1952 tree t;
1953 double_int temp, temp_trunc;
1954 unsigned int mode;
1955
1956 /* Right shift FIXED_CST to temp by fbit. */
1957 temp = TREE_FIXED_CST (arg1).data;
1958 mode = TREE_FIXED_CST (arg1).mode;
1959 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1960 {
1961 temp = temp.rshift (GET_MODE_FBIT (mode),
1962 HOST_BITS_PER_DOUBLE_INT,
1963 SIGNED_FIXED_POINT_MODE_P (mode));
1964
1965 /* Left shift temp to temp_trunc by fbit. */
1966 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1967 HOST_BITS_PER_DOUBLE_INT,
1968 SIGNED_FIXED_POINT_MODE_P (mode));
1969 }
1970 else
1971 {
1972 temp = double_int_zero;
1973 temp_trunc = double_int_zero;
1974 }
1975
1976 /* If FIXED_CST is negative, we need to round the value toward 0:
1977 if any of the truncated fractional bits are nonzero, add 1 to TEMP. */
1978 if (SIGNED_FIXED_POINT_MODE_P (mode)
1979 && temp_trunc.is_negative ()
1980 && TREE_FIXED_CST (arg1).data != temp_trunc)
1981 temp += double_int_one;
1982
1983 /* Given a fixed-point constant, make a new constant with the new type,
1984 appropriately sign-extended or truncated. */
1985 t = force_fit_type (type, temp, -1,
1986 (temp.is_negative ()
1987 && (TYPE_UNSIGNED (type)
1988 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1989 | TREE_OVERFLOW (arg1));
1990
1991 return t;
1992 }
1993
1994 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1995 to another floating point type. */
1996
1997 static tree
1998 fold_convert_const_real_from_real (tree type, const_tree arg1)
1999 {
2000 REAL_VALUE_TYPE value;
2001 tree t;
2002
2003 /* Don't perform the operation if flag_signaling_nans is on
2004 and the operand is a signaling NaN. */
2005 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2006 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2007 return NULL_TREE;
2008
2009 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2010 t = build_real (type, value);
2011
2012 /* If converting an infinity or NAN to a representation that doesn't
2013 have one, set the overflow bit so that we can produce some kind of
2014 error message at the appropriate point if necessary. It's not the
2015 most user-friendly message, but it's better than nothing. */
2016 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2017 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2018 TREE_OVERFLOW (t) = 1;
2019 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2020 && !MODE_HAS_NANS (TYPE_MODE (type)))
2021 TREE_OVERFLOW (t) = 1;
2022 /* Regular overflow: the conversion produced an infinity in a mode
2023 that can't represent infinities. */
2024 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2025 && REAL_VALUE_ISINF (value)
2026 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2027 TREE_OVERFLOW (t) = 1;
2028 else
2029 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2030 return t;
2031 }
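/* A small sketch of the narrowing path (hypothetical example under
   #if 0): real_convert rounds to the narrower format; for an exactly
   representable value the overflow flag is simply copied over.  */
#if 0
static void
real_from_real_example (void)
{
  /* 0.5 is exactly representable as float, so the conversion folds
     cleanly and TREE_OVERFLOW stays clear.  */
  tree f = fold_convert (float_type_node,
                         build_real (double_type_node, dconsthalf));
  gcc_checking_assert (TREE_CODE (f) == REAL_CST && !TREE_OVERFLOW (f));
}
#endif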
2032
2033 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2034 to a floating point type. */
2035
2036 static tree
2037 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2038 {
2039 REAL_VALUE_TYPE value;
2040 tree t;
2041
2042 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2043 t = build_real (type, value);
2044
2045 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2046 return t;
2047 }
2048
2049 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2050 to another fixed-point type. */
2051
2052 static tree
2053 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2054 {
2055 FIXED_VALUE_TYPE value;
2056 tree t;
2057 bool overflow_p;
2058
2059 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2060 TYPE_SATURATING (type));
2061 t = build_fixed (type, value);
2062
2063 /* Propagate overflow flags. */
2064 if (overflow_p | TREE_OVERFLOW (arg1))
2065 TREE_OVERFLOW (t) = 1;
2066 return t;
2067 }
2068
2069 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2070 to a fixed-point type. */
2071
2072 static tree
2073 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2074 {
2075 FIXED_VALUE_TYPE value;
2076 tree t;
2077 bool overflow_p;
2078 double_int di;
2079
2080 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2081
2082 di.low = TREE_INT_CST_ELT (arg1, 0);
2083 if (TREE_INT_CST_NUNITS (arg1) == 1)
2084 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2085 else
2086 di.high = TREE_INT_CST_ELT (arg1, 1);
2087
2088 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2089 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2090 TYPE_SATURATING (type));
2091 t = build_fixed (type, value);
2092
2093 /* Propagate overflow flags. */
2094 if (overflow_p | TREE_OVERFLOW (arg1))
2095 TREE_OVERFLOW (t) = 1;
2096 return t;
2097 }
2098
2099 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2100 to a fixed-point type. */
2101
2102 static tree
2103 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2104 {
2105 FIXED_VALUE_TYPE value;
2106 tree t;
2107 bool overflow_p;
2108
2109 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2110 &TREE_REAL_CST (arg1),
2111 TYPE_SATURATING (type));
2112 t = build_fixed (type, value);
2113
2114 /* Propagate overflow flags. */
2115 if (overflow_p | TREE_OVERFLOW (arg1))
2116 TREE_OVERFLOW (t) = 1;
2117 return t;
2118 }
2119
2120 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2121 type TYPE. If no simplification can be done return NULL_TREE. */
2122
2123 static tree
2124 fold_convert_const (enum tree_code code, tree type, tree arg1)
2125 {
2126 if (TREE_TYPE (arg1) == type)
2127 return arg1;
2128
2129 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2130 || TREE_CODE (type) == OFFSET_TYPE)
2131 {
2132 if (TREE_CODE (arg1) == INTEGER_CST)
2133 return fold_convert_const_int_from_int (type, arg1);
2134 else if (TREE_CODE (arg1) == REAL_CST)
2135 return fold_convert_const_int_from_real (code, type, arg1);
2136 else if (TREE_CODE (arg1) == FIXED_CST)
2137 return fold_convert_const_int_from_fixed (type, arg1);
2138 }
2139 else if (TREE_CODE (type) == REAL_TYPE)
2140 {
2141 if (TREE_CODE (arg1) == INTEGER_CST)
2142 return build_real_from_int_cst (type, arg1);
2143 else if (TREE_CODE (arg1) == REAL_CST)
2144 return fold_convert_const_real_from_real (type, arg1);
2145 else if (TREE_CODE (arg1) == FIXED_CST)
2146 return fold_convert_const_real_from_fixed (type, arg1);
2147 }
2148 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2149 {
2150 if (TREE_CODE (arg1) == FIXED_CST)
2151 return fold_convert_const_fixed_from_fixed (type, arg1);
2152 else if (TREE_CODE (arg1) == INTEGER_CST)
2153 return fold_convert_const_fixed_from_int (type, arg1);
2154 else if (TREE_CODE (arg1) == REAL_CST)
2155 return fold_convert_const_fixed_from_real (type, arg1);
2156 }
2157 else if (TREE_CODE (type) == VECTOR_TYPE)
2158 {
2159 if (TREE_CODE (arg1) == VECTOR_CST
2160 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2161 {
2162 int len = TYPE_VECTOR_SUBPARTS (type);
2163 tree elttype = TREE_TYPE (type);
2164 tree *v = XALLOCAVEC (tree, len);
2165 for (int i = 0; i < len; ++i)
2166 {
2167 tree elt = VECTOR_CST_ELT (arg1, i);
2168 tree cvt = fold_convert_const (code, elttype, elt);
2169 if (cvt == NULL_TREE)
2170 return NULL_TREE;
2171 v[i] = cvt;
2172 }
2173 return build_vector (type, v);
2174 }
2175 }
2176 return NULL_TREE;
2177 }
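/* An illustrative sketch of the dispatch above (hypothetical example
   under #if 0): the routine routes purely on the target type and the
   constant's code, and returns NULL_TREE for any combination it has
   no rule for, leaving the conversion expression to the caller.  */
#if 0
static void
fold_convert_const_example (void)
{
  /* Integral target, INTEGER_CST operand: folded to a constant.  */
  gcc_checking_assert (fold_convert_const (NOP_EXPR, long_integer_type_node,
                                           integer_one_node) != NULL_TREE);
}
#endif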
2178
2179 /* Construct a vector of zero elements of vector type TYPE. */
2180
2181 static tree
2182 build_zero_vector (tree type)
2183 {
2184 tree t;
2185
2186 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2187 return build_vector_from_val (type, t);
2188 }
2189
2190 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2191
2192 bool
2193 fold_convertible_p (const_tree type, const_tree arg)
2194 {
2195 tree orig = TREE_TYPE (arg);
2196
2197 if (type == orig)
2198 return true;
2199
2200 if (TREE_CODE (arg) == ERROR_MARK
2201 || TREE_CODE (type) == ERROR_MARK
2202 || TREE_CODE (orig) == ERROR_MARK)
2203 return false;
2204
2205 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2206 return true;
2207
2208 switch (TREE_CODE (type))
2209 {
2210 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2211 case POINTER_TYPE: case REFERENCE_TYPE:
2212 case OFFSET_TYPE:
2213 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2214 || TREE_CODE (orig) == OFFSET_TYPE);
2215
2216 case REAL_TYPE:
2217 case FIXED_POINT_TYPE:
2218 case VECTOR_TYPE:
2219 case VOID_TYPE:
2220 return TREE_CODE (type) == TREE_CODE (orig);
2221
2222 default:
2223 return false;
2224 }
2225 }
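/* A minimal sketch of the predicate (hypothetical example under
   #if 0): integral and pointer types are freely inter-convertible
   via NOP_EXPR, whereas an int operand cannot be NOP-converted to a
   REAL_TYPE.  */
#if 0
static void
fold_convertible_p_example (void)
{
  tree i = build_int_cst (integer_type_node, 42);
  gcc_checking_assert (fold_convertible_p (long_integer_type_node, i));
  /* A REAL_TYPE target requires a REAL_TYPE original.  */
  gcc_checking_assert (!fold_convertible_p (double_type_node, i));
}
#endif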
2226
2227 /* Convert expression ARG to type TYPE. Used by the middle-end for
2228 simple conversions in preference to calling the front-end's convert. */
2229
2230 tree
2231 fold_convert_loc (location_t loc, tree type, tree arg)
2232 {
2233 tree orig = TREE_TYPE (arg);
2234 tree tem;
2235
2236 if (type == orig)
2237 return arg;
2238
2239 if (TREE_CODE (arg) == ERROR_MARK
2240 || TREE_CODE (type) == ERROR_MARK
2241 || TREE_CODE (orig) == ERROR_MARK)
2242 return error_mark_node;
2243
2244 switch (TREE_CODE (type))
2245 {
2246 case POINTER_TYPE:
2247 case REFERENCE_TYPE:
2248 /* Handle conversions between pointers to different address spaces. */
2249 if (POINTER_TYPE_P (orig)
2250 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2251 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2252 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2253 /* fall through */
2254
2255 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2256 case OFFSET_TYPE:
2257 if (TREE_CODE (arg) == INTEGER_CST)
2258 {
2259 tem = fold_convert_const (NOP_EXPR, type, arg);
2260 if (tem != NULL_TREE)
2261 return tem;
2262 }
2263 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2264 || TREE_CODE (orig) == OFFSET_TYPE)
2265 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2266 if (TREE_CODE (orig) == COMPLEX_TYPE)
2267 return fold_convert_loc (loc, type,
2268 fold_build1_loc (loc, REALPART_EXPR,
2269 TREE_TYPE (orig), arg));
2270 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2271 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2272 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2273
2274 case REAL_TYPE:
2275 if (TREE_CODE (arg) == INTEGER_CST)
2276 {
2277 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2278 if (tem != NULL_TREE)
2279 return tem;
2280 }
2281 else if (TREE_CODE (arg) == REAL_CST)
2282 {
2283 tem = fold_convert_const (NOP_EXPR, type, arg);
2284 if (tem != NULL_TREE)
2285 return tem;
2286 }
2287 else if (TREE_CODE (arg) == FIXED_CST)
2288 {
2289 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2290 if (tem != NULL_TREE)
2291 return tem;
2292 }
2293
2294 switch (TREE_CODE (orig))
2295 {
2296 case INTEGER_TYPE:
2297 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2298 case POINTER_TYPE: case REFERENCE_TYPE:
2299 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2300
2301 case REAL_TYPE:
2302 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2303
2304 case FIXED_POINT_TYPE:
2305 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2306
2307 case COMPLEX_TYPE:
2308 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2309 return fold_convert_loc (loc, type, tem);
2310
2311 default:
2312 gcc_unreachable ();
2313 }
2314
2315 case FIXED_POINT_TYPE:
2316 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2317 || TREE_CODE (arg) == REAL_CST)
2318 {
2319 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2320 if (tem != NULL_TREE)
2321 goto fold_convert_exit;
2322 }
2323
2324 switch (TREE_CODE (orig))
2325 {
2326 case FIXED_POINT_TYPE:
2327 case INTEGER_TYPE:
2328 case ENUMERAL_TYPE:
2329 case BOOLEAN_TYPE:
2330 case REAL_TYPE:
2331 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2332
2333 case COMPLEX_TYPE:
2334 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2335 return fold_convert_loc (loc, type, tem);
2336
2337 default:
2338 gcc_unreachable ();
2339 }
2340
2341 case COMPLEX_TYPE:
2342 switch (TREE_CODE (orig))
2343 {
2344 case INTEGER_TYPE:
2345 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2346 case POINTER_TYPE: case REFERENCE_TYPE:
2347 case REAL_TYPE:
2348 case FIXED_POINT_TYPE:
2349 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2350 fold_convert_loc (loc, TREE_TYPE (type), arg),
2351 fold_convert_loc (loc, TREE_TYPE (type),
2352 integer_zero_node));
2353 case COMPLEX_TYPE:
2354 {
2355 tree rpart, ipart;
2356
2357 if (TREE_CODE (arg) == COMPLEX_EXPR)
2358 {
2359 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2360 TREE_OPERAND (arg, 0));
2361 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2362 TREE_OPERAND (arg, 1));
2363 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2364 }
2365
2366 arg = save_expr (arg);
2367 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2368 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2369 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2370 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2371 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2372 }
2373
2374 default:
2375 gcc_unreachable ();
2376 }
2377
2378 case VECTOR_TYPE:
2379 if (integer_zerop (arg))
2380 return build_zero_vector (type);
2381 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2382 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2383 || TREE_CODE (orig) == VECTOR_TYPE);
2384 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2385
2386 case VOID_TYPE:
2387 tem = fold_ignored_result (arg);
2388 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2389
2390 default:
2391 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2392 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2393 gcc_unreachable ();
2394 }
2395 fold_convert_exit:
2396 protected_set_expr_location_unshare (tem, loc);
2397 return tem;
2398 }
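/* An illustrative sketch of the COMPLEX_TYPE case above (hypothetical
   example under #if 0): a scalar is paired with a converted zero as
   the imaginary part; two constant parts fold to a COMPLEX_CST.  */
#if 0
static void
fold_convert_complex_example (void)
{
  tree c = fold_convert (complex_double_type_node, integer_one_node);
  /* 1 becomes 1.0 + 0.0i.  */
  gcc_checking_assert (TREE_CODE (c) == COMPLEX_CST
                       || TREE_CODE (c) == COMPLEX_EXPR);
}
#endif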
2399 \f
2400 /* Return false if expr can be assumed not to be an lvalue, true
2401 otherwise. */
2402
2403 static bool
2404 maybe_lvalue_p (const_tree x)
2405 {
2406 /* We only need to wrap lvalue tree codes. */
2407 switch (TREE_CODE (x))
2408 {
2409 case VAR_DECL:
2410 case PARM_DECL:
2411 case RESULT_DECL:
2412 case LABEL_DECL:
2413 case FUNCTION_DECL:
2414 case SSA_NAME:
2415
2416 case COMPONENT_REF:
2417 case MEM_REF:
2418 case INDIRECT_REF:
2419 case ARRAY_REF:
2420 case ARRAY_RANGE_REF:
2421 case BIT_FIELD_REF:
2422 case OBJ_TYPE_REF:
2423
2424 case REALPART_EXPR:
2425 case IMAGPART_EXPR:
2426 case PREINCREMENT_EXPR:
2427 case PREDECREMENT_EXPR:
2428 case SAVE_EXPR:
2429 case TRY_CATCH_EXPR:
2430 case WITH_CLEANUP_EXPR:
2431 case COMPOUND_EXPR:
2432 case MODIFY_EXPR:
2433 case TARGET_EXPR:
2434 case COND_EXPR:
2435 case BIND_EXPR:
2436 break;
2437
2438 default:
2439 /* Assume the worst for front-end tree codes. */
2440 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2441 break;
2442 return false;
2443 }
2444
2445 return true;
2446 }
2447
2448 /* Return an expr equal to X but certainly not valid as an lvalue. */
2449
2450 tree
2451 non_lvalue_loc (location_t loc, tree x)
2452 {
2453 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2454 us. */
2455 if (in_gimple_form)
2456 return x;
2457
2458 if (! maybe_lvalue_p (x))
2459 return x;
2460 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2461 }
2462
2463 /* When pedantic, return an expr equal to X but certainly not valid as a
2464 pedantic lvalue. Otherwise, return X. */
2465
2466 static tree
2467 pedantic_non_lvalue_loc (location_t loc, tree x)
2468 {
2469 return protected_set_expr_location_unshare (x, loc);
2470 }
2471 \f
2472 /* Given a tree comparison code, return the code that is the logical inverse.
2473 It is generally not safe to do this for floating-point comparisons, except
2474 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2475 ERROR_MARK in this case. */
2476
2477 enum tree_code
2478 invert_tree_comparison (enum tree_code code, bool honor_nans)
2479 {
2480 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2481 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2482 return ERROR_MARK;
2483
2484 switch (code)
2485 {
2486 case EQ_EXPR:
2487 return NE_EXPR;
2488 case NE_EXPR:
2489 return EQ_EXPR;
2490 case GT_EXPR:
2491 return honor_nans ? UNLE_EXPR : LE_EXPR;
2492 case GE_EXPR:
2493 return honor_nans ? UNLT_EXPR : LT_EXPR;
2494 case LT_EXPR:
2495 return honor_nans ? UNGE_EXPR : GE_EXPR;
2496 case LE_EXPR:
2497 return honor_nans ? UNGT_EXPR : GT_EXPR;
2498 case LTGT_EXPR:
2499 return UNEQ_EXPR;
2500 case UNEQ_EXPR:
2501 return LTGT_EXPR;
2502 case UNGT_EXPR:
2503 return LE_EXPR;
2504 case UNGE_EXPR:
2505 return LT_EXPR;
2506 case UNLT_EXPR:
2507 return GE_EXPR;
2508 case UNLE_EXPR:
2509 return GT_EXPR;
2510 case ORDERED_EXPR:
2511 return UNORDERED_EXPR;
2512 case UNORDERED_EXPR:
2513 return ORDERED_EXPR;
2514 default:
2515 gcc_unreachable ();
2516 }
2517 }
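/* A small sketch of the NaN-aware inversion (hypothetical example
   under #if 0): without NaNs the inverse of LT is GE; with NaNs it is
   UNGE, unless trapping math forbids the rewrite entirely.  */
#if 0
static void
invert_tree_comparison_example (void)
{
  gcc_checking_assert (invert_tree_comparison (LT_EXPR, false) == GE_EXPR);
  gcc_checking_assert (invert_tree_comparison (LT_EXPR, true)
                       == (flag_trapping_math ? ERROR_MARK : UNGE_EXPR));
}
#endif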
2518
2519 /* Similar, but return the comparison that results if the operands are
2520 swapped. This is safe for floating-point. */
2521
2522 enum tree_code
2523 swap_tree_comparison (enum tree_code code)
2524 {
2525 switch (code)
2526 {
2527 case EQ_EXPR:
2528 case NE_EXPR:
2529 case ORDERED_EXPR:
2530 case UNORDERED_EXPR:
2531 case LTGT_EXPR:
2532 case UNEQ_EXPR:
2533 return code;
2534 case GT_EXPR:
2535 return LT_EXPR;
2536 case GE_EXPR:
2537 return LE_EXPR;
2538 case LT_EXPR:
2539 return GT_EXPR;
2540 case LE_EXPR:
2541 return GE_EXPR;
2542 case UNGT_EXPR:
2543 return UNLT_EXPR;
2544 case UNGE_EXPR:
2545 return UNLE_EXPR;
2546 case UNLT_EXPR:
2547 return UNGT_EXPR;
2548 case UNLE_EXPR:
2549 return UNGE_EXPR;
2550 default:
2551 gcc_unreachable ();
2552 }
2553 }
2554
2555
2556 /* Convert a comparison tree code from an enum tree_code representation
2557 into a compcode bit-based encoding. This function is the inverse of
2558 compcode_to_comparison. */
2559
2560 static enum comparison_code
2561 comparison_to_compcode (enum tree_code code)
2562 {
2563 switch (code)
2564 {
2565 case LT_EXPR:
2566 return COMPCODE_LT;
2567 case EQ_EXPR:
2568 return COMPCODE_EQ;
2569 case LE_EXPR:
2570 return COMPCODE_LE;
2571 case GT_EXPR:
2572 return COMPCODE_GT;
2573 case NE_EXPR:
2574 return COMPCODE_NE;
2575 case GE_EXPR:
2576 return COMPCODE_GE;
2577 case ORDERED_EXPR:
2578 return COMPCODE_ORD;
2579 case UNORDERED_EXPR:
2580 return COMPCODE_UNORD;
2581 case UNLT_EXPR:
2582 return COMPCODE_UNLT;
2583 case UNEQ_EXPR:
2584 return COMPCODE_UNEQ;
2585 case UNLE_EXPR:
2586 return COMPCODE_UNLE;
2587 case UNGT_EXPR:
2588 return COMPCODE_UNGT;
2589 case LTGT_EXPR:
2590 return COMPCODE_LTGT;
2591 case UNGE_EXPR:
2592 return COMPCODE_UNGE;
2593 default:
2594 gcc_unreachable ();
2595 }
2596 }
2597
2598 /* Convert a compcode bit-based encoding of a comparison operator back
2599 to GCC's enum tree_code representation. This function is the
2600 inverse of comparison_to_compcode. */
2601
2602 static enum tree_code
2603 compcode_to_comparison (enum comparison_code code)
2604 {
2605 switch (code)
2606 {
2607 case COMPCODE_LT:
2608 return LT_EXPR;
2609 case COMPCODE_EQ:
2610 return EQ_EXPR;
2611 case COMPCODE_LE:
2612 return LE_EXPR;
2613 case COMPCODE_GT:
2614 return GT_EXPR;
2615 case COMPCODE_NE:
2616 return NE_EXPR;
2617 case COMPCODE_GE:
2618 return GE_EXPR;
2619 case COMPCODE_ORD:
2620 return ORDERED_EXPR;
2621 case COMPCODE_UNORD:
2622 return UNORDERED_EXPR;
2623 case COMPCODE_UNLT:
2624 return UNLT_EXPR;
2625 case COMPCODE_UNEQ:
2626 return UNEQ_EXPR;
2627 case COMPCODE_UNLE:
2628 return UNLE_EXPR;
2629 case COMPCODE_UNGT:
2630 return UNGT_EXPR;
2631 case COMPCODE_LTGT:
2632 return LTGT_EXPR;
2633 case COMPCODE_UNGE:
2634 return UNGE_EXPR;
2635 default:
2636 gcc_unreachable ();
2637 }
2638 }
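/* An illustrative sketch of the encoding (hypothetical example under
   #if 0): each compcode is the set of possible outcomes {LT, EQ, GT,
   UNORDERED} for which the comparison holds, so combining two
   comparisons is plain bitwise set arithmetic.  */
#if 0
static void
compcode_example (void)
{
  /* x <= y holds exactly when x < y or x == y.  */
  gcc_checking_assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);
  /* x < y and x == y can never hold together.  */
  gcc_checking_assert ((COMPCODE_LT & COMPCODE_EQ) == COMPCODE_FALSE);
}
#endif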
2639
2640 /* Return a tree for the comparison which is the combination of
2641 doing the AND or OR (depending on CODE) of the two operations LCODE
2642 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2643 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2644 if this makes the transformation invalid. */
2645
2646 tree
2647 combine_comparisons (location_t loc,
2648 enum tree_code code, enum tree_code lcode,
2649 enum tree_code rcode, tree truth_type,
2650 tree ll_arg, tree lr_arg)
2651 {
2652 bool honor_nans = HONOR_NANS (ll_arg);
2653 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2654 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2655 int compcode;
2656
2657 switch (code)
2658 {
2659 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2660 compcode = lcompcode & rcompcode;
2661 break;
2662
2663 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2664 compcode = lcompcode | rcompcode;
2665 break;
2666
2667 default:
2668 return NULL_TREE;
2669 }
2670
2671 if (!honor_nans)
2672 {
2673 /* Eliminate unordered comparisons, as well as LTGT and ORD
2674 which are not used unless the mode has NaNs. */
2675 compcode &= ~COMPCODE_UNORD;
2676 if (compcode == COMPCODE_LTGT)
2677 compcode = COMPCODE_NE;
2678 else if (compcode == COMPCODE_ORD)
2679 compcode = COMPCODE_TRUE;
2680 }
2681 else if (flag_trapping_math)
2682 {
2683 /* Check that the original operation and the optimized ones will trap
2684 under the same condition. */
2685 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2686 && (lcompcode != COMPCODE_EQ)
2687 && (lcompcode != COMPCODE_ORD);
2688 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2689 && (rcompcode != COMPCODE_EQ)
2690 && (rcompcode != COMPCODE_ORD);
2691 bool trap = (compcode & COMPCODE_UNORD) == 0
2692 && (compcode != COMPCODE_EQ)
2693 && (compcode != COMPCODE_ORD);
2694
2695 /* In a short-circuited boolean expression the LHS might be
2696 such that the RHS, if evaluated, will never trap. For
2697 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2698 if neither x nor y is NaN. (This is a mixed blessing: for
2699 example, the expression above will never trap, hence
2700 optimizing it to x < y would be invalid). */
2701 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2702 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2703 rtrap = false;
2704
2705 /* If the comparison was short-circuited, and only the RHS
2706 trapped, we may now generate a spurious trap. */
2707 if (rtrap && !ltrap
2708 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2709 return NULL_TREE;
2710
2711 /* If we changed the conditions that cause a trap, we lose. */
2712 if ((ltrap || rtrap) != trap)
2713 return NULL_TREE;
2714 }
2715
2716 if (compcode == COMPCODE_TRUE)
2717 return constant_boolean_node (true, truth_type);
2718 else if (compcode == COMPCODE_FALSE)
2719 return constant_boolean_node (false, truth_type);
2720 else
2721 {
2722 enum tree_code tcode;
2723
2724 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2725 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2726 }
2727 }
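/* A minimal usage sketch (hypothetical X and Y, under #if 0): OR-ing
   x < y with x == y merges the LT and EQ compcodes into LE, so for
   non-trapping operands the result is simply x <= y.  */
#if 0
static tree
combine_comparisons_example (location_t loc, tree x, tree y)
{
  /* For integral X and Y this returns fold_build2 (LE_EXPR, ...).  */
  return combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                              boolean_type_node, x, y);
}
#endif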
2728 \f
2729 /* Return nonzero if two operands (typically of the same tree node)
2730 are necessarily equal. FLAGS modifies behavior as follows:
2731
2732 If OEP_ONLY_CONST is set, only return nonzero for constants.
2733 This function tests whether the operands are indistinguishable;
2734 it does not test whether they are equal using C's == operation.
2735 The distinction is important for IEEE floating point, because
2736 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2737 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2738
2739 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2740 even though it may hold multiple values during a function.
2741 This is because a GCC tree node guarantees that nothing else is
2742 executed between the evaluation of its "operands" (which may often
2743 be evaluated in arbitrary order). Hence if the operands themselves
2744 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2745 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2746 unset means assuming isochronic (or instantaneous) tree equivalence.
2747 Unless comparing arbitrary expression trees, such as from different
2748 statements, this flag can usually be left unset.
2749
2750 If OEP_PURE_SAME is set, then pure functions with identical arguments
2751 are considered the same. It is used when the caller has other ways
2752 to ensure that global memory is unchanged in between.
2753
2754 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2755 not values of expressions.
2756
2757 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2758 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2759
2760 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2761 any operand with side effects. This is unnecessarily conservative in the
2762 case we know that arg0 and arg1 are in disjoint code paths (such as in
2763 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2764 addresses with TREE_CONSTANT flag set so we know that &var == &var
2765 even if var is volatile. */
2766
2767 int
2768 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2769 {
2770 /* When checking, verify at the outermost operand_equal_p call that
2771 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2772 hash value. */
2773 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2774 {
2775 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2776 {
2777 if (arg0 != arg1)
2778 {
2779 inchash::hash hstate0 (0), hstate1 (0);
2780 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2781 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2782 hashval_t h0 = hstate0.end ();
2783 hashval_t h1 = hstate1.end ();
2784 gcc_assert (h0 == h1);
2785 }
2786 return 1;
2787 }
2788 else
2789 return 0;
2790 }
2791
2792 /* If either is ERROR_MARK, they aren't equal. */
2793 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2794 || TREE_TYPE (arg0) == error_mark_node
2795 || TREE_TYPE (arg1) == error_mark_node)
2796 return 0;
2797
2798 /* Similar, if either does not have a type (like a released SSA name),
2799 they aren't equal. */
2800 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2801 return 0;
2802
2803 /* We cannot consider pointers to different address space equal. */
2804 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2805 && POINTER_TYPE_P (TREE_TYPE (arg1))
2806 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2807 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2808 return 0;
2809
2810 /* Check equality of integer constants before bailing out due to
2811 precision differences. */
2812 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2813 {
2814 /* Address of INTEGER_CST is not defined; check that we did not forget
2815 to drop the OEP_ADDRESS_OF flag. */
2816 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2817 return tree_int_cst_equal (arg0, arg1);
2818 }
2819
2820 if (!(flags & OEP_ADDRESS_OF))
2821 {
2822 /* If both types don't have the same signedness, then we can't consider
2823 them equal. We must check this before the STRIP_NOPS calls
2824 because they may change the signedness of the arguments. As pointers
2825 strictly don't have a signedness, require either two pointers or
2826 two non-pointers as well. */
2827 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2828 || POINTER_TYPE_P (TREE_TYPE (arg0))
2829 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2830 return 0;
2831
2832 /* If both types don't have the same precision, then it is not safe
2833 to strip NOPs. */
2834 if (element_precision (TREE_TYPE (arg0))
2835 != element_precision (TREE_TYPE (arg1)))
2836 return 0;
2837
2838 STRIP_NOPS (arg0);
2839 STRIP_NOPS (arg1);
2840 }
2841 #if 0
2842 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2843 sanity check once the issue is solved. */
2844 else
2845 /* Addresses of conversions and SSA_NAMEs (and many other things)
2846 are not defined. Check that we did not forget to drop the
2847 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2848 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2849 && TREE_CODE (arg0) != SSA_NAME);
2850 #endif
2851
2852 /* In case both args are comparisons but with different comparison
2853 code, try to swap the comparison operands of one arg to produce
2854 a match and compare that variant. */
2855 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2856 && COMPARISON_CLASS_P (arg0)
2857 && COMPARISON_CLASS_P (arg1))
2858 {
2859 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2860
2861 if (TREE_CODE (arg0) == swap_code)
2862 return operand_equal_p (TREE_OPERAND (arg0, 0),
2863 TREE_OPERAND (arg1, 1), flags)
2864 && operand_equal_p (TREE_OPERAND (arg0, 1),
2865 TREE_OPERAND (arg1, 0), flags);
2866 }
2867
2868 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2869 {
2870 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2871 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2872 ;
2873 else if (flags & OEP_ADDRESS_OF)
2874 {
2875 /* If we are interested in comparing addresses ignore
2876 MEM_REF wrappings of the base that can appear just for
2877 TBAA reasons. */
2878 if (TREE_CODE (arg0) == MEM_REF
2879 && DECL_P (arg1)
2880 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2881 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2882 && integer_zerop (TREE_OPERAND (arg0, 1)))
2883 return 1;
2884 else if (TREE_CODE (arg1) == MEM_REF
2885 && DECL_P (arg0)
2886 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2887 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2888 && integer_zerop (TREE_OPERAND (arg1, 1)))
2889 return 1;
2890 return 0;
2891 }
2892 else
2893 return 0;
2894 }
2895
2896 /* When not checking addresses, this is needed for conversions and for
2897 COMPONENT_REF. Might as well play it safe and always test this. */
2898 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2899 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2900 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2901 && !(flags & OEP_ADDRESS_OF)))
2902 return 0;
2903
2904 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2905 We don't care about side effects in that case because the SAVE_EXPR
2906 takes care of that for us. In all other cases, two expressions are
2907 equal if they have no side effects. If we have two identical
2908 expressions with side effects that should be treated the same due
2909 to the only side effects being identical SAVE_EXPR's, that will
2910 be detected in the recursive calls below.
2911 If we are taking an invariant address of two identical objects
2912 they are necessarily equal as well. */
2913 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2914 && (TREE_CODE (arg0) == SAVE_EXPR
2915 || (flags & OEP_MATCH_SIDE_EFFECTS)
2916 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2917 return 1;
2918
2919 /* Next handle constant cases, those for which we can return 1 even
2920 if ONLY_CONST is set. */
2921 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2922 switch (TREE_CODE (arg0))
2923 {
2924 case INTEGER_CST:
2925 return tree_int_cst_equal (arg0, arg1);
2926
2927 case FIXED_CST:
2928 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2929 TREE_FIXED_CST (arg1));
2930
2931 case REAL_CST:
2932 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2933 return 1;
2934
2935
2936 if (!HONOR_SIGNED_ZEROS (arg0))
2937 {
2938 /* If we do not distinguish between signed and unsigned zero,
2939 consider them equal. */
2940 if (real_zerop (arg0) && real_zerop (arg1))
2941 return 1;
2942 }
2943 return 0;
2944
2945 case VECTOR_CST:
2946 {
2947 unsigned i;
2948
2949 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2950 return 0;
2951
2952 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2953 {
2954 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2955 VECTOR_CST_ELT (arg1, i), flags))
2956 return 0;
2957 }
2958 return 1;
2959 }
2960
2961 case COMPLEX_CST:
2962 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2963 flags)
2964 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2965 flags));
2966
2967 case STRING_CST:
2968 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2969 && ! memcmp (TREE_STRING_POINTER (arg0),
2970 TREE_STRING_POINTER (arg1),
2971 TREE_STRING_LENGTH (arg0)));
2972
2973 case ADDR_EXPR:
2974 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2975 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2976 flags | OEP_ADDRESS_OF
2977 | OEP_MATCH_SIDE_EFFECTS);
2978 case CONSTRUCTOR:
2979 /* In GIMPLE empty constructors are allowed in initializers of
2980 aggregates. */
2981 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2982 default:
2983 break;
2984 }
2985
2986 if (flags & OEP_ONLY_CONST)
2987 return 0;
2988
2989 /* Define macros to test an operand from arg0 and arg1 for equality and a
2990 variant that allows null and views null as being different from any
2991 non-null value. In the latter case, if either is null, then both
2992 must be; otherwise, do the normal comparison. */
2993 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2994 TREE_OPERAND (arg1, N), flags)
2995
2996 #define OP_SAME_WITH_NULL(N) \
2997 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2998 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2999
3000 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3001 {
3002 case tcc_unary:
3003 /* Two conversions are equal only if signedness and modes match. */
3004 switch (TREE_CODE (arg0))
3005 {
3006 CASE_CONVERT:
3007 case FIX_TRUNC_EXPR:
3008 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3009 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3010 return 0;
3011 break;
3012 default:
3013 break;
3014 }
3015
3016 return OP_SAME (0);
3017
3018
3019 case tcc_comparison:
3020 case tcc_binary:
3021 if (OP_SAME (0) && OP_SAME (1))
3022 return 1;
3023
3024 /* For commutative ops, allow the other order. */
3025 return (commutative_tree_code (TREE_CODE (arg0))
3026 && operand_equal_p (TREE_OPERAND (arg0, 0),
3027 TREE_OPERAND (arg1, 1), flags)
3028 && operand_equal_p (TREE_OPERAND (arg0, 1),
3029 TREE_OPERAND (arg1, 0), flags));
3030
3031 case tcc_reference:
3032 /* If either of the pointer (or reference) expressions we are
3033 dereferencing contain a side effect, these cannot be equal,
3034 but their addresses can be. */
3035 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3036 && (TREE_SIDE_EFFECTS (arg0)
3037 || TREE_SIDE_EFFECTS (arg1)))
3038 return 0;
3039
3040 switch (TREE_CODE (arg0))
3041 {
3042 case INDIRECT_REF:
3043 if (!(flags & OEP_ADDRESS_OF)
3044 && (TYPE_ALIGN (TREE_TYPE (arg0))
3045 != TYPE_ALIGN (TREE_TYPE (arg1))))
3046 return 0;
3047 flags &= ~OEP_ADDRESS_OF;
3048 return OP_SAME (0);
3049
3050 case IMAGPART_EXPR:
3051 /* Require the same offset. */
3052 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3053 TYPE_SIZE (TREE_TYPE (arg1)),
3054 flags & ~OEP_ADDRESS_OF))
3055 return 0;
3056
3057 /* Fallthru. */
3058 case REALPART_EXPR:
3059 case VIEW_CONVERT_EXPR:
3060 return OP_SAME (0);
3061
3062 case TARGET_MEM_REF:
3063 case MEM_REF:
3064 if (!(flags & OEP_ADDRESS_OF))
3065 {
3066 /* Require equal access sizes */
3067 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3068 && (!TYPE_SIZE (TREE_TYPE (arg0))
3069 || !TYPE_SIZE (TREE_TYPE (arg1))
3070 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3071 TYPE_SIZE (TREE_TYPE (arg1)),
3072 flags)))
3073 return 0;
3074 /* Verify that access happens in similar types. */
3075 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3076 return 0;
3077 /* Verify that accesses are TBAA compatible. */
3078 if (!alias_ptr_types_compatible_p
3079 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3080 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3081 || (MR_DEPENDENCE_CLIQUE (arg0)
3082 != MR_DEPENDENCE_CLIQUE (arg1))
3083 || (MR_DEPENDENCE_BASE (arg0)
3084 != MR_DEPENDENCE_BASE (arg1)))
3085 return 0;
3086 /* Verify that alignment is compatible. */
3087 if (TYPE_ALIGN (TREE_TYPE (arg0))
3088 != TYPE_ALIGN (TREE_TYPE (arg1)))
3089 return 0;
3090 }
3091 flags &= ~OEP_ADDRESS_OF;
3092 return (OP_SAME (0) && OP_SAME (1)
3093 /* TARGET_MEM_REFs require equal extra operands. */
3094 && (TREE_CODE (arg0) != TARGET_MEM_REF
3095 || (OP_SAME_WITH_NULL (2)
3096 && OP_SAME_WITH_NULL (3)
3097 && OP_SAME_WITH_NULL (4))));
3098
3099 case ARRAY_REF:
3100 case ARRAY_RANGE_REF:
3101 if (!OP_SAME (0))
3102 return 0;
3103 flags &= ~OEP_ADDRESS_OF;
3104 /* Compare the array index by value first if it is constant, as the
3105 indexes may have different types but the same value here. */
3106 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3107 TREE_OPERAND (arg1, 1))
3108 || OP_SAME (1))
3109 && OP_SAME_WITH_NULL (2)
3110 && OP_SAME_WITH_NULL (3)
3111 /* Compare low bound and element size as with OEP_ADDRESS_OF
3112 we have to account for the offset of the ref. */
3113 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3114 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3115 || (operand_equal_p (array_ref_low_bound
3116 (CONST_CAST_TREE (arg0)),
3117 array_ref_low_bound
3118 (CONST_CAST_TREE (arg1)), flags)
3119 && operand_equal_p (array_ref_element_size
3120 (CONST_CAST_TREE (arg0)),
3121 array_ref_element_size
3122 (CONST_CAST_TREE (arg1)),
3123 flags))));
3124
3125 case COMPONENT_REF:
3126 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3127 may be NULL when we're called to compare MEM_EXPRs. */
3128 if (!OP_SAME_WITH_NULL (0)
3129 || !OP_SAME (1))
3130 return 0;
3131 flags &= ~OEP_ADDRESS_OF;
3132 return OP_SAME_WITH_NULL (2);
3133
3134 case BIT_FIELD_REF:
3135 if (!OP_SAME (0))
3136 return 0;
3137 flags &= ~OEP_ADDRESS_OF;
3138 return OP_SAME (1) && OP_SAME (2);
3139
3140 default:
3141 return 0;
3142 }
3143
3144 case tcc_expression:
3145 switch (TREE_CODE (arg0))
3146 {
3147 case ADDR_EXPR:
3148 /* Be sure we pass right ADDRESS_OF flag. */
3149 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3150 return operand_equal_p (TREE_OPERAND (arg0, 0),
3151 TREE_OPERAND (arg1, 0),
3152 flags | OEP_ADDRESS_OF);
3153
3154 case TRUTH_NOT_EXPR:
3155 return OP_SAME (0);
3156
3157 case TRUTH_ANDIF_EXPR:
3158 case TRUTH_ORIF_EXPR:
3159 return OP_SAME (0) && OP_SAME (1);
3160
3161 case FMA_EXPR:
3162 case WIDEN_MULT_PLUS_EXPR:
3163 case WIDEN_MULT_MINUS_EXPR:
3164 if (!OP_SAME (2))
3165 return 0;
3166 /* The multiplication operands are commutative. */
3167 /* FALLTHRU */
3168
3169 case TRUTH_AND_EXPR:
3170 case TRUTH_OR_EXPR:
3171 case TRUTH_XOR_EXPR:
3172 if (OP_SAME (0) && OP_SAME (1))
3173 return 1;
3174
3175 /* Otherwise take into account this is a commutative operation. */
3176 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3177 TREE_OPERAND (arg1, 1), flags)
3178 && operand_equal_p (TREE_OPERAND (arg0, 1),
3179 TREE_OPERAND (arg1, 0), flags));
3180
3181 case COND_EXPR:
3182 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3183 return 0;
3184 flags &= ~OEP_ADDRESS_OF;
3185 return OP_SAME (0);
3186
3187 case BIT_INSERT_EXPR:
3188 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3189 of op1. We need to check that the precisions are the same. */
3190 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3191 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3192 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3193 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3194 return false;
3195 /* FALLTHRU */
3196
3197 case VEC_COND_EXPR:
3198 case DOT_PROD_EXPR:
3199 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3200
3201 case MODIFY_EXPR:
3202 case INIT_EXPR:
3203 case COMPOUND_EXPR:
3204 case PREDECREMENT_EXPR:
3205 case PREINCREMENT_EXPR:
3206 case POSTDECREMENT_EXPR:
3207 case POSTINCREMENT_EXPR:
3208 if (flags & OEP_LEXICOGRAPHIC)
3209 return OP_SAME (0) && OP_SAME (1);
3210 return 0;
3211
3212 case CLEANUP_POINT_EXPR:
3213 case EXPR_STMT:
3214 if (flags & OEP_LEXICOGRAPHIC)
3215 return OP_SAME (0);
3216 return 0;
3217
3218 default:
3219 return 0;
3220 }
3221
3222 case tcc_vl_exp:
3223 switch (TREE_CODE (arg0))
3224 {
3225 case CALL_EXPR:
3226 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3227 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3228 /* If one CALL_EXPR is an internal function call and the other is
3229 a normal call, then they are not equal. */
3230 return 0;
3231 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3232 {
3233 /* If the CALL_EXPRs call different internal functions, then they
3234 are not equal. */
3235 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3236 return 0;
3237 }
3238 else
3239 {
3240 /* If the CALL_EXPRs call different functions, then they are not
3241 equal. */
3242 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3243 flags))
3244 return 0;
3245 }
3246
3247 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3248 {
3249 unsigned int cef = call_expr_flags (arg0);
3250 if (flags & OEP_PURE_SAME)
3251 cef &= ECF_CONST | ECF_PURE;
3252 else
3253 cef &= ECF_CONST;
3254 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3255 return 0;
3256 }
3257
3258 /* Now see if all the arguments are the same. */
3259 {
3260 const_call_expr_arg_iterator iter0, iter1;
3261 const_tree a0, a1;
3262 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3263 a1 = first_const_call_expr_arg (arg1, &iter1);
3264 a0 && a1;
3265 a0 = next_const_call_expr_arg (&iter0),
3266 a1 = next_const_call_expr_arg (&iter1))
3267 if (! operand_equal_p (a0, a1, flags))
3268 return 0;
3269
3270 /* If we get here and both argument lists are exhausted
3271 then the CALL_EXPRs are equal. */
3272 return ! (a0 || a1);
3273 }
3274 default:
3275 return 0;
3276 }
3277
3278 case tcc_declaration:
3279 /* Consider __builtin_sqrt equal to sqrt. */
3280 return (TREE_CODE (arg0) == FUNCTION_DECL
3281 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3282 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3283 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3284
3285 case tcc_exceptional:
3286 if (TREE_CODE (arg0) == CONSTRUCTOR)
3287 {
3288 /* In GIMPLE constructors are used only to build vectors from
3289 elements. Individual elements in the constructor must be
3290 indexed in increasing order and form an initial sequence.
3291
3292 We make no effort to compare constructors in GENERIC.
3293 (see sem_variable::equals in ipa-icf which can do so for
3294 constants). */
3295 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3296 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3297 return 0;
3298
3299 /* Be sure that the constructed vectors have the same representation.
3300 So far we have only tested that the element precisions and modes
3301 match. Vectors may be BLKmode, so also check that the numbers of
3302 parts match. */
3303 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3304 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3305 return 0;
3306
3307 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3308 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3309 unsigned int len = vec_safe_length (v0);
3310
3311 if (len != vec_safe_length (v1))
3312 return 0;
3313
3314 for (unsigned int i = 0; i < len; i++)
3315 {
3316 constructor_elt *c0 = &(*v0)[i];
3317 constructor_elt *c1 = &(*v1)[i];
3318
3319 if (!operand_equal_p (c0->value, c1->value, flags)
3320 /* In GIMPLE the indexes can be either NULL or matching i.
3321 Double check this so we won't get false
3322 positives for GENERIC. */
3323 || (c0->index
3324 && (TREE_CODE (c0->index) != INTEGER_CST
3325 || !compare_tree_int (c0->index, i)))
3326 || (c1->index
3327 && (TREE_CODE (c1->index) != INTEGER_CST
3328 || !compare_tree_int (c1->index, i))))
3329 return 0;
3330 }
3331 return 1;
3332 }
3333 else if (TREE_CODE (arg0) == STATEMENT_LIST
3334 && (flags & OEP_LEXICOGRAPHIC))
3335 {
3336 /* Compare the STATEMENT_LISTs. */
3337 tree_stmt_iterator tsi1, tsi2;
3338 tree body1 = CONST_CAST_TREE (arg0);
3339 tree body2 = CONST_CAST_TREE (arg1);
3340 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3341 tsi_next (&tsi1), tsi_next (&tsi2))
3342 {
3343 /* The lists don't have the same number of statements. */
3344 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3345 return 0;
3346 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3347 return 1;
3348 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3349 OEP_LEXICOGRAPHIC))
3350 return 0;
3351 }
3352 }
3353 return 0;
3354
3355 case tcc_statement:
3356 switch (TREE_CODE (arg0))
3357 {
3358 case RETURN_EXPR:
3359 if (flags & OEP_LEXICOGRAPHIC)
3360 return OP_SAME_WITH_NULL (0);
3361 return 0;
3362 default:
3363 return 0;
3364 }
3365
3366 default:
3367 return 0;
3368 }
3369
3370 #undef OP_SAME
3371 #undef OP_SAME_WITH_NULL
3372 }
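/* A small usage sketch (hypothetical VAR, under #if 0): OEP_ONLY_CONST
   restricts positive answers to constants, so a VAR_DECL does not
   match even itself under that flag.  */
#if 0
static void
operand_equal_p_example (tree var)
{
  gcc_checking_assert (operand_equal_p (integer_one_node, integer_one_node,
                                        OEP_ONLY_CONST));
  /* VAR is not constant, so this is 0 despite the identical operands.  */
  gcc_checking_assert (!operand_equal_p (var, var, OEP_ONLY_CONST));
}
#endif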
3373 \f
3374 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3375 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3376
3377 When in doubt, return 0. */
3378
3379 static int
3380 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3381 {
3382 int unsignedp1, unsignedpo;
3383 tree primarg0, primarg1, primother;
3384 unsigned int correct_width;
3385
3386 if (operand_equal_p (arg0, arg1, 0))
3387 return 1;
3388
3389 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3390 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3391 return 0;
3392
3393 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3394 and see if the inner values are the same. This removes any
3395 signedness comparison, which doesn't matter here. */
3396 primarg0 = arg0, primarg1 = arg1;
3397 STRIP_NOPS (primarg0);
3398 STRIP_NOPS (primarg1);
3399 if (operand_equal_p (primarg0, primarg1, 0))
3400 return 1;
3401
3402 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3403 actual comparison operand, ARG0.
3404
3405 First throw away any conversions to wider types
3406 already present in the operands. */
3407
3408 primarg1 = get_narrower (arg1, &unsignedp1);
3409 primother = get_narrower (other, &unsignedpo);
3410
3411 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3412 if (unsignedp1 == unsignedpo
3413 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3414 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3415 {
3416 tree type = TREE_TYPE (arg0);
3417
3418 /* Make sure shorter operand is extended the right way
3419 to match the longer operand. */
3420 primarg1 = fold_convert (signed_or_unsigned_type_for
3421 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3422
3423 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3424 return 1;
3425 }
3426
3427 return 0;
3428 }
3429 \f
3430 /* See if ARG is an expression that is either a comparison or is performing
3431 arithmetic on comparisons. The comparisons must only be comparing
3432 two different values, which will be stored in *CVAL1 and *CVAL2; if
3433 they are nonzero it means that some operands have already been found.
3434 No variables may be used anywhere else in the expression except in the
3435 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3436 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3437
3438 If this is true, return 1. Otherwise, return zero. */
3439
3440 static int
3441 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3442 {
3443 enum tree_code code = TREE_CODE (arg);
3444 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3445
3446 /* We can handle some of the tcc_expression cases here. */
3447 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3448 tclass = tcc_unary;
3449 else if (tclass == tcc_expression
3450 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3451 || code == COMPOUND_EXPR))
3452 tclass = tcc_binary;
3453
3454 else if (tclass == tcc_expression && code == SAVE_EXPR
3455 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3456 {
3457 /* If we've already found a CVAL1 or CVAL2, this expression is
3458 too complex to handle. */
3459 if (*cval1 || *cval2)
3460 return 0;
3461
3462 tclass = tcc_unary;
3463 *save_p = 1;
3464 }
3465
3466 switch (tclass)
3467 {
3468 case tcc_unary:
3469 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3470
3471 case tcc_binary:
3472 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3473 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3474 cval1, cval2, save_p));
3475
3476 case tcc_constant:
3477 return 1;
3478
3479 case tcc_expression:
3480 if (code == COND_EXPR)
3481 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3482 cval1, cval2, save_p)
3483 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3484 cval1, cval2, save_p)
3485 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3486 cval1, cval2, save_p));
3487 return 0;
3488
3489 case tcc_comparison:
3490 /* First see if we can handle the first operand, then the second. For
3491 the second operand, we know *CVAL1 can't be zero. It must be that
3492 one side of the comparison is each of the values; test for the
3493 case where this isn't true by failing if the two operands
3494 are the same. */
3495
3496 if (operand_equal_p (TREE_OPERAND (arg, 0),
3497 TREE_OPERAND (arg, 1), 0))
3498 return 0;
3499
3500 if (*cval1 == 0)
3501 *cval1 = TREE_OPERAND (arg, 0);
3502 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3503 ;
3504 else if (*cval2 == 0)
3505 *cval2 = TREE_OPERAND (arg, 0);
3506 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3507 ;
3508 else
3509 return 0;
3510
3511 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3512 ;
3513 else if (*cval2 == 0)
3514 *cval2 = TREE_OPERAND (arg, 1);
3515 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3516 ;
3517 else
3518 return 0;
3519
3520 return 1;
3521
3522 default:
3523 return 0;
3524 }
3525 }
3526 \f
3527 /* ARG is a tree that is known to contain just arithmetic operations and
3528 comparisons. Evaluate the operations in the tree substituting NEW0 for
3529 any occurrence of OLD0 as an operand of a comparison and likewise for
3530 NEW1 and OLD1. */
3531
3532 static tree
3533 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3534 tree old1, tree new1)
3535 {
3536 tree type = TREE_TYPE (arg);
3537 enum tree_code code = TREE_CODE (arg);
3538 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3539
3540 /* We can handle some of the tcc_expression cases here. */
3541 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3542 tclass = tcc_unary;
3543 else if (tclass == tcc_expression
3544 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3545 tclass = tcc_binary;
3546
3547 switch (tclass)
3548 {
3549 case tcc_unary:
3550 return fold_build1_loc (loc, code, type,
3551 eval_subst (loc, TREE_OPERAND (arg, 0),
3552 old0, new0, old1, new1));
3553
3554 case tcc_binary:
3555 return fold_build2_loc (loc, code, type,
3556 eval_subst (loc, TREE_OPERAND (arg, 0),
3557 old0, new0, old1, new1),
3558 eval_subst (loc, TREE_OPERAND (arg, 1),
3559 old0, new0, old1, new1));
3560
3561 case tcc_expression:
3562 switch (code)
3563 {
3564 case SAVE_EXPR:
3565 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3566 old1, new1);
3567
3568 case COMPOUND_EXPR:
3569 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3570 old1, new1);
3571
3572 case COND_EXPR:
3573 return fold_build3_loc (loc, code, type,
3574 eval_subst (loc, TREE_OPERAND (arg, 0),
3575 old0, new0, old1, new1),
3576 eval_subst (loc, TREE_OPERAND (arg, 1),
3577 old0, new0, old1, new1),
3578 eval_subst (loc, TREE_OPERAND (arg, 2),
3579 old0, new0, old1, new1));
3580 default:
3581 break;
3582 }
3583 /* Fall through - ??? */
3584
3585 case tcc_comparison:
3586 {
3587 tree arg0 = TREE_OPERAND (arg, 0);
3588 tree arg1 = TREE_OPERAND (arg, 1);
3589
3590 /* We need to check both for exact equality and tree equality. The
3591 former will be true if the operand has a side-effect. In that
3592 case, we know the operand occurred exactly once. */
3593
3594 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3595 arg0 = new0;
3596 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3597 arg0 = new1;
3598
3599 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3600 arg1 = new0;
3601 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3602 arg1 = new1;
3603
3604 return fold_build2_loc (loc, code, type, arg0, arg1);
3605 }
3606
3607 default:
3608 return arg;
3609 }
3610 }
3611 \f
3612 /* Return a tree for the case when the result of an expression is RESULT
3613 converted to TYPE and OMITTED was previously an operand of the expression
3614 but is now not needed (e.g., we folded OMITTED * 0).
3615
3616 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3617 the conversion of RESULT to TYPE. */
3618
3619 tree
3620 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3621 {
3622 tree t = fold_convert_loc (loc, type, result);
3623
3624 /* If the resulting operand is an empty statement, just return the omitted
3625 statement cast to void. */
3626 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3627 return build1_loc (loc, NOP_EXPR, void_type_node,
3628 fold_ignored_result (omitted));
3629
3630 if (TREE_SIDE_EFFECTS (omitted))
3631 return build2_loc (loc, COMPOUND_EXPR, type,
3632 fold_ignored_result (omitted), t);
3633
3634 return non_lvalue_loc (loc, t);
3635 }
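/* An illustrative sketch (hypothetical CALL operand, under #if 0):
   when the omitted operand has side effects it is retained in a
   COMPOUND_EXPR, so folding fn () * 0 still evaluates the call.  */
#if 0
static tree
omit_one_operand_example (location_t loc, tree call_with_side_effects)
{
  /* Result is (call, 0): the call is evaluated, the value is 0.  */
  return omit_one_operand_loc (loc, integer_type_node, integer_zero_node,
                               call_with_side_effects);
}
#endif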
3636
3637 /* Return a tree for the case when the result of an expression is RESULT
3638 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3639 of the expression but are now not needed.
3640
3641 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3642 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3643 evaluated before OMITTED2. Otherwise, if neither has side effects,
3644 just do the conversion of RESULT to TYPE. */
3645
3646 tree
3647 omit_two_operands_loc (location_t loc, tree type, tree result,
3648 tree omitted1, tree omitted2)
3649 {
3650 tree t = fold_convert_loc (loc, type, result);
3651
3652 if (TREE_SIDE_EFFECTS (omitted2))
3653 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3654 if (TREE_SIDE_EFFECTS (omitted1))
3655 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3656
3657 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3658 }
3659
3660 \f
3661 /* Return a simplified tree node for the truth-negation of ARG. This
3662 never alters ARG itself. We assume that ARG is an operation that
3663 returns a truth value (0 or 1).
3664
3665 FIXME: one would think we would fold the result, but it causes
3666 problems with the dominator optimizer. */
3667
3668 static tree
3669 fold_truth_not_expr (location_t loc, tree arg)
3670 {
3671 tree type = TREE_TYPE (arg);
3672 enum tree_code code = TREE_CODE (arg);
3673 location_t loc1, loc2;
3674
3675 /* If this is a comparison, we can simply invert it, except for
3676 floating-point non-equality comparisons, in which case we just
3677 enclose a TRUTH_NOT_EXPR around what we have. */
3678
3679 if (TREE_CODE_CLASS (code) == tcc_comparison)
3680 {
3681 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3682 if (FLOAT_TYPE_P (op_type)
3683 && flag_trapping_math
3684 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3685 && code != NE_EXPR && code != EQ_EXPR)
3686 return NULL_TREE;
3687
3688 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3689 if (code == ERROR_MARK)
3690 return NULL_TREE;
3691
3692 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3693 TREE_OPERAND (arg, 1));
3694 if (TREE_NO_WARNING (arg))
3695 TREE_NO_WARNING (ret) = 1;
3696 return ret;
3697 }
3698
3699 switch (code)
3700 {
3701 case INTEGER_CST:
3702 return constant_boolean_node (integer_zerop (arg), type);
3703
3704 case TRUTH_AND_EXPR:
3705 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3706 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3707 return build2_loc (loc, TRUTH_OR_EXPR, type,
3708 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3709 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3710
3711 case TRUTH_OR_EXPR:
3712 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3713 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3714 return build2_loc (loc, TRUTH_AND_EXPR, type,
3715 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3716 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3717
3718 case TRUTH_XOR_EXPR:
3719 /* Here we can invert either operand. We invert the first operand
3720 unless the second operand is a TRUTH_NOT_EXPR, in which case our
3721 result is the XOR of the first operand with the inside of the
3722 negation of the second operand. */
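/* For instance (illustrative): !(a ^ !b) becomes a ^ b, while
   !(a ^ b) becomes !a ^ b. */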
3723
3724 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3725 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3726 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3727 else
3728 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3729 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3730 TREE_OPERAND (arg, 1));
3731
3732 case TRUTH_ANDIF_EXPR:
3733 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3734 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3735 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3736 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3737 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3738
3739 case TRUTH_ORIF_EXPR:
3740 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3741 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3742 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3743 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3744 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3745
3746 case TRUTH_NOT_EXPR:
3747 return TREE_OPERAND (arg, 0);
3748
3749 case COND_EXPR:
3750 {
3751 tree arg1 = TREE_OPERAND (arg, 1);
3752 tree arg2 = TREE_OPERAND (arg, 2);
3753
3754 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3755 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3756
3757 /* A COND_EXPR may have a throw as one operand, which
3758 then has void type. Just leave void operands
3759 as they are. */
3760 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3761 VOID_TYPE_P (TREE_TYPE (arg1))
3762 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3763 VOID_TYPE_P (TREE_TYPE (arg2))
3764 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3765 }
3766
3767 case COMPOUND_EXPR:
3768 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3769 return build2_loc (loc, COMPOUND_EXPR, type,
3770 TREE_OPERAND (arg, 0),
3771 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3772
3773 case NON_LVALUE_EXPR:
3774 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3775 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3776
3777 CASE_CONVERT:
3778 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3779 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3780
3781 /* fall through */
3782
3783 case FLOAT_EXPR:
3784 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3785 return build1_loc (loc, TREE_CODE (arg), type,
3786 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3787
3788 case BIT_AND_EXPR:
3789 if (!integer_onep (TREE_OPERAND (arg, 1)))
3790 return NULL_TREE;
3791 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3792
3793 case SAVE_EXPR:
3794 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3795
3796 case CLEANUP_POINT_EXPR:
3797 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3798 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3799 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3800
3801 default:
3802 return NULL_TREE;
3803 }
3804 }
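
/* For illustration: the code above rewrites "!(a && b)" as "!a || !b"
   and inverts an integral "a < b" to "a >= b"; for a floating-point
   "a < b" under -ftrapping-math it returns NULL_TREE instead, since
   "a >= b" behaves differently when a NaN is involved. */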
3805
3806 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3807 assume that ARG is an operation that returns a truth value (0 or 1
3808 for scalars, 0 or -1 for vectors). Return the folded expression if
3809 folding is successful. Otherwise, return NULL_TREE. */
3810
3811 static tree
3812 fold_invert_truthvalue (location_t loc, tree arg)
3813 {
3814 tree type = TREE_TYPE (arg);
3815 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3816 ? BIT_NOT_EXPR
3817 : TRUTH_NOT_EXPR,
3818 type, arg);
3819 }
3820
3821 /* Return a simplified tree node for the truth-negation of ARG. This
3822 never alters ARG itself. We assume that ARG is an operation that
3823 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3824
3825 tree
3826 invert_truthvalue_loc (location_t loc, tree arg)
3827 {
3828 if (TREE_CODE (arg) == ERROR_MARK)
3829 return arg;
3830
3831 tree type = TREE_TYPE (arg);
3832 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3833 ? BIT_NOT_EXPR
3834 : TRUTH_NOT_EXPR,
3835 type, arg);
3836 }
3837
3838 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3839 with code CODE. This optimization is unsafe (it can change rounding). */
3840 static tree
3841 distribute_real_division (location_t loc, enum tree_code code, tree type,
3842 tree arg0, tree arg1)
3843 {
3844 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3845 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3846
3847 /* (A / C) +- (B / C) -> (A +- B) / C. */
3848 if (mul0 == mul1
3849 && operand_equal_p (TREE_OPERAND (arg0, 1),
3850 TREE_OPERAND (arg1, 1), 0))
3851 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3852 fold_build2_loc (loc, code, type,
3853 TREE_OPERAND (arg0, 0),
3854 TREE_OPERAND (arg1, 0)),
3855 TREE_OPERAND (arg0, 1));
3856
3857 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3858 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3859 TREE_OPERAND (arg1, 0), 0)
3860 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3861 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3862 {
3863 REAL_VALUE_TYPE r0, r1;
3864 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3865 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3866 if (!mul0)
3867 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3868 if (!mul1)
3869 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3870 real_arithmetic (&r0, code, &r0, &r1);
3871 return fold_build2_loc (loc, MULT_EXPR, type,
3872 TREE_OPERAND (arg0, 0),
3873 build_real (type, r0));
3874 }
3875
3876 return NULL_TREE;
3877 }
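
/* For illustration: with CODE == PLUS_EXPR the first pattern turns
   "x / d + y / d" into "(x + y) / d", and the second pattern turns
   "x / 2.0 + x / 4.0" into "x * 0.75". Both can change rounding,
   which is why the comment above labels this optimization unsafe. */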
3878 \f
3879 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3880 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3881 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3882 is the original memory reference used to preserve the alias set of
3883 the access. */
3884
3885 static tree
3886 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3887 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3888 int unsignedp, int reversep)
3889 {
3890 tree result, bftype;
3891
3892 /* Attempt not to lose the access path if possible. */
3893 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3894 {
3895 tree ninner = TREE_OPERAND (orig_inner, 0);
3896 machine_mode nmode;
3897 HOST_WIDE_INT nbitsize, nbitpos;
3898 tree noffset;
3899 int nunsignedp, nreversep, nvolatilep = 0;
3900 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3901 &noffset, &nmode, &nunsignedp,
3902 &nreversep, &nvolatilep);
3903 if (base == inner
3904 && noffset == NULL_TREE
3905 && nbitsize >= bitsize
3906 && nbitpos <= bitpos
3907 && bitpos + bitsize <= nbitpos + nbitsize
3908 && !reversep
3909 && !nreversep
3910 && !nvolatilep)
3911 {
3912 inner = ninner;
3913 bitpos -= nbitpos;
3914 }
3915 }
3916
3917 alias_set_type iset = get_alias_set (orig_inner);
3918 if (iset == 0 && get_alias_set (inner) != iset)
3919 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3920 build_fold_addr_expr (inner),
3921 build_int_cst (ptr_type_node, 0));
3922
3923 if (bitpos == 0 && !reversep)
3924 {
3925 tree size = TYPE_SIZE (TREE_TYPE (inner));
3926 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3927 || POINTER_TYPE_P (TREE_TYPE (inner)))
3928 && tree_fits_shwi_p (size)
3929 && tree_to_shwi (size) == bitsize)
3930 return fold_convert_loc (loc, type, inner);
3931 }
3932
3933 bftype = type;
3934 if (TYPE_PRECISION (bftype) != bitsize
3935 || TYPE_UNSIGNED (bftype) == !unsignedp)
3936 bftype = build_nonstandard_integer_type (bitsize, 0);
3937
3938 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3939 size_int (bitsize), bitsize_int (bitpos));
3940 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3941
3942 if (bftype != type)
3943 result = fold_convert_loc (loc, type, result);
3944
3945 return result;
3946 }
3947
3948 /* Optimize a bit-field compare.
3949
3950 There are two cases: the first is a comparison against a constant and the
3951 second is a comparison of two items where the fields are at the same
3952 bit position relative to the start of a chunk (byte, halfword, word)
3953 large enough to contain them. In these cases we can avoid the shift
3954 implicit in bitfield extractions.
3955
3956 For constants, we emit a compare of the shifted constant with the
3957 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3958 compared. For two fields at the same position, we do the ANDs with the
3959 similar mask and compare the result of the ANDs.
3960
3961 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3962 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3963 are the left and right operands of the comparison, respectively.
3964
3965 If the optimization described above can be done, we return the resulting
3966 tree. Otherwise we return zero. */
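
/* A worked example (illustrative; the exact masks depend on the
   target's endianness and alignment): given

     struct { unsigned a : 3, b : 5; } s;

   the comparison "s.b == 3" can be folded to test the containing byte
   directly, roughly "(w & 0xf8) == (3 << 3)" on a little-endian
   target, where w denotes the byte holding both fields. */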
3967
3968 static tree
3969 optimize_bit_field_compare (location_t loc, enum tree_code code,
3970 tree compare_type, tree lhs, tree rhs)
3971 {
3972 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3973 tree type = TREE_TYPE (lhs);
3974 tree unsigned_type;
3975 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3976 machine_mode lmode, rmode, nmode;
3977 int lunsignedp, runsignedp;
3978 int lreversep, rreversep;
3979 int lvolatilep = 0, rvolatilep = 0;
3980 tree linner, rinner = NULL_TREE;
3981 tree mask;
3982 tree offset;
3983
3984 /* Get all the information about the extractions being done. If the bit size
3985 is the same as the size of the underlying object, we aren't doing an
3986 extraction at all and so can do nothing. We also don't want to
3987 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3988 then will no longer be able to replace it. */
3989 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3990 &lunsignedp, &lreversep, &lvolatilep);
3991 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3992 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3993 return 0;
3994
3995 if (const_p)
3996 rreversep = lreversep;
3997 else
3998 {
3999 /* If this is not a constant, we can only do something if bit positions,
4000 sizes, signedness and storage order are the same. */
4001 rinner
4002 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4003 &runsignedp, &rreversep, &rvolatilep);
4004
4005 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4006 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
4007 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
4008 return 0;
4009 }
4010
4011 /* Honor the C++ memory model and mimic what RTL expansion does. */
4012 unsigned HOST_WIDE_INT bitstart = 0;
4013 unsigned HOST_WIDE_INT bitend = 0;
4014 if (TREE_CODE (lhs) == COMPONENT_REF)
4015 {
4016 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
4017 if (offset != NULL_TREE)
4018 return 0;
4019 }
4020
4021 /* See if we can find a mode to refer to this field. We should be able to,
4022 but fail if we can't. */
4023 nmode = get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4024 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4025 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4026 TYPE_ALIGN (TREE_TYPE (rinner))),
4027 word_mode, false);
4028 if (nmode == VOIDmode)
4029 return 0;
4030
4031 /* Set an unsigned type of the precision of this mode for the
4032 shifts below. */
4033 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4034
4035 /* Compute the bit position and size for the new reference and our offset
4036 within it. If the new reference is the same size as the original, we
4037 won't optimize anything, so return zero. */
4038 nbitsize = GET_MODE_BITSIZE (nmode);
4039 nbitpos = lbitpos & ~ (nbitsize - 1);
4040 lbitpos -= nbitpos;
4041 if (nbitsize == lbitsize)
4042 return 0;
4043
4044 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4045 lbitpos = nbitsize - lbitsize - lbitpos;
4046
4047 /* Make the mask to be used against the extracted field. */
4048 mask = build_int_cst_type (unsigned_type, -1);
4049 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4050 mask = const_binop (RSHIFT_EXPR, mask,
4051 size_int (nbitsize - lbitsize - lbitpos));
4052
4053 if (! const_p)
4054 /* If not comparing with constant, just rework the comparison
4055 and return. */
4056 return fold_build2_loc (loc, code, compare_type,
4057 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4058 make_bit_field_ref (loc, linner, lhs,
4059 unsigned_type,
4060 nbitsize, nbitpos,
4061 1, lreversep),
4062 mask),
4063 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4064 make_bit_field_ref (loc, rinner, rhs,
4065 unsigned_type,
4066 nbitsize, nbitpos,
4067 1, rreversep),
4068 mask));
4069
4070 /* Otherwise, we are handling the constant case. See if the constant is too
4071 big for the field. Warn and return a tree for 0 (false) if so. We do
4072 this not only for its own sake, but to avoid having to test for this
4073 error case below. If we didn't, we might generate wrong code.
4074
4075 For unsigned fields, the constant shifted right by the field length should
4076 be all zero. For signed fields, the high-order bits should agree with
4077 the sign bit. */
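
/* For instance (illustrative): for a 3-bit unsigned field, RHS == 9
   gives 9 >> 3 == 1, which is nonzero, so "field == 9" is always
   false and the warning below triggers. */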
4078
4079 if (lunsignedp)
4080 {
4081 if (wi::lrshift (rhs, lbitsize) != 0)
4082 {
4083 warning (0, "comparison is always %d due to width of bit-field",
4084 code == NE_EXPR);
4085 return constant_boolean_node (code == NE_EXPR, compare_type);
4086 }
4087 }
4088 else
4089 {
4090 wide_int tem = wi::arshift (rhs, lbitsize - 1);
4091 if (tem != 0 && tem != -1)
4092 {
4093 warning (0, "comparison is always %d due to width of bit-field",
4094 code == NE_EXPR);
4095 return constant_boolean_node (code == NE_EXPR, compare_type);
4096 }
4097 }
4098
4099 /* Single-bit compares should always be against zero. */
4100 if (lbitsize == 1 && ! integer_zerop (rhs))
4101 {
4102 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4103 rhs = build_int_cst (type, 0);
4104 }
4105
4106 /* Make a new bitfield reference, shift the constant over the
4107 appropriate number of bits and mask it with the computed mask
4108 (in case this was a signed field). */
4109 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4110 nbitsize, nbitpos, 1, lreversep);
4111
4112 rhs = const_binop (BIT_AND_EXPR,
4113 const_binop (LSHIFT_EXPR,
4114 fold_convert_loc (loc, unsigned_type, rhs),
4115 size_int (lbitpos)),
4116 mask);
4117
4118 lhs = build2_loc (loc, code, compare_type,
4119 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4120 return lhs;
4121 }
4122 \f
4123 /* Subroutine for fold_truth_andor_1: decode a field reference.
4124
4125 If EXP is a comparison reference, we return the innermost reference.
4126
4127 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4128 set to the starting bit number.
4129
4130 If the innermost field can be completely contained in a mode-sized
4131 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4132
4133 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4134 otherwise it is not changed.
4135
4136 *PUNSIGNEDP is set to the signedness of the field.
4137
4138 *PREVERSEP is set to the storage order of the field.
4139
4140 *PMASK is set to the mask used. This is either contained in a
4141 BIT_AND_EXPR or derived from the width of the field.
4142
4143 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4144
4145 Return 0 if this is not a component reference or is one that we can't
4146 do anything with. */
4147
4148 static tree
4149 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4150 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4151 int *punsignedp, int *preversep, int *pvolatilep,
4152 tree *pmask, tree *pand_mask)
4153 {
4154 tree exp = *exp_;
4155 tree outer_type = 0;
4156 tree and_mask = 0;
4157 tree mask, inner, offset;
4158 tree unsigned_type;
4159 unsigned int precision;
4160
4161 /* All the optimizations using this function assume integer fields.
4162 There are problems with FP fields since the type_for_size call
4163 below can fail for, e.g., XFmode. */
4164 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4165 return 0;
4166
4167 /* We are interested in the bare arrangement of bits, so strip everything
4168 that doesn't affect the machine mode. However, record the type of the
4169 outermost expression if it may matter below. */
4170 if (CONVERT_EXPR_P (exp)
4171 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4172 outer_type = TREE_TYPE (exp);
4173 STRIP_NOPS (exp);
4174
4175 if (TREE_CODE (exp) == BIT_AND_EXPR)
4176 {
4177 and_mask = TREE_OPERAND (exp, 1);
4178 exp = TREE_OPERAND (exp, 0);
4179 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4180 if (TREE_CODE (and_mask) != INTEGER_CST)
4181 return 0;
4182 }
4183
4184 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4185 punsignedp, preversep, pvolatilep);
4186 if ((inner == exp && and_mask == 0)
4187 || *pbitsize < 0 || offset != 0
4188 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4189 /* Reject out-of-bound accesses (PR79731). */
4190 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4191 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4192 *pbitpos + *pbitsize) < 0))
4193 return 0;
4194
4195 *exp_ = exp;
4196
4197 /* If the number of bits in the reference is the same as the bitsize of
4198 the outer type, then the outer type gives the signedness. Otherwise
4199 (in case of a small bitfield) the signedness is unchanged. */
4200 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4201 *punsignedp = TYPE_UNSIGNED (outer_type);
4202
4203 /* Compute the mask to access the bitfield. */
4204 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4205 precision = TYPE_PRECISION (unsigned_type);
4206
4207 mask = build_int_cst_type (unsigned_type, -1);
4208
4209 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4210 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4211
4212 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4213 if (and_mask != 0)
4214 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4215 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4216
4217 *pmask = mask;
4218 *pand_mask = and_mask;
4219 return inner;
4220 }
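
/* For illustration (a sketch; the numbers assume a hypothetical
   struct with an unsigned char field B at byte offset 1): for
   EXP == "s.b & 3" this returns the containing object, sets
   *PBITSIZE to 8, *PBITPOS to 8, *PAND_MASK to 3, and *PMASK to the
   AND of the 8-bit field mask 0xff with 3, i.e. 3. */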
4221
4222 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4223 bit positions and the type of MASK is signed. */
4224
4225 static int
4226 all_ones_mask_p (const_tree mask, unsigned int size)
4227 {
4228 tree type = TREE_TYPE (mask);
4229 unsigned int precision = TYPE_PRECISION (type);
4230
4231 /* If this function returns true when the type of the mask is
4232 UNSIGNED, then there will be errors. In particular see
4233 gcc.c-torture/execute/990326-1.c. There does not appear to be
4234 any documentation paper trail as to why this is so. But the
4235 pre-wide-int code worked with that restriction, and it has been
4236 preserved here. */
4237 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4238 return false;
4239
4240 return wi::mask (size, false, precision) == mask;
4241 }
4242
4243 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4244 represents the sign bit of EXP's type. If EXP represents a sign
4245 or zero extension, also test VAL against the unextended type.
4246 The return value is the (sub)expression whose sign bit is VAL,
4247 or NULL_TREE otherwise. */
4248
4249 tree
4250 sign_bit_p (tree exp, const_tree val)
4251 {
4252 int width;
4253 tree t;
4254
4255 /* Tree EXP must have an integral type. */
4256 t = TREE_TYPE (exp);
4257 if (! INTEGRAL_TYPE_P (t))
4258 return NULL_TREE;
4259
4260 /* Tree VAL must be an integer constant. */
4261 if (TREE_CODE (val) != INTEGER_CST
4262 || TREE_OVERFLOW (val))
4263 return NULL_TREE;
4264
4265 width = TYPE_PRECISION (t);
4266 if (wi::only_sign_bit_p (val, width))
4267 return exp;
4268
4269 /* Handle extension from a narrower type. */
4270 if (TREE_CODE (exp) == NOP_EXPR
4271 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4272 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4273
4274 return NULL_TREE;
4275 }
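
/* For example, for a 32-bit signed EXP the only accepted VAL is the
   bit pattern 0x80000000 (INT_MIN), in which case EXP itself is
   returned; for a widening NOP_EXPR the narrower operand is tested
   against VAL instead. */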
4276
4277 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4278 to be evaluated unconditionally. */
4279
4280 static int
4281 simple_operand_p (const_tree exp)
4282 {
4283 /* Strip any conversions that don't change the machine mode. */
4284 STRIP_NOPS (exp);
4285
4286 return (CONSTANT_CLASS_P (exp)
4287 || TREE_CODE (exp) == SSA_NAME
4288 || (DECL_P (exp)
4289 && ! TREE_ADDRESSABLE (exp)
4290 && ! TREE_THIS_VOLATILE (exp)
4291 && ! DECL_NONLOCAL (exp)
4292 /* Don't regard global variables as simple. They may be
4293 allocated in ways unknown to the compiler (shared memory,
4294 #pragma weak, etc). */
4295 && ! TREE_PUBLIC (exp)
4296 && ! DECL_EXTERNAL (exp)
4297 /* Weakrefs are not safe to be read, since they can be NULL.
4298 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4299 have DECL_WEAK flag set. */
4300 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4301 /* Loading a static variable is unduly expensive, but global
4302 registers aren't expensive. */
4303 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4304 }
4305
4306 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4307 to be evaluated unconditionally.
4308 In addition to simple_operand_p, we assume that comparisons, conversions,
4309 and logic-not operations are simple, if their operands are simple, too. */
4310
4311 static bool
4312 simple_operand_p_2 (tree exp)
4313 {
4314 enum tree_code code;
4315
4316 if (TREE_SIDE_EFFECTS (exp)
4317 || tree_could_trap_p (exp))
4318 return false;
4319
4320 while (CONVERT_EXPR_P (exp))
4321 exp = TREE_OPERAND (exp, 0);
4322
4323 code = TREE_CODE (exp);
4324
4325 if (TREE_CODE_CLASS (code) == tcc_comparison)
4326 return (simple_operand_p (TREE_OPERAND (exp, 0))
4327 && simple_operand_p (TREE_OPERAND (exp, 1)));
4328
4329 if (code == TRUTH_NOT_EXPR)
4330 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4331
4332 return simple_operand_p (exp);
4333 }
4334
4335 \f
4336 /* The following functions are subroutines to fold_range_test and allow it to
4337 try to change a logical combination of comparisons into a range test.
4338
4339 For example, both
4340 X == 2 || X == 3 || X == 4 || X == 5
4341 and
4342 X >= 2 && X <= 5
4343 are converted to
4344 (unsigned) (X - 2) <= 3
4345
4346 We describe each set of comparisons as being either inside or outside
4347 a range, using a variable named like IN_P, and then describe the
4348 range with a lower and upper bound. If one of the bounds is omitted,
4349 it represents either the highest or lowest value of the type.
4350
4351 In the comments below, we represent a range by two numbers in brackets
4352 preceded by a "+" to designate being inside that range, or a "-" to
4353 designate being outside that range, so the condition can be inverted by
4354 flipping the prefix. An omitted bound is represented by a "-". For
4355 example, "- [-, 10]" means being outside the range starting at the lowest
4356 possible value and ending at 10, in other words, being greater than 10.
4357 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4358 always false.
4359
4360 We set up things so that the missing bounds are handled in a consistent
4361 manner so neither a missing bound nor "true" and "false" need to be
4362 handled using a special case. */
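
/* For example, "X >= 2 && X <= 5" is described as "+ [2, 5]", its
   negation "X < 2 || X > 5" as "- [2, 5]", and "X > 10" as
   "- [-, 10]". */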
4363
4364 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4365 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4366 and UPPER1_P are nonzero if the respective argument is an upper bound
4367 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4368 must be specified for a comparison. ARG1 will be converted to ARG0's
4369 type if both are specified. */
4370
4371 static tree
4372 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4373 tree arg1, int upper1_p)
4374 {
4375 tree tem;
4376 int result;
4377 int sgn0, sgn1;
4378
4379 /* If neither arg represents infinity, do the normal operation.
4380 Else, if not a comparison, return infinity. Else handle the special
4381 comparison rules. Note that most of the cases below won't occur, but
4382 are handled for consistency. */
4383
4384 if (arg0 != 0 && arg1 != 0)
4385 {
4386 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4387 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4388 STRIP_NOPS (tem);
4389 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4390 }
4391
4392 if (TREE_CODE_CLASS (code) != tcc_comparison)
4393 return 0;
4394
4395 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4396 for neither. In real mathematics, we cannot assume open-ended ranges
4397 are the same. But this is computer arithmetic, where numbers are finite.
4398 We can therefore represent any unbounded bound by a value Z that is
4399 greater than any representable number, which permits
4400 us to treat unbounded ranges as equal. */
4401 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4402 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4403 switch (code)
4404 {
4405 case EQ_EXPR:
4406 result = sgn0 == sgn1;
4407 break;
4408 case NE_EXPR:
4409 result = sgn0 != sgn1;
4410 break;
4411 case LT_EXPR:
4412 result = sgn0 < sgn1;
4413 break;
4414 case LE_EXPR:
4415 result = sgn0 <= sgn1;
4416 break;
4417 case GT_EXPR:
4418 result = sgn0 > sgn1;
4419 break;
4420 case GE_EXPR:
4421 result = sgn0 >= sgn1;
4422 break;
4423 default:
4424 gcc_unreachable ();
4425 }
4426
4427 return constant_boolean_node (result, type);
4428 }
4429 \f
4430 /* Helper routine for make_range. Perform one step for it, return
4431 new expression if the loop should continue or NULL_TREE if it should
4432 stop. */
4433
4434 tree
4435 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4436 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4437 bool *strict_overflow_p)
4438 {
4439 tree arg0_type = TREE_TYPE (arg0);
4440 tree n_low, n_high, low = *p_low, high = *p_high;
4441 int in_p = *p_in_p, n_in_p;
4442
4443 switch (code)
4444 {
4445 case TRUTH_NOT_EXPR:
4446 /* We can only do something if the range is testing for zero. */
4447 if (low == NULL_TREE || high == NULL_TREE
4448 || ! integer_zerop (low) || ! integer_zerop (high))
4449 return NULL_TREE;
4450 *p_in_p = ! in_p;
4451 return arg0;
4452
4453 case EQ_EXPR: case NE_EXPR:
4454 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4455 /* We can only do something if the range is testing for zero
4456 and if the second operand is an integer constant. Note that
4457 saying something is "in" the range we make is done by
4458 complementing IN_P, since it was set up for the initial case of
4459 being not equal to zero; "out" is leaving it alone. */
4460 if (low == NULL_TREE || high == NULL_TREE
4461 || ! integer_zerop (low) || ! integer_zerop (high)
4462 || TREE_CODE (arg1) != INTEGER_CST)
4463 return NULL_TREE;
4464
4465 switch (code)
4466 {
4467 case NE_EXPR: /* - [c, c] */
4468 low = high = arg1;
4469 break;
4470 case EQ_EXPR: /* + [c, c] */
4471 in_p = ! in_p, low = high = arg1;
4472 break;
4473 case GT_EXPR: /* - [-, c] */
4474 low = 0, high = arg1;
4475 break;
4476 case GE_EXPR: /* + [c, -] */
4477 in_p = ! in_p, low = arg1, high = 0;
4478 break;
4479 case LT_EXPR: /* - [c, -] */
4480 low = arg1, high = 0;
4481 break;
4482 case LE_EXPR: /* + [-, c] */
4483 in_p = ! in_p, low = 0, high = arg1;
4484 break;
4485 default:
4486 gcc_unreachable ();
4487 }
4488
4489 /* If this is an unsigned comparison, we also know that EXP is
4490 greater than or equal to zero. We base the range tests we make
4491 on that fact, so we record it here so we can parse existing
4492 range tests. We test arg0_type since often the return type
4493 of, e.g. EQ_EXPR, is boolean. */
4494 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4495 {
4496 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4497 in_p, low, high, 1,
4498 build_int_cst (arg0_type, 0),
4499 NULL_TREE))
4500 return NULL_TREE;
4501
4502 in_p = n_in_p, low = n_low, high = n_high;
4503
4504 /* If the high bound is missing, but we have a nonzero low
4505 bound, reverse the range so it goes from zero to the low bound
4506 minus 1. */
4507 if (high == 0 && low && ! integer_zerop (low))
4508 {
4509 in_p = ! in_p;
4510 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4511 build_int_cst (TREE_TYPE (low), 1), 0);
4512 low = build_int_cst (arg0_type, 0);
4513 }
4514 }
4515
4516 *p_low = low;
4517 *p_high = high;
4518 *p_in_p = in_p;
4519 return arg0;
4520
4521 case NEGATE_EXPR:
4522 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4523 low and high are non-NULL; the normalize code below will then do the right thing. */
4524 if (!TYPE_UNSIGNED (arg0_type)
4525 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4526 {
4527 if (low == NULL_TREE)
4528 low = TYPE_MIN_VALUE (arg0_type);
4529 if (high == NULL_TREE)
4530 high = TYPE_MAX_VALUE (arg0_type);
4531 }
4532
4533 /* (-x) IN [a,b] -> x in [-b, -a] */
4534 n_low = range_binop (MINUS_EXPR, exp_type,
4535 build_int_cst (exp_type, 0),
4536 0, high, 1);
4537 n_high = range_binop (MINUS_EXPR, exp_type,
4538 build_int_cst (exp_type, 0),
4539 0, low, 0);
4540 if (n_high != 0 && TREE_OVERFLOW (n_high))
4541 return NULL_TREE;
4542 goto normalize;
4543
4544 case BIT_NOT_EXPR:
4545 /* ~ X -> -X - 1 */
4546 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4547 build_int_cst (exp_type, 1));
4548
4549 case PLUS_EXPR:
4550 case MINUS_EXPR:
4551 if (TREE_CODE (arg1) != INTEGER_CST)
4552 return NULL_TREE;
4553
4554 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4555 move a constant to the other side. */
4556 if (!TYPE_UNSIGNED (arg0_type)
4557 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4558 return NULL_TREE;
4559
4560 /* If EXP is signed, any overflow in the computation is undefined,
4561 so we don't worry about it so long as our computations on
4562 the bounds don't overflow. For unsigned, overflow is defined
4563 and this is exactly the right thing. */
4564 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4565 arg0_type, low, 0, arg1, 0);
4566 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4567 arg0_type, high, 1, arg1, 0);
4568 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4569 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4570 return NULL_TREE;
4571
4572 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4573 *strict_overflow_p = true;
4574
4575 normalize:
4576 /* Check for an unsigned range which has wrapped around the maximum
4577 value thus making n_high < n_low, and normalize it. */
4578 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4579 {
4580 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4581 build_int_cst (TREE_TYPE (n_high), 1), 0);
4582 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4583 build_int_cst (TREE_TYPE (n_low), 1), 0);
4584
4585 /* If the range is of the form +/- [ x+1, x ], we won't
4586 be able to normalize it. But then, it represents the
4587 whole range or the empty set, so make it
4588 +/- [ -, - ]. */
4589 if (tree_int_cst_equal (n_low, low)
4590 && tree_int_cst_equal (n_high, high))
4591 low = high = 0;
4592 else
4593 in_p = ! in_p;
4594 }
4595 else
4596 low = n_low, high = n_high;
4597
4598 *p_low = low;
4599 *p_high = high;
4600 *p_in_p = in_p;
4601 return arg0;
4602
4603 CASE_CONVERT:
4604 case NON_LVALUE_EXPR:
4605 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4606 return NULL_TREE;
4607
4608 if (! INTEGRAL_TYPE_P (arg0_type)
4609 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4610 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4611 return NULL_TREE;
4612
4613 n_low = low, n_high = high;
4614
4615 if (n_low != 0)
4616 n_low = fold_convert_loc (loc, arg0_type, n_low);
4617
4618 if (n_high != 0)
4619 n_high = fold_convert_loc (loc, arg0_type, n_high);
4620
4621 /* If we're converting arg0 from an unsigned type to exp,
4622 a signed type, we will be doing the comparison as unsigned.
4623 The tests above have already verified that LOW and HIGH
4624 are both positive.
4625
4626 So we have to ensure that we will handle large unsigned
4627 values the same way that the current signed bounds treat
4628 negative values. */
4629
4630 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4631 {
4632 tree high_positive;
4633 tree equiv_type;
4634 /* For fixed-point modes, we need to pass the saturating flag
4635 as the 2nd parameter. */
4636 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4637 equiv_type
4638 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4639 TYPE_SATURATING (arg0_type));
4640 else
4641 equiv_type
4642 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4643
4644 /* A range without an upper bound is, naturally, unbounded.
4645 Since convert would have cropped a very large value, use
4646 the max value for the destination type. */
4647 high_positive
4648 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4649 : TYPE_MAX_VALUE (arg0_type);
4650
4651 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4652 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4653 fold_convert_loc (loc, arg0_type,
4654 high_positive),
4655 build_int_cst (arg0_type, 1));
4656
4657 /* If the low bound is specified, "and" the range with the
4658 range for which the original unsigned value will be
4659 positive. */
4660 if (low != 0)
4661 {
4662 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4663 1, fold_convert_loc (loc, arg0_type,
4664 integer_zero_node),
4665 high_positive))
4666 return NULL_TREE;
4667
4668 in_p = (n_in_p == in_p);
4669 }
4670 else
4671 {
4672 /* Otherwise, "or" the range with the range of the input
4673 that will be interpreted as negative. */
4674 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4675 1, fold_convert_loc (loc, arg0_type,
4676 integer_zero_node),
4677 high_positive))
4678 return NULL_TREE;
4679
4680 in_p = (in_p != n_in_p);
4681 }
4682 }
4683
4684 *p_low = n_low;
4685 *p_high = n_high;
4686 *p_in_p = in_p;
4687 return arg0;
4688
4689 default:
4690 return NULL_TREE;
4691 }
4692 }
4693
4694 /* Given EXP, a logical expression, set the range it is testing into
4695 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4696 actually being tested. *PLOW and *PHIGH will be made of the same
4697 type as the returned expression. If EXP is not a comparison, we
4698 will most likely not be returning a useful value and range. Set
4699 *STRICT_OVERFLOW_P to true if the return value is only valid
4700 because signed overflow is undefined; otherwise, do not change
4701 *STRICT_OVERFLOW_P. */
4702
4703 tree
4704 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4705 bool *strict_overflow_p)
4706 {
4707 enum tree_code code;
4708 tree arg0, arg1 = NULL_TREE;
4709 tree exp_type, nexp;
4710 int in_p;
4711 tree low, high;
4712 location_t loc = EXPR_LOCATION (exp);
4713
4714 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4715 and see if we can refine the range. Some of the cases below may not
4716 happen, but it doesn't seem worth worrying about this. We "continue"
4717 the outer loop when we've changed something; otherwise we "break"
4718 the switch, which will "break" the while. */
4719
4720 in_p = 0;
4721 low = high = build_int_cst (TREE_TYPE (exp), 0);
4722
4723 while (1)
4724 {
4725 code = TREE_CODE (exp);
4726 exp_type = TREE_TYPE (exp);
4727 arg0 = NULL_TREE;
4728
4729 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4730 {
4731 if (TREE_OPERAND_LENGTH (exp) > 0)
4732 arg0 = TREE_OPERAND (exp, 0);
4733 if (TREE_CODE_CLASS (code) == tcc_binary
4734 || TREE_CODE_CLASS (code) == tcc_comparison
4735 || (TREE_CODE_CLASS (code) == tcc_expression
4736 && TREE_OPERAND_LENGTH (exp) > 1))
4737 arg1 = TREE_OPERAND (exp, 1);
4738 }
4739 if (arg0 == NULL_TREE)
4740 break;
4741
4742 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4743 &high, &in_p, strict_overflow_p);
4744 if (nexp == NULL_TREE)
4745 break;
4746 exp = nexp;
4747 }
4748
4749 /* If EXP is a constant, we can evaluate whether this is true or false. */
4750 if (TREE_CODE (exp) == INTEGER_CST)
4751 {
4752 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4753 exp, 0, low, 0))
4754 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4755 exp, 1, high, 1)));
4756 low = high = 0;
4757 exp = 0;
4758 }
4759
4760 *pin_p = in_p, *plow = low, *phigh = high;
4761 return exp;
4762 }
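
/* For illustration: given EXP == "(x + 1) > 10" with signed x and the
   default undefined signed overflow, the loop above first records the
   range "- [-, 10]" for "x + 1", then the PLUS_EXPR step moves the
   constant across, so the function returns "x" with "- [-, 9]" and
   sets *STRICT_OVERFLOW_P. */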
4763
4764 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
4765 a bitwise check, i.e. when
4766 LOW == 0xXX...X00...0
4767 HIGH == 0xXX...X11...1
4768 Return the corresponding mask in MASK and the stem in VALUE. */
4769
4770 static bool
4771 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4772 tree *value)
4773 {
4774 if (TREE_CODE (low) != INTEGER_CST
4775 || TREE_CODE (high) != INTEGER_CST)
4776 return false;
4777
4778 unsigned prec = TYPE_PRECISION (type);
4779 wide_int lo = wi::to_wide (low, prec);
4780 wide_int hi = wi::to_wide (high, prec);
4781
4782 wide_int end_mask = lo ^ hi;
4783 if ((end_mask & (end_mask + 1)) != 0
4784 || (lo & end_mask) != 0)
4785 return false;
4786
4787 wide_int stem_mask = ~end_mask;
4788 wide_int stem = lo & stem_mask;
4789 if (stem != (hi & stem_mask))
4790 return false;
4791
4792 *mask = wide_int_to_tree (type, stem_mask);
4793 *value = wide_int_to_tree (type, stem);
4794
4795 return true;
4796 }
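
/* For example, LOW == 0x20 and HIGH == 0x3f fit the pattern:
   LOW ^ HIGH == 0x1f is a run of low-order ones and LOW has none of
   those bits set, so *MASK becomes ~0x1f and *VALUE becomes 0x20,
   which lets "x >= 0x20 && x <= 0x3f" become "(x & ~0x1f) == 0x20". */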
4797 \f
4798 /* Helper routine for build_range_check and match.pd. Return the type to
4799 perform the check in, or NULL_TREE if it shouldn't be optimized. */
4800
4801 tree
4802 range_check_type (tree etype)
4803 {
4804 /* First make sure that arithmetic in this type is valid, then make sure
4805 that it wraps around. */
4806 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4807 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4808 TYPE_UNSIGNED (etype));
4809
4810 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4811 {
4812 tree utype, minv, maxv;
4813
4814 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4815 for the type in question, as we rely on this here. */
4816 utype = unsigned_type_for (etype);
4817 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4818 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4819 build_int_cst (TREE_TYPE (maxv), 1), 1);
4820 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4821
4822 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4823 minv, 1, maxv, 1)))
4824 etype = utype;
4825 else
4826 return NULL_TREE;
4827 }
4828 return etype;
4829 }
4830
4831 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4832 type, TYPE, return an expression to test if EXP is in (or out of, depending
4833 on IN_P) the range. Return 0 if the test couldn't be created. */
4834
4835 tree
4836 build_range_check (location_t loc, tree type, tree exp, int in_p,
4837 tree low, tree high)
4838 {
4839 tree etype = TREE_TYPE (exp), mask, value;
4840
4841 /* Disable this optimization for function pointer expressions
4842 on targets that require function pointer canonicalization. */
4843 if (targetm.have_canonicalize_funcptr_for_compare ()
4844 && TREE_CODE (etype) == POINTER_TYPE
4845 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4846 return NULL_TREE;
4847
4848 if (! in_p)
4849 {
4850 value = build_range_check (loc, type, exp, 1, low, high);
4851 if (value != 0)
4852 return invert_truthvalue_loc (loc, value);
4853
4854 return 0;
4855 }
4856
4857 if (low == 0 && high == 0)
4858 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4859
4860 if (low == 0)
4861 return fold_build2_loc (loc, LE_EXPR, type, exp,
4862 fold_convert_loc (loc, etype, high));
4863
4864 if (high == 0)
4865 return fold_build2_loc (loc, GE_EXPR, type, exp,
4866 fold_convert_loc (loc, etype, low));
4867
4868 if (operand_equal_p (low, high, 0))
4869 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4870 fold_convert_loc (loc, etype, low));
4871
4872 if (TREE_CODE (exp) == BIT_AND_EXPR
4873 && maskable_range_p (low, high, etype, &mask, &value))
4874 return fold_build2_loc (loc, EQ_EXPR, type,
4875 fold_build2_loc (loc, BIT_AND_EXPR, etype,
4876 exp, mask),
4877 value);
4878
4879 if (integer_zerop (low))
4880 {
4881 if (! TYPE_UNSIGNED (etype))
4882 {
4883 etype = unsigned_type_for (etype);
4884 high = fold_convert_loc (loc, etype, high);
4885 exp = fold_convert_loc (loc, etype, exp);
4886 }
4887 return build_range_check (loc, type, exp, 1, 0, high);
4888 }
4889
4890 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4891 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4892 {
4893 int prec = TYPE_PRECISION (etype);
4894
4895 if (wi::mask (prec - 1, false, prec) == high)
4896 {
4897 if (TYPE_UNSIGNED (etype))
4898 {
4899 tree signed_etype = signed_type_for (etype);
4900 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4901 etype
4902 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4903 else
4904 etype = signed_etype;
4905 exp = fold_convert_loc (loc, etype, exp);
4906 }
4907 return fold_build2_loc (loc, GT_EXPR, type, exp,
4908 build_int_cst (etype, 0));
4909 }
4910 }
4911
4912 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4913 This requires wrap-around arithmetic for the type of the expression. */
4914 etype = range_check_type (etype);
4915 if (etype == NULL_TREE)
4916 return NULL_TREE;
4917
4918 high = fold_convert_loc (loc, etype, high);
4919 low = fold_convert_loc (loc, etype, low);
4920 exp = fold_convert_loc (loc, etype, exp);
4921
4922 value = const_binop (MINUS_EXPR, high, low);
4923
4924
4925 if (POINTER_TYPE_P (etype))
4926 {
4927 if (value != 0 && !TREE_OVERFLOW (value))
4928 {
4929 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4930 return build_range_check (loc, type,
4931 fold_build_pointer_plus_loc (loc, exp, low),
4932 1, build_int_cst (etype, 0), value);
4933 }
4934 return 0;
4935 }
4936
4937 if (value != 0 && !TREE_OVERFLOW (value))
4938 return build_range_check (loc, type,
4939 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4940 1, build_int_cst (etype, 0), value);
4941
4942 return 0;
4943 }
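
/* Example (illustrative): for a signed int X, a check for "+ [2, 5]"
   is built as "(unsigned int) (x - 2) <= 3" via the wrap-around
   transformation above, while "+ [1, 127]" on an unsigned char
   becomes "(signed char) x > 0". */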
4944 \f
4945 /* Return the predecessor of VAL in its type, handling the infinite case. */
4946
4947 static tree
4948 range_predecessor (tree val)
4949 {
4950 tree type = TREE_TYPE (val);
4951
4952 if (INTEGRAL_TYPE_P (type)
4953 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4954 return 0;
4955 else
4956 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4957 build_int_cst (TREE_TYPE (val), 1), 0);
4958 }
4959
4960 /* Return the successor of VAL in its type, handling the infinite case. */
4961
4962 static tree
4963 range_successor (tree val)
4964 {
4965 tree type = TREE_TYPE (val);
4966
4967 if (INTEGRAL_TYPE_P (type)
4968 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4969 return 0;
4970 else
4971 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4972 build_int_cst (TREE_TYPE (val), 1), 0);
4973 }
4974
4975 /* Given two ranges, see if we can merge them into one. Return 1 if we
4976 can, 0 if we can't. Set the output range into the specified parameters. */
4977
4978 bool
4979 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4980 tree high0, int in1_p, tree low1, tree high1)
4981 {
4982 int no_overlap;
4983 int subset;
4984 int temp;
4985 tree tem;
4986 int in_p;
4987 tree low, high;
4988 int lowequal = ((low0 == 0 && low1 == 0)
4989 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4990 low0, 0, low1, 0)));
4991 int highequal = ((high0 == 0 && high1 == 0)
4992 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4993 high0, 1, high1, 1)));
4994
4995 /* Make range 0 be the range that starts first, or ends last if they
4996 start at the same value. Swap them if that isn't already the case. */
4997 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4998 low0, 0, low1, 0))
4999 || (lowequal
5000 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5001 high1, 1, high0, 1))))
5002 {
5003 temp = in0_p, in0_p = in1_p, in1_p = temp;
5004 tem = low0, low0 = low1, low1 = tem;
5005 tem = high0, high0 = high1, high1 = tem;
5006 }
5007
5008 /* Now flag two cases, whether the ranges are disjoint or whether the
5009 second range is totally subsumed in the first. Note that the tests
5010 below are simplified by the ones above. */
5011 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5012 high0, 1, low1, 0));
5013 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5014 high1, 1, high0, 1));
5015
5016 /* We now have four cases, depending on whether we are including or
5017 excluding the two ranges. */
5018 if (in0_p && in1_p)
5019 {
5020 /* If they don't overlap, the result is false. If the second range
5021 is a subset, it is the result. Otherwise, the range is from the start
5022 of the second to the end of the first. */
5023 if (no_overlap)
5024 in_p = 0, low = high = 0;
5025 else if (subset)
5026 in_p = 1, low = low1, high = high1;
5027 else
5028 in_p = 1, low = low1, high = high0;
5029 }
5030
5031 else if (in0_p && ! in1_p)
5032 {
5033 /* If they don't overlap, the result is the first range. If they are
5034 equal, the result is false. If the second range is a subset of the
5035 first, and the ranges begin at the same place, we go from just after
5036 the end of the second range to the end of the first. If the second
5037 range is not a subset of the first, or if it is a subset and both
5038 ranges end at the same place, the range starts at the start of the
5039 first range and ends just before the second range.
5040 Otherwise, we can't describe this as a single range. */
5041 if (no_overlap)
5042 in_p = 1, low = low0, high = high0;
5043 else if (lowequal && highequal)
5044 in_p = 0, low = high = 0;
5045 else if (subset && lowequal)
5046 {
5047 low = range_successor (high1);
5048 high = high0;
5049 in_p = 1;
5050 if (low == 0)
5051 {
5052 /* We are in the weird situation where high0 > high1 but
5053 high1 has no successor. Punt. */
5054 return 0;
5055 }
5056 }
5057 else if (! subset || highequal)
5058 {
5059 low = low0;
5060 high = range_predecessor (low1);
5061 in_p = 1;
5062 if (high == 0)
5063 {
5064 /* low0 < low1 but low1 has no predecessor. Punt. */
5065 return 0;
5066 }
5067 }
5068 else
5069 return 0;
5070 }
5071
5072 else if (! in0_p && in1_p)
5073 {
5074 /* If they don't overlap, the result is the second range. If the second
5075 is a subset of the first, the result is false. Otherwise,
5076 the range starts just after the first range and ends at the
5077 end of the second. */
5078 if (no_overlap)
5079 in_p = 1, low = low1, high = high1;
5080 else if (subset || highequal)
5081 in_p = 0, low = high = 0;
5082 else
5083 {
5084 low = range_successor (high0);
5085 high = high1;
5086 in_p = 1;
5087 if (low == 0)
5088 {
5089 /* high1 > high0 but high0 has no successor. Punt. */
5090 return 0;
5091 }
5092 }
5093 }
5094
5095 else
5096 {
5097 /* The case where we are excluding both ranges. Here the complex case
5098 is if they don't overlap. In that case, the only time we have a
5099 range is if they are adjacent. If the second is a subset of the
5100 first, the result is the first. Otherwise, the range to exclude
5101 starts at the beginning of the first range and ends at the end of the
5102 second. */
5103 if (no_overlap)
5104 {
5105 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5106 range_successor (high0),
5107 1, low1, 0)))
5108 in_p = 0, low = low0, high = high1;
5109 else
5110 {
5111 /* Canonicalize - [min, x] into - [-, x]. */
5112 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5113 switch (TREE_CODE (TREE_TYPE (low0)))
5114 {
5115 case ENUMERAL_TYPE:
5116 if (TYPE_PRECISION (TREE_TYPE (low0))
5117 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5118 break;
5119 /* FALLTHROUGH */
5120 case INTEGER_TYPE:
5121 if (tree_int_cst_equal (low0,
5122 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5123 low0 = 0;
5124 break;
5125 case POINTER_TYPE:
5126 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5127 && integer_zerop (low0))
5128 low0 = 0;
5129 break;
5130 default:
5131 break;
5132 }
5133
5134 /* Canonicalize - [x, max] into - [x, -]. */
5135 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5136 switch (TREE_CODE (TREE_TYPE (high1)))
5137 {
5138 case ENUMERAL_TYPE:
5139 if (TYPE_PRECISION (TREE_TYPE (high1))
5140 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5141 break;
5142 /* FALLTHROUGH */
5143 case INTEGER_TYPE:
5144 if (tree_int_cst_equal (high1,
5145 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5146 high1 = 0;
5147 break;
5148 case POINTER_TYPE:
5149 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5150 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5151 high1, 1,
5152 build_int_cst (TREE_TYPE (high1), 1),
5153 1)))
5154 high1 = 0;
5155 break;
5156 default:
5157 break;
5158 }
5159
5160 /* The ranges might also be adjacent between the maximum and
5161 minimum values of the given type. For
5162 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5163 return + [x + 1, y - 1]. */
5164 if (low0 == 0 && high1 == 0)
5165 {
5166 low = range_successor (high0);
5167 high = range_predecessor (low1);
5168 if (low == 0 || high == 0)
5169 return 0;
5170
5171 in_p = 1;
5172 }
5173 else
5174 return 0;
5175 }
5176 }
5177 else if (subset)
5178 in_p = 0, low = low0, high = high0;
5179 else
5180 in_p = 0, low = low0, high = high1;
5181 }
5182
5183 *pin_p = in_p, *plow = low, *phigh = high;
5184 return 1;
5185 }
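
/* A worked example: merging "+ [2, 5]" with "+ [4, 10]" finds overlap
   but no subset, so the result is the intersection "+ [4, 5]".
   Merging "+ [2, 5]" with "- [4, 10]" takes the "in0_p && ! in1_p"
   arm and yields "+ [2, 3]". */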
5186 \f
5187
5188 /* Subroutine of fold, looking inside expressions of the form
5189 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5190 of the COND_EXPR. This function is being used also to optimize
5191 A op B ? C : A, by reversing the comparison first.
5192
5193 Return a folded expression whose code is not a COND_EXPR
5194 anymore, or NULL_TREE if no folding opportunity is found. */
5195
5196 static tree
5197 fold_cond_expr_with_comparison (location_t loc, tree type,
5198 tree arg0, tree arg1, tree arg2)
5199 {
5200 enum tree_code comp_code = TREE_CODE (arg0);
5201 tree arg00 = TREE_OPERAND (arg0, 0);
5202 tree arg01 = TREE_OPERAND (arg0, 1);
5203 tree arg1_type = TREE_TYPE (arg1);
5204 tree tem;
5205
5206 STRIP_NOPS (arg1);
5207 STRIP_NOPS (arg2);
5208
5209 /* If we have A op 0 ? A : -A, consider applying the following
5210 transformations:
5211
5212 A == 0? A : -A same as -A
5213 A != 0? A : -A same as A
5214 A >= 0? A : -A same as abs (A)
5215 A > 0? A : -A same as abs (A)
5216 A <= 0? A : -A same as -abs (A)
5217 A < 0? A : -A same as -abs (A)
5218
5219 None of these transformations work for modes with signed
5220 zeros. If A is +/-0, the first two transformations will
5221 change the sign of the result (from +0 to -0, or vice
5222 versa). The last four will fix the sign of the result,
5223 even though the original expressions could be positive or
5224 negative, depending on the sign of A.
5225
5226 Note that all these transformations are correct if A is
5227 NaN, since the two alternatives (A and -A) are also NaNs. */
5228 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5229 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5230 ? real_zerop (arg01)
5231 : integer_zerop (arg01))
5232 && ((TREE_CODE (arg2) == NEGATE_EXPR
5233 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5234 /* In the case that A is of the form X-Y, '-A' (arg2) may
5235 have already been folded to Y-X, check for that. */
5236 || (TREE_CODE (arg1) == MINUS_EXPR
5237 && TREE_CODE (arg2) == MINUS_EXPR
5238 && operand_equal_p (TREE_OPERAND (arg1, 0),
5239 TREE_OPERAND (arg2, 1), 0)
5240 && operand_equal_p (TREE_OPERAND (arg1, 1),
5241 TREE_OPERAND (arg2, 0), 0))))
5242 switch (comp_code)
5243 {
5244 case EQ_EXPR:
5245 case UNEQ_EXPR:
5246 tem = fold_convert_loc (loc, arg1_type, arg1);
5247 return fold_convert_loc (loc, type, negate_expr (tem));
5248 case NE_EXPR:
5249 case LTGT_EXPR:
5250 return fold_convert_loc (loc, type, arg1);
5251 case UNGE_EXPR:
5252 case UNGT_EXPR:
5253 if (flag_trapping_math)
5254 break;
5255 /* Fall through. */
5256 case GE_EXPR:
5257 case GT_EXPR:
5258 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5259 break;
5260 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5261 return fold_convert_loc (loc, type, tem);
5262 case UNLE_EXPR:
5263 case UNLT_EXPR:
5264 if (flag_trapping_math)
5265 break;
5266 /* FALLTHRU */
5267 case LE_EXPR:
5268 case LT_EXPR:
5269 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5270 break;
5271 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5272 return negate_expr (fold_convert_loc (loc, type, tem));
5273 default:
5274 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5275 break;
5276 }
5277
5278 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5279 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5280 both transformations are correct when A is NaN: A != 0
5281 is then true, and A == 0 is false. */
5282
5283 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5284 && integer_zerop (arg01) && integer_zerop (arg2))
5285 {
5286 if (comp_code == NE_EXPR)
5287 return fold_convert_loc (loc, type, arg1);
5288 else if (comp_code == EQ_EXPR)
5289 return build_zero_cst (type);
5290 }
5291
5292 /* Try some transformations of A op B ? A : B.
5293
5294 A == B? A : B same as B
5295 A != B? A : B same as A
5296 A >= B? A : B same as max (A, B)
5297 A > B? A : B same as max (B, A)
5298 A <= B? A : B same as min (A, B)
5299 A < B? A : B same as min (B, A)
5300
5301 As above, these transformations don't work in the presence
5302 of signed zeros. For example, if A and B are zeros of
5303 opposite sign, the first two transformations will change
5304 the sign of the result. In the last four, the original
5305 expressions give different results for (A=+0, B=-0) and
5306 (A=-0, B=+0), but the transformed expressions do not.
5307
5308 The first two transformations are correct if either A or B
5309 is a NaN. In the first transformation, the condition will
5310 be false, and B will indeed be chosen. In the case of the
5311 second transformation, the condition A != B will be true,
5312 and A will be chosen.
5313
5314 The conversions to max() and min() are not correct if B is
5315 a number and A is not. The conditions in the original
5316 expressions will be false, so all four give B. The min()
5317 and max() versions would give a NaN instead. */
5318 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5319 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5320 /* Avoid these transformations if the COND_EXPR may be used
5321 as an lvalue in the C++ front-end. PR c++/19199. */
5322 && (in_gimple_form
5323 || VECTOR_TYPE_P (type)
5324 || (! lang_GNU_CXX ()
5325 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5326 || ! maybe_lvalue_p (arg1)
5327 || ! maybe_lvalue_p (arg2)))
5328 {
5329 tree comp_op0 = arg00;
5330 tree comp_op1 = arg01;
5331 tree comp_type = TREE_TYPE (comp_op0);
5332
5333 switch (comp_code)
5334 {
5335 case EQ_EXPR:
5336 return fold_convert_loc (loc, type, arg2);
5337 case NE_EXPR:
5338 return fold_convert_loc (loc, type, arg1);
5339 case LE_EXPR:
5340 case LT_EXPR:
5341 case UNLE_EXPR:
5342 case UNLT_EXPR:
5343 /* In C++ a ?: expression can be an lvalue, so put the
5344 operand which will be used if they are equal first
5345 so that we can convert this back to the
5346 corresponding COND_EXPR. */
5347 if (!HONOR_NANS (arg1))
5348 {
5349 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5350 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5351 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5352 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5353 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5354 comp_op1, comp_op0);
5355 return fold_convert_loc (loc, type, tem);
5356 }
5357 break;
5358 case GE_EXPR:
5359 case GT_EXPR:
5360 case UNGE_EXPR:
5361 case UNGT_EXPR:
5362 if (!HONOR_NANS (arg1))
5363 {
5364 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5365 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5366 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5367 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5368 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5369 comp_op1, comp_op0);
5370 return fold_convert_loc (loc, type, tem);
5371 }
5372 break;
5373 case UNEQ_EXPR:
5374 if (!HONOR_NANS (arg1))
5375 return fold_convert_loc (loc, type, arg2);
5376 break;
5377 case LTGT_EXPR:
5378 if (!HONOR_NANS (arg1))
5379 return fold_convert_loc (loc, type, arg1);
5380 break;
5381 default:
5382 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5383 break;
5384 }
5385 }
5386
5387 return NULL_TREE;
5388 }
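/* Editorial sketch (illustrative only, not part of the original source):
   the source-level effect of the A op B ? A : B transforms handled above,
   for an integer type where NaNs and signed zeros do not apply.  */
static inline int
example_cond_minmax (int a, int b)
{
  int lo = a < b ? a : b;	/* folds to MIN_EXPR <b, a> */
  int hi = a >= b ? a : b;	/* folds to MAX_EXPR <a, b> */
  return lo + hi;		/* always equal to a + b */
}
/* For floating point the guards matter: with a = +0.0 and b = -0.0,
   a == b ? a : b evaluates to +0.0 while the folded form (b) is -0.0,
   so the transforms are skipped when signed zeros are honored.  */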
5389
5390
5391 \f
5392 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5393 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5394 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5395 false) >= 2)
5396 #endif
5397
5398 /* EXP is some logical combination of boolean tests. See if we can
5399 merge it into some range test. Return the new tree if so. */
5400
5401 static tree
5402 fold_range_test (location_t loc, enum tree_code code, tree type,
5403 tree op0, tree op1)
5404 {
5405 int or_op = (code == TRUTH_ORIF_EXPR
5406 || code == TRUTH_OR_EXPR);
5407 int in0_p, in1_p, in_p;
5408 tree low0, low1, low, high0, high1, high;
5409 bool strict_overflow_p = false;
5410 tree tem, lhs, rhs;
5411 const char * const warnmsg = G_("assuming signed overflow does not occur "
5412 "when simplifying range test");
5413
5414 if (!INTEGRAL_TYPE_P (type))
5415 return 0;
5416
5417 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5418 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5419
5420 /* If this is an OR operation, invert both sides; we will invert
5421 again at the end. */
5422 if (or_op)
5423 in0_p = ! in0_p, in1_p = ! in1_p;
5424
5425 /* If both expressions are the same, if we can merge the ranges, and we
5426 	   can build the range test, return it, inverted if necessary.  If one of the
5427 ranges is always true or always false, consider it to be the same
5428 expression as the other. */
5429 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5430 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5431 in1_p, low1, high1)
5432 && 0 != (tem = (build_range_check (loc, type,
5433 lhs != 0 ? lhs
5434 : rhs != 0 ? rhs : integer_zero_node,
5435 in_p, low, high))))
5436 {
5437 if (strict_overflow_p)
5438 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5439 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5440 }
5441
5442 	  /* On machines where branches are expensive, if this is a
5443 short-circuited branch and the underlying object on both sides
5444 is the same, make a non-short-circuit operation. */
5445 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5446 && lhs != 0 && rhs != 0
5447 && (code == TRUTH_ANDIF_EXPR
5448 || code == TRUTH_ORIF_EXPR)
5449 && operand_equal_p (lhs, rhs, 0))
5450 {
5451 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5452 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5453 which cases we can't do this. */
5454 if (simple_operand_p (lhs))
5455 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5456 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5457 type, op0, op1);
5458
5459 else if (!lang_hooks.decls.global_bindings_p ()
5460 && !CONTAINS_PLACEHOLDER_P (lhs))
5461 {
5462 tree common = save_expr (lhs);
5463
5464 if (0 != (lhs = build_range_check (loc, type, common,
5465 or_op ? ! in0_p : in0_p,
5466 low0, high0))
5467 && (0 != (rhs = build_range_check (loc, type, common,
5468 or_op ? ! in1_p : in1_p,
5469 low1, high1))))
5470 {
5471 if (strict_overflow_p)
5472 fold_overflow_warning (warnmsg,
5473 WARN_STRICT_OVERFLOW_COMPARISON);
5474 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5475 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5476 type, lhs, rhs);
5477 }
5478 }
5479 }
5480
5481 return 0;
5482 }
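/* Editorial sketch (hypothetical function, not part of the original
   source): the classic range test fold_range_test is after.  */
static inline int
example_is_digit (int ch)
{
  /* The two comparisons merge into a single range check, essentially
     (unsigned) (ch - '0') <= 9, so only one comparison and branch
     remain.  */
  return ch >= '0' && ch <= '9';
}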
5483 \f
5484 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5485    P-bit value.  Arrange things so the extra bits will be set to zero if and
5486    only if C is sign-extended to its full width.  If MASK is nonzero,
5487 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5488
5489 static tree
5490 unextend (tree c, int p, int unsignedp, tree mask)
5491 {
5492 tree type = TREE_TYPE (c);
5493 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5494 tree temp;
5495
5496 if (p == modesize || unsignedp)
5497 return c;
5498
5499 /* We work by getting just the sign bit into the low-order bit, then
5500 into the high-order bit, then sign-extend. We then XOR that value
5501 with C. */
5502 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5503
5504 /* We must use a signed type in order to get an arithmetic right shift.
5505 However, we must also avoid introducing accidental overflows, so that
5506 a subsequent call to integer_zerop will work. Hence we must
5507 do the type conversion here. At this point, the constant is either
5508 zero or one, and the conversion to a signed type can never overflow.
5509 We could get an overflow if this conversion is done anywhere else. */
5510 if (TYPE_UNSIGNED (type))
5511 temp = fold_convert (signed_type_for (type), temp);
5512
5513 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5514 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5515 if (mask != 0)
5516 temp = const_binop (BIT_AND_EXPR, temp,
5517 fold_convert (TREE_TYPE (c), mask));
5518 /* If necessary, convert the type back to match the type of C. */
5519 if (TYPE_UNSIGNED (type))
5520 temp = fold_convert (type, temp);
5521
5522 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5523 }
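/* Editorial worked example (values invented): unextend with P = 4 in an
   8-bit mode.  For C = 0x0a (the 4-bit value -6) the sign bit of the
   field is 1; shifting it to bit 7 and arithmetic-right-shifting by
   8 - 4 - 1 = 3 yields 0xf0, and C ^ 0xf0 = 0xfa, i.e. the 4-bit value
   sign-extended to the full mode.  A plain-C rendering of the same
   computation:  */
static inline unsigned char
example_unextend4 (unsigned char c)
{
  unsigned char sign = (c >> 3) & 1;	  /* bit P-1 of the field */
  unsigned char ext = sign ? 0xf0 : 0x00; /* what the two shifts build */
  return c ^ ext;			  /* 0x0a -> 0xfa, 0x05 -> 0x05 */
}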
5524 \f
5525 /* For an expression that has the form
5526 (A && B) || ~B
5527 or
5528 (A || B) && ~B,
5529 we can drop one of the inner expressions and simplify to
5530 A || ~B
5531 or
5532 A && ~B
5533 LOC is the location of the resulting expression. OP is the inner
5534 logical operation; the left-hand side in the examples above, while CMPOP
5535 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5536 removing a condition that guards another, as in
5537 (A != NULL && A->...) || A == NULL
5538 which we must not transform. If RHS_ONLY is true, only eliminate the
5539 right-most operand of the inner logical operation. */
5540
5541 static tree
5542 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5543 bool rhs_only)
5544 {
5545 tree type = TREE_TYPE (cmpop);
5546 enum tree_code code = TREE_CODE (cmpop);
5547 enum tree_code truthop_code = TREE_CODE (op);
5548 tree lhs = TREE_OPERAND (op, 0);
5549 tree rhs = TREE_OPERAND (op, 1);
5550 tree orig_lhs = lhs, orig_rhs = rhs;
5551 enum tree_code rhs_code = TREE_CODE (rhs);
5552 enum tree_code lhs_code = TREE_CODE (lhs);
5553 enum tree_code inv_code;
5554
5555 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5556 return NULL_TREE;
5557
5558 if (TREE_CODE_CLASS (code) != tcc_comparison)
5559 return NULL_TREE;
5560
5561 if (rhs_code == truthop_code)
5562 {
5563 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5564 if (newrhs != NULL_TREE)
5565 {
5566 rhs = newrhs;
5567 rhs_code = TREE_CODE (rhs);
5568 }
5569 }
5570 if (lhs_code == truthop_code && !rhs_only)
5571 {
5572 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5573 if (newlhs != NULL_TREE)
5574 {
5575 lhs = newlhs;
5576 lhs_code = TREE_CODE (lhs);
5577 }
5578 }
5579
5580 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5581 if (inv_code == rhs_code
5582 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5583 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5584 return lhs;
5585 if (!rhs_only && inv_code == lhs_code
5586 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5587 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5588 return rhs;
5589 if (rhs != orig_rhs || lhs != orig_lhs)
5590 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5591 lhs, rhs);
5592 return NULL_TREE;
5593 }
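/* Editorial sketch (hypothetical, not part of the original source): what
   merge_truthop_with_opposite_arm does, and the guarded case it exists
   to avoid.  */
static inline int
example_drop_opposite_arm (int x, int y)
{
  /* (x > 0 && y == 3) || y != 3 simplifies to x > 0 || y != 3: if
     y != 3 both forms are true, and if y == 3 both reduce to x > 0.  */
  return (x > 0 && y == 3) || y != 3;
}
/* By contrast, (p != 0 && *p > 0) || p == 0 must keep the p != 0 guard:
   dropping it would dereference a null pointer.  RHS_ONLY restricts the
   elimination so guarding conditions like this survive.  */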
5594
5595 /* Find ways of folding logical expressions of LHS and RHS:
5596 Try to merge two comparisons to the same innermost item.
5597 Look for range tests like "ch >= '0' && ch <= '9'".
5598 Look for combinations of simple terms on machines with expensive branches
5599 and evaluate the RHS unconditionally.
5600
5601 For example, if we have p->a == 2 && p->b == 4 and we can make an
5602 object large enough to span both A and B, we can do this with a comparison
5603    against the object ANDed with a mask.
5604
5605 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5606 operations to do this with one comparison.
5607
5608    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5609    function and the one above.
5610
5611 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5612 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5613
5614 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5615 two operands.
5616
5617 We return the simplified tree or 0 if no optimization is possible. */
5618
5619 static tree
5620 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5621 tree lhs, tree rhs)
5622 {
5623 /* If this is the "or" of two comparisons, we can do something if
5624 the comparisons are NE_EXPR. If this is the "and", we can do something
5625 if the comparisons are EQ_EXPR. I.e.,
5626 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5627
5628 WANTED_CODE is this operation code. For single bit fields, we can
5629 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5630 comparison for one-bit fields. */
5631
5632 enum tree_code wanted_code;
5633 enum tree_code lcode, rcode;
5634 tree ll_arg, lr_arg, rl_arg, rr_arg;
5635 tree ll_inner, lr_inner, rl_inner, rr_inner;
5636 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5637 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5638 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5639 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5640 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5641 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5642 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5643 machine_mode lnmode, rnmode;
5644 tree ll_mask, lr_mask, rl_mask, rr_mask;
5645 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5646 tree l_const, r_const;
5647 tree lntype, rntype, result;
5648 HOST_WIDE_INT first_bit, end_bit;
5649 int volatilep;
5650
5651 /* Start by getting the comparison codes. Fail if anything is volatile.
5652 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5653 it were surrounded with a NE_EXPR. */
5654
5655 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5656 return 0;
5657
5658 lcode = TREE_CODE (lhs);
5659 rcode = TREE_CODE (rhs);
5660
5661 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5662 {
5663 lhs = build2 (NE_EXPR, truth_type, lhs,
5664 build_int_cst (TREE_TYPE (lhs), 0));
5665 lcode = NE_EXPR;
5666 }
5667
5668 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5669 {
5670 rhs = build2 (NE_EXPR, truth_type, rhs,
5671 build_int_cst (TREE_TYPE (rhs), 0));
5672 rcode = NE_EXPR;
5673 }
5674
5675 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5676 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5677 return 0;
5678
5679 ll_arg = TREE_OPERAND (lhs, 0);
5680 lr_arg = TREE_OPERAND (lhs, 1);
5681 rl_arg = TREE_OPERAND (rhs, 0);
5682 rr_arg = TREE_OPERAND (rhs, 1);
5683
5684 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5685 if (simple_operand_p (ll_arg)
5686 && simple_operand_p (lr_arg))
5687 {
5688 if (operand_equal_p (ll_arg, rl_arg, 0)
5689 && operand_equal_p (lr_arg, rr_arg, 0))
5690 {
5691 result = combine_comparisons (loc, code, lcode, rcode,
5692 truth_type, ll_arg, lr_arg);
5693 if (result)
5694 return result;
5695 }
5696 else if (operand_equal_p (ll_arg, rr_arg, 0)
5697 && operand_equal_p (lr_arg, rl_arg, 0))
5698 {
5699 result = combine_comparisons (loc, code, lcode,
5700 swap_tree_comparison (rcode),
5701 truth_type, ll_arg, lr_arg);
5702 if (result)
5703 return result;
5704 }
5705 }
5706
5707 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5708 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5709
5710 /* If the RHS can be evaluated unconditionally and its operands are
5711 simple, it wins to evaluate the RHS unconditionally on machines
5712 with expensive branches. In this case, this isn't a comparison
5713 that can be merged. */
5714
5715 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5716 false) >= 2
5717 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5718 && simple_operand_p (rl_arg)
5719 && simple_operand_p (rr_arg))
5720 {
5721 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5722 if (code == TRUTH_OR_EXPR
5723 && lcode == NE_EXPR && integer_zerop (lr_arg)
5724 && rcode == NE_EXPR && integer_zerop (rr_arg)
5725 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5726 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5727 return build2_loc (loc, NE_EXPR, truth_type,
5728 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5729 ll_arg, rl_arg),
5730 build_int_cst (TREE_TYPE (ll_arg), 0));
5731
5732 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5733 if (code == TRUTH_AND_EXPR
5734 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5735 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5736 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5737 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5738 return build2_loc (loc, EQ_EXPR, truth_type,
5739 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5740 ll_arg, rl_arg),
5741 build_int_cst (TREE_TYPE (ll_arg), 0));
5742 }
5743
5744 /* See if the comparisons can be merged. Then get all the parameters for
5745 each side. */
5746
5747 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5748 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5749 return 0;
5750
5751 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5752 volatilep = 0;
5753 ll_inner = decode_field_reference (loc, &ll_arg,
5754 &ll_bitsize, &ll_bitpos, &ll_mode,
5755 &ll_unsignedp, &ll_reversep, &volatilep,
5756 &ll_mask, &ll_and_mask);
5757 lr_inner = decode_field_reference (loc, &lr_arg,
5758 &lr_bitsize, &lr_bitpos, &lr_mode,
5759 &lr_unsignedp, &lr_reversep, &volatilep,
5760 &lr_mask, &lr_and_mask);
5761 rl_inner = decode_field_reference (loc, &rl_arg,
5762 &rl_bitsize, &rl_bitpos, &rl_mode,
5763 &rl_unsignedp, &rl_reversep, &volatilep,
5764 &rl_mask, &rl_and_mask);
5765 rr_inner = decode_field_reference (loc, &rr_arg,
5766 &rr_bitsize, &rr_bitpos, &rr_mode,
5767 &rr_unsignedp, &rr_reversep, &volatilep,
5768 &rr_mask, &rr_and_mask);
5769
5770   /* The inner operation on the lhs of each comparison must be the
5771      same if we are to be able to do anything.  Then see if we have
5772      constants.  If not, the same must be true for the rhs's.  */
5774 if (volatilep
5775 || ll_reversep != rl_reversep
5776 || ll_inner == 0 || rl_inner == 0
5777 || ! operand_equal_p (ll_inner, rl_inner, 0))
5778 return 0;
5779
5780 if (TREE_CODE (lr_arg) == INTEGER_CST
5781 && TREE_CODE (rr_arg) == INTEGER_CST)
5782 {
5783 l_const = lr_arg, r_const = rr_arg;
5784 lr_reversep = ll_reversep;
5785 }
5786 else if (lr_reversep != rr_reversep
5787 || lr_inner == 0 || rr_inner == 0
5788 || ! operand_equal_p (lr_inner, rr_inner, 0))
5789 return 0;
5790 else
5791 l_const = r_const = 0;
5792
5793 /* If either comparison code is not correct for our logical operation,
5794 fail. However, we can convert a one-bit comparison against zero into
5795 the opposite comparison against that bit being set in the field. */
5796
5797 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5798 if (lcode != wanted_code)
5799 {
5800 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5801 {
5802 /* Make the left operand unsigned, since we are only interested
5803 in the value of one bit. Otherwise we are doing the wrong
5804 thing below. */
5805 ll_unsignedp = 1;
5806 l_const = ll_mask;
5807 }
5808 else
5809 return 0;
5810 }
5811
5812 /* This is analogous to the code for l_const above. */
5813 if (rcode != wanted_code)
5814 {
5815 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5816 {
5817 rl_unsignedp = 1;
5818 r_const = rl_mask;
5819 }
5820 else
5821 return 0;
5822 }
5823
5824 /* See if we can find a mode that contains both fields being compared on
5825 the left. If we can't, fail. Otherwise, update all constants and masks
5826 to be relative to a field of that size. */
5827 first_bit = MIN (ll_bitpos, rl_bitpos);
5828 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5829 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5830 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5831 volatilep);
5832 if (lnmode == VOIDmode)
5833 return 0;
5834
5835 lnbitsize = GET_MODE_BITSIZE (lnmode);
5836 lnbitpos = first_bit & ~ (lnbitsize - 1);
5837 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5838 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5839
5840 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5841 {
5842 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5843 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5844 }
5845
5846 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5847 size_int (xll_bitpos));
5848 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5849 size_int (xrl_bitpos));
5850
5851 if (l_const)
5852 {
5853 l_const = fold_convert_loc (loc, lntype, l_const);
5854 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5855 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5856 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5857 fold_build1_loc (loc, BIT_NOT_EXPR,
5858 lntype, ll_mask))))
5859 {
5860 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5861
5862 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5863 }
5864 }
5865 if (r_const)
5866 {
5867 r_const = fold_convert_loc (loc, lntype, r_const);
5868 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5869 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5870 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5871 fold_build1_loc (loc, BIT_NOT_EXPR,
5872 lntype, rl_mask))))
5873 {
5874 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5875
5876 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5877 }
5878 }
5879
5880   /* If the right sides are not constant, do the same for them.  Also,
5881 disallow this optimization if a size or signedness mismatch occurs
5882 between the left and right sides. */
5883 if (l_const == 0)
5884 {
5885 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5886 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5887 /* Make sure the two fields on the right
5888 correspond to the left without being swapped. */
5889 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5890 return 0;
5891
5892 first_bit = MIN (lr_bitpos, rr_bitpos);
5893 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5894 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5895 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5896 volatilep);
5897 if (rnmode == VOIDmode)
5898 return 0;
5899
5900 rnbitsize = GET_MODE_BITSIZE (rnmode);
5901 rnbitpos = first_bit & ~ (rnbitsize - 1);
5902 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5903 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5904
5905 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5906 {
5907 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5908 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5909 }
5910
5911 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5912 rntype, lr_mask),
5913 size_int (xlr_bitpos));
5914 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5915 rntype, rr_mask),
5916 size_int (xrr_bitpos));
5917
5918 /* Make a mask that corresponds to both fields being compared.
5919 Do this for both items being compared. If the operands are the
5920 same size and the bits being compared are in the same position
5921 then we can do this by masking both and comparing the masked
5922 results. */
5923 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5924 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5925 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5926 {
5927 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5928 lntype, lnbitsize, lnbitpos,
5929 ll_unsignedp || rl_unsignedp, ll_reversep);
5930 if (! all_ones_mask_p (ll_mask, lnbitsize))
5931 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5932
5933 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5934 rntype, rnbitsize, rnbitpos,
5935 lr_unsignedp || rr_unsignedp, lr_reversep);
5936 if (! all_ones_mask_p (lr_mask, rnbitsize))
5937 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5938
5939 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5940 }
5941
5942 /* There is still another way we can do something: If both pairs of
5943 fields being compared are adjacent, we may be able to make a wider
5944 field containing them both.
5945
5946 Note that we still must mask the lhs/rhs expressions. Furthermore,
5947 the mask must be shifted to account for the shift done by
5948 make_bit_field_ref. */
5949 if ((ll_bitsize + ll_bitpos == rl_bitpos
5950 && lr_bitsize + lr_bitpos == rr_bitpos)
5951 || (ll_bitpos == rl_bitpos + rl_bitsize
5952 && lr_bitpos == rr_bitpos + rr_bitsize))
5953 {
5954 tree type;
5955
5956 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5957 ll_bitsize + rl_bitsize,
5958 MIN (ll_bitpos, rl_bitpos),
5959 ll_unsignedp, ll_reversep);
5960 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5961 lr_bitsize + rr_bitsize,
5962 MIN (lr_bitpos, rr_bitpos),
5963 lr_unsignedp, lr_reversep);
5964
5965 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5966 size_int (MIN (xll_bitpos, xrl_bitpos)));
5967 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5968 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5969
5970 /* Convert to the smaller type before masking out unwanted bits. */
5971 type = lntype;
5972 if (lntype != rntype)
5973 {
5974 if (lnbitsize > rnbitsize)
5975 {
5976 lhs = fold_convert_loc (loc, rntype, lhs);
5977 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5978 type = rntype;
5979 }
5980 else if (lnbitsize < rnbitsize)
5981 {
5982 rhs = fold_convert_loc (loc, lntype, rhs);
5983 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5984 type = lntype;
5985 }
5986 }
5987
5988 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5989 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5990
5991 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5992 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5993
5994 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5995 }
5996
5997 return 0;
5998 }
5999
6000 /* Handle the case of comparisons with constants. If there is something in
6001 common between the masks, those bits of the constants must be the same.
6002 If not, the condition is always false. Test for this to avoid generating
6003 incorrect code below. */
6004 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6005 if (! integer_zerop (result)
6006 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6007 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6008 {
6009 if (wanted_code == NE_EXPR)
6010 {
6011 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6012 return constant_boolean_node (true, truth_type);
6013 }
6014 else
6015 {
6016 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6017 return constant_boolean_node (false, truth_type);
6018 }
6019 }
6020
6021 /* Construct the expression we will return. First get the component
6022 reference we will make. Unless the mask is all ones the width of
6023 that field, perform the mask operation. Then compare with the
6024 merged constant. */
6025 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6026 lntype, lnbitsize, lnbitpos,
6027 ll_unsignedp || rl_unsignedp, ll_reversep);
6028
6029 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6030 if (! all_ones_mask_p (ll_mask, lnbitsize))
6031 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6032
6033 return build2_loc (loc, wanted_code, truth_type, result,
6034 const_binop (BIT_IOR_EXPR, l_const, r_const));
6035 }
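/* Editorial sketch (hypothetical struct; exact masks and constants depend
   on the target's bit-field layout): the merge fold_truth_andor_1
   performs.  */
struct example_pair { unsigned a : 4; unsigned b : 4; };
static inline int
example_merged_compare (struct example_pair p)
{
  /* With a and b adjacent in one byte, the two tests become a single
     load, mask and compare against the merged constant -- on a typical
     little-endian layout, roughly (byte & 0xff) == (2 | 4 << 4).  */
  return p.a == 2 && p.b == 4;
}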
6036 \f
6037 /* T is an integer expression that is being multiplied, divided, or taken a
6038 modulus (CODE says which and what kind of divide or modulus) by a
6039 constant C. See if we can eliminate that operation by folding it with
6040 other operations already in T. WIDE_TYPE, if non-null, is a type that
6041 should be used for the computation if wider than our type.
6042
6043 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6044 (X * 2) + (Y * 4). We must, however, be assured that either the original
6045 expression would not overflow or that overflow is undefined for the type
6046 in the language in question.
6047
6048 If we return a non-null expression, it is an equivalent form of the
6049 original computation, but need not be in the original type.
6050
6051    We set *STRICT_OVERFLOW_P to true if the return value depends on
6052 signed overflow being undefined. Otherwise we do not change
6053 *STRICT_OVERFLOW_P. */
6054
6055 static tree
6056 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6057 bool *strict_overflow_p)
6058 {
6059 /* To avoid exponential search depth, refuse to allow recursion past
6060 three levels. Beyond that (1) it's highly unlikely that we'll find
6061 something interesting and (2) we've probably processed it before
6062 when we built the inner expression. */
6063
6064 static int depth;
6065 tree ret;
6066
6067 if (depth > 3)
6068 return NULL;
6069
6070 depth++;
6071 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6072 depth--;
6073
6074 return ret;
6075 }
6076
6077 static tree
6078 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6079 bool *strict_overflow_p)
6080 {
6081 tree type = TREE_TYPE (t);
6082 enum tree_code tcode = TREE_CODE (t);
6083 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6084 > GET_MODE_SIZE (TYPE_MODE (type)))
6085 ? wide_type : type);
6086 tree t1, t2;
6087 int same_p = tcode == code;
6088 tree op0 = NULL_TREE, op1 = NULL_TREE;
6089 bool sub_strict_overflow_p;
6090
6091 /* Don't deal with constants of zero here; they confuse the code below. */
6092 if (integer_zerop (c))
6093 return NULL_TREE;
6094
6095 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6096 op0 = TREE_OPERAND (t, 0);
6097
6098 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6099 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6100
6101 /* Note that we need not handle conditional operations here since fold
6102 already handles those cases. So just do arithmetic here. */
6103 switch (tcode)
6104 {
6105 case INTEGER_CST:
6106 /* For a constant, we can always simplify if we are a multiply
6107 or (for divide and modulus) if it is a multiple of our constant. */
6108 if (code == MULT_EXPR
6109 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6110 {
6111 tree tem = const_binop (code, fold_convert (ctype, t),
6112 fold_convert (ctype, c));
6113 /* If the multiplication overflowed, we lost information on it.
6114 See PR68142 and PR69845. */
6115 if (TREE_OVERFLOW (tem))
6116 return NULL_TREE;
6117 return tem;
6118 }
6119 break;
6120
6121 CASE_CONVERT: case NON_LVALUE_EXPR:
6122 /* If op0 is an expression ... */
6123 if ((COMPARISON_CLASS_P (op0)
6124 || UNARY_CLASS_P (op0)
6125 || BINARY_CLASS_P (op0)
6126 || VL_EXP_CLASS_P (op0)
6127 || EXPRESSION_CLASS_P (op0))
6128 /* ... and has wrapping overflow, and its type is smaller
6129 than ctype, then we cannot pass through as widening. */
6130 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6131 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6132 && (TYPE_PRECISION (ctype)
6133 > TYPE_PRECISION (TREE_TYPE (op0))))
6134 /* ... or this is a truncation (t is narrower than op0),
6135 then we cannot pass through this narrowing. */
6136 || (TYPE_PRECISION (type)
6137 < TYPE_PRECISION (TREE_TYPE (op0)))
6138 /* ... or signedness changes for division or modulus,
6139 then we cannot pass through this conversion. */
6140 || (code != MULT_EXPR
6141 && (TYPE_UNSIGNED (ctype)
6142 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6143 /* ... or has undefined overflow while the converted to
6144 type has not, we cannot do the operation in the inner type
6145 as that would introduce undefined overflow. */
6146 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6147 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6148 && !TYPE_OVERFLOW_UNDEFINED (type))))
6149 break;
6150
6151 /* Pass the constant down and see if we can make a simplification. If
6152 we can, replace this expression with the inner simplification for
6153 possible later conversion to our or some other type. */
6154 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6155 && TREE_CODE (t2) == INTEGER_CST
6156 && !TREE_OVERFLOW (t2)
6157 && (0 != (t1 = extract_muldiv (op0, t2, code,
6158 code == MULT_EXPR
6159 ? ctype : NULL_TREE,
6160 strict_overflow_p))))
6161 return t1;
6162 break;
6163
6164 case ABS_EXPR:
6165 /* If widening the type changes it from signed to unsigned, then we
6166 must avoid building ABS_EXPR itself as unsigned. */
6167 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6168 {
6169 tree cstype = (*signed_type_for) (ctype);
6170 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6171 != 0)
6172 {
6173 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6174 return fold_convert (ctype, t1);
6175 }
6176 break;
6177 }
6178 /* If the constant is negative, we cannot simplify this. */
6179 if (tree_int_cst_sgn (c) == -1)
6180 break;
6181 /* FALLTHROUGH */
6182 case NEGATE_EXPR:
6183 /* For division and modulus, type can't be unsigned, as e.g.
6184 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6185 For signed types, even with wrapping overflow, this is fine. */
6186 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6187 break;
6188 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6189 != 0)
6190 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6191 break;
6192
6193 case MIN_EXPR: case MAX_EXPR:
6194 /* If widening the type changes the signedness, then we can't perform
6195 this optimization as that changes the result. */
6196 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6197 break;
6198
6199 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6200 sub_strict_overflow_p = false;
6201 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6202 &sub_strict_overflow_p)) != 0
6203 && (t2 = extract_muldiv (op1, c, code, wide_type,
6204 &sub_strict_overflow_p)) != 0)
6205 {
6206 if (tree_int_cst_sgn (c) < 0)
6207 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6208 if (sub_strict_overflow_p)
6209 *strict_overflow_p = true;
6210 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6211 fold_convert (ctype, t2));
6212 }
6213 break;
6214
6215 case LSHIFT_EXPR: case RSHIFT_EXPR:
6216 /* If the second operand is constant, this is a multiplication
6217          or floor division by a power of two, so we can treat it that
6218 way unless the multiplier or divisor overflows. Signed
6219 left-shift overflow is implementation-defined rather than
6220 undefined in C90, so do not convert signed left shift into
6221 multiplication. */
6222 if (TREE_CODE (op1) == INTEGER_CST
6223 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6224 /* const_binop may not detect overflow correctly,
6225 so check for it explicitly here. */
6226 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6227 && 0 != (t1 = fold_convert (ctype,
6228 const_binop (LSHIFT_EXPR,
6229 size_one_node,
6230 op1)))
6231 && !TREE_OVERFLOW (t1))
6232 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6233 ? MULT_EXPR : FLOOR_DIV_EXPR,
6234 ctype,
6235 fold_convert (ctype, op0),
6236 t1),
6237 c, code, wide_type, strict_overflow_p);
6238 break;
6239
6240 case PLUS_EXPR: case MINUS_EXPR:
6241 /* See if we can eliminate the operation on both sides. If we can, we
6242 can return a new PLUS or MINUS. If we can't, the only remaining
6243 cases where we can do anything are if the second operand is a
6244 constant. */
6245 sub_strict_overflow_p = false;
6246 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6247 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6248 if (t1 != 0 && t2 != 0
6249 && TYPE_OVERFLOW_WRAPS (ctype)
6250 && (code == MULT_EXPR
6251 /* If not multiplication, we can only do this if both operands
6252 are divisible by c. */
6253 || (multiple_of_p (ctype, op0, c)
6254 && multiple_of_p (ctype, op1, c))))
6255 {
6256 if (sub_strict_overflow_p)
6257 *strict_overflow_p = true;
6258 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6259 fold_convert (ctype, t2));
6260 }
6261
6262 /* If this was a subtraction, negate OP1 and set it to be an addition.
6263 This simplifies the logic below. */
6264 if (tcode == MINUS_EXPR)
6265 {
6266 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6267 /* If OP1 was not easily negatable, the constant may be OP0. */
6268 if (TREE_CODE (op0) == INTEGER_CST)
6269 {
6270 std::swap (op0, op1);
6271 std::swap (t1, t2);
6272 }
6273 }
6274
6275 if (TREE_CODE (op1) != INTEGER_CST)
6276 break;
6277
6278 /* If either OP1 or C are negative, this optimization is not safe for
6279 some of the division and remainder types while for others we need
6280 to change the code. */
6281 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6282 {
6283 if (code == CEIL_DIV_EXPR)
6284 code = FLOOR_DIV_EXPR;
6285 else if (code == FLOOR_DIV_EXPR)
6286 code = CEIL_DIV_EXPR;
6287 else if (code != MULT_EXPR
6288 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6289 break;
6290 }
6291
6292 /* If it's a multiply or a division/modulus operation of a multiple
6293 of our constant, do the operation and verify it doesn't overflow. */
6294 if (code == MULT_EXPR
6295 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6296 {
6297 op1 = const_binop (code, fold_convert (ctype, op1),
6298 fold_convert (ctype, c));
6299 /* We allow the constant to overflow with wrapping semantics. */
6300 if (op1 == 0
6301 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6302 break;
6303 }
6304 else
6305 break;
6306
6307 /* If we have an unsigned type, we cannot widen the operation since it
6308 will change the result if the original computation overflowed. */
6309 if (TYPE_UNSIGNED (ctype) && ctype != type)
6310 break;
6311
6312 /* The last case is if we are a multiply. In that case, we can
6313 apply the distributive law to commute the multiply and addition
6314 if the multiplication of the constants doesn't overflow
6315 and overflow is defined. With undefined overflow
6316 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6317 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6318 return fold_build2 (tcode, ctype,
6319 fold_build2 (code, ctype,
6320 fold_convert (ctype, op0),
6321 fold_convert (ctype, c)),
6322 op1);
6323
6324 break;
6325
6326 case MULT_EXPR:
6327 /* We have a special case here if we are doing something like
6328 (C * 8) % 4 since we know that's zero. */
6329 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6330 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6331 /* If the multiplication can overflow we cannot optimize this. */
6332 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6333 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6334 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6335 {
6336 *strict_overflow_p = true;
6337 return omit_one_operand (type, integer_zero_node, op0);
6338 }
6339
6340 /* ... fall through ... */
6341
6342 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6343 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6344 /* If we can extract our operation from the LHS, do so and return a
6345 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6346 do something only if the second operand is a constant. */
6347 if (same_p
6348 && TYPE_OVERFLOW_WRAPS (ctype)
6349 && (t1 = extract_muldiv (op0, c, code, wide_type,
6350 strict_overflow_p)) != 0)
6351 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6352 fold_convert (ctype, op1));
6353 else if (tcode == MULT_EXPR && code == MULT_EXPR
6354 && TYPE_OVERFLOW_WRAPS (ctype)
6355 && (t1 = extract_muldiv (op1, c, code, wide_type,
6356 strict_overflow_p)) != 0)
6357 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6358 fold_convert (ctype, t1));
6359 else if (TREE_CODE (op1) != INTEGER_CST)
6360 return 0;
6361
6362 /* If these are the same operation types, we can associate them
6363 assuming no overflow. */
6364 if (tcode == code)
6365 {
6366 bool overflow_p = false;
6367 bool overflow_mul_p;
6368 signop sign = TYPE_SIGN (ctype);
6369 unsigned prec = TYPE_PRECISION (ctype);
6370 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6371 wi::to_wide (c, prec),
6372 sign, &overflow_mul_p);
6373 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6374 if (overflow_mul_p
6375 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6376 overflow_p = true;
6377 if (!overflow_p)
6378 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6379 wide_int_to_tree (ctype, mul));
6380 }
6381
6382 /* If these operations "cancel" each other, we have the main
6383 optimizations of this pass, which occur when either constant is a
6384 multiple of the other, in which case we replace this with either an
6385          operation of CODE or TCODE.
6386
6387 If we have an unsigned type, we cannot do this since it will change
6388 the result if the original computation overflowed. */
6389 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6390 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6391 || (tcode == MULT_EXPR
6392 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6393 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6394 && code != MULT_EXPR)))
6395 {
6396 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6397 {
6398 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6399 *strict_overflow_p = true;
6400 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6401 fold_convert (ctype,
6402 const_binop (TRUNC_DIV_EXPR,
6403 op1, c)));
6404 }
6405 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6406 {
6407 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6408 *strict_overflow_p = true;
6409 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6410 fold_convert (ctype,
6411 const_binop (TRUNC_DIV_EXPR,
6412 c, op1)));
6413 }
6414 }
6415 break;
6416
6417 default:
6418 break;
6419 }
6420
6421 return 0;
6422 }
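/* Editorial sketch (hypothetical): a cancellation extract_muldiv can
   perform when signed overflow is undefined.  */
static inline int
example_muldiv_cancel (int x)
{
  /* (x * 12) / 4 becomes x * 3: the multiplier 12 is a multiple of the
     divisor 4, and undefined signed overflow lets us assume x * 12 did
     not wrap; *STRICT_OVERFLOW_P records that assumption.  With
     unsigned (wrapping) arithmetic the fold is not done.  */
  return x * 12 / 4;
}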
6423 \f
6424 /* Return a node which has the indicated constant VALUE (either 0 or
6425 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6426 and is of the indicated TYPE. */
6427
6428 tree
6429 constant_boolean_node (bool value, tree type)
6430 {
6431 if (type == integer_type_node)
6432 return value ? integer_one_node : integer_zero_node;
6433 else if (type == boolean_type_node)
6434 return value ? boolean_true_node : boolean_false_node;
6435 else if (TREE_CODE (type) == VECTOR_TYPE)
6436 return build_vector_from_val (type,
6437 build_int_cst (TREE_TYPE (type),
6438 value ? -1 : 0));
6439 else
6440 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6441 }
6442
6443
6444 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6445 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6446 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6447 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6448 COND is the first argument to CODE; otherwise (as in the example
6449 given here), it is the second argument. TYPE is the type of the
6450 original expression. Return NULL_TREE if no simplification is
6451 possible. */
6452
6453 static tree
6454 fold_binary_op_with_conditional_arg (location_t loc,
6455 enum tree_code code,
6456 tree type, tree op0, tree op1,
6457 tree cond, tree arg, int cond_first_p)
6458 {
6459 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6460 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6461 tree test, true_value, false_value;
6462 tree lhs = NULL_TREE;
6463 tree rhs = NULL_TREE;
6464 enum tree_code cond_code = COND_EXPR;
6465
6466 if (TREE_CODE (cond) == COND_EXPR
6467 || TREE_CODE (cond) == VEC_COND_EXPR)
6468 {
6469 test = TREE_OPERAND (cond, 0);
6470 true_value = TREE_OPERAND (cond, 1);
6471 false_value = TREE_OPERAND (cond, 2);
6472       /* If this operand throws an exception (and so has void type),
6473          it does not make sense to try to perform a logical or
6474          arithmetic operation involving it.  */
6475 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6476 lhs = true_value;
6477 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6478 rhs = false_value;
6479 }
6480 else if (!(TREE_CODE (type) != VECTOR_TYPE
6481 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6482 {
6483 tree testtype = TREE_TYPE (cond);
6484 test = cond;
6485 true_value = constant_boolean_node (true, testtype);
6486 false_value = constant_boolean_node (false, testtype);
6487 }
6488 else
6489 /* Detect the case of mixing vector and scalar types - bail out. */
6490 return NULL_TREE;
6491
6492 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6493 cond_code = VEC_COND_EXPR;
6494
6495 /* This transformation is only worthwhile if we don't have to wrap ARG
6496 in a SAVE_EXPR and the operation can be simplified without recursing
6497      on at least one of the branches once it is pushed inside the COND_EXPR.  */
6498 if (!TREE_CONSTANT (arg)
6499 && (TREE_SIDE_EFFECTS (arg)
6500 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6501 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6502 return NULL_TREE;
6503
6504 arg = fold_convert_loc (loc, arg_type, arg);
6505 if (lhs == 0)
6506 {
6507 true_value = fold_convert_loc (loc, cond_type, true_value);
6508 if (cond_first_p)
6509 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6510 else
6511 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6512 }
6513 if (rhs == 0)
6514 {
6515 false_value = fold_convert_loc (loc, cond_type, false_value);
6516 if (cond_first_p)
6517 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6518 else
6519 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6520 }
6521
6522 /* Check that we have simplified at least one of the branches. */
6523 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6524 return NULL_TREE;
6525
6526 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6527 }
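/* Editorial sketch (hypothetical): distributing a constant operand over
   a conditional, as fold_binary_op_with_conditional_arg does.  */
static inline int
example_distribute_over_cond (int flag, int x)
{
  /* 2 + (flag ? x : 3) becomes flag ? x + 2 : 5: a constant ARG
     satisfies the guard above, and the false branch then folds to a
     constant, so at least one branch has simplified.  */
  return 2 + (flag ? x : 3);
}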
6528
6529 \f
6530 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6531
6532 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6533 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6534 ADDEND is the same as X.
6535
6536 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6537 and finite. The problematic cases are when X is zero, and its mode
6538 has signed zeros. In the case of rounding towards -infinity,
6539 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6540 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6541
6542 bool
6543 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6544 {
6545 if (!real_zerop (addend))
6546 return false;
6547
6548 /* Don't allow the fold with -fsignaling-nans. */
6549 if (HONOR_SNANS (element_mode (type)))
6550 return false;
6551
6552 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6553 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6554 return true;
6555
6556 /* In a vector or complex, we would need to check the sign of all zeros. */
6557 if (TREE_CODE (addend) != REAL_CST)
6558 return false;
6559
6560 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6561 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6562 negate = !negate;
6563
6564 /* The mode has signed zeros, and we have to honor their sign.
6565 In this situation, there is only one case we can return true for.
6566 X - 0 is the same as X unless rounding towards -infinity is
6567 supported. */
6568 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6569 }
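/* Editorial illustration (not part of the original source): why the
   signed-zero check matters for x + 0.0.  */
static inline double
example_zero_addition (double x)
{
  /* Not foldable to x when signed zeros are honored: for x = -0.0 the
     sum is +0.0 under round-to-nearest.  x - 0.0 does preserve -0.0,
     which is why only the subtraction form can succeed, absent
     rounding towards -infinity.  */
  return x + 0.0;
}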
6570
6571 /* Subroutine of match.pd that optimizes comparisons of a division by
6572 a nonzero integer constant against an integer constant, i.e.
6573 X/C1 op C2.
6574
6575 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6576    GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
6577
6578 enum tree_code
6579 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6580 tree *hi, bool *neg_overflow)
6581 {
6582 tree prod, tmp, type = TREE_TYPE (c1);
6583 signop sign = TYPE_SIGN (type);
6584 bool overflow;
6585
6586 /* We have to do this the hard way to detect unsigned overflow.
6587 prod = int_const_binop (MULT_EXPR, c1, c2); */
6588 wide_int val = wi::mul (c1, c2, sign, &overflow);
6589 prod = force_fit_type (type, val, -1, overflow);
6590 *neg_overflow = false;
6591
6592 if (sign == UNSIGNED)
6593 {
6594 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6595 *lo = prod;
6596
6597 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6598 val = wi::add (prod, tmp, sign, &overflow);
6599 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6600 }
6601 else if (tree_int_cst_sgn (c1) >= 0)
6602 {
6603 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6604 switch (tree_int_cst_sgn (c2))
6605 {
6606 case -1:
6607 *neg_overflow = true;
6608 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6609 *hi = prod;
6610 break;
6611
6612 case 0:
6613 *lo = fold_negate_const (tmp, type);
6614 *hi = tmp;
6615 break;
6616
6617 case 1:
6618 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6619 *lo = prod;
6620 break;
6621
6622 default:
6623 gcc_unreachable ();
6624 }
6625 }
6626 else
6627 {
6628 /* A negative divisor reverses the relational operators. */
6629 code = swap_tree_comparison (code);
6630
6631 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6632 switch (tree_int_cst_sgn (c2))
6633 {
6634 case -1:
6635 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6636 *lo = prod;
6637 break;
6638
6639 case 0:
6640 *hi = fold_negate_const (tmp, type);
6641 *lo = tmp;
6642 break;
6643
6644 case 1:
6645 *neg_overflow = true;
6646 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6647 *hi = prod;
6648 break;
6649
6650 default:
6651 gcc_unreachable ();
6652 }
6653 }
6654
6655 if (code != EQ_EXPR && code != NE_EXPR)
6656 return code;
6657
6658 if (TREE_OVERFLOW (*lo)
6659 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6660 *lo = NULL_TREE;
6661 if (TREE_OVERFLOW (*hi)
6662 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6663 *hi = NULL_TREE;
6664
6665 return code;
6666 }
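/* Editorial worked example (values invented): for unsigned X,
   X / 4 == 3 is turned into a range test.  With c1 = 4 and c2 = 3,
   prod = 12 and tmp = 3, so *lo = 12 and *hi = 15 -- exactly the X for
   which the division yields 3.  */
static inline int
example_div_compare (unsigned int x)
{
  return x / 4 == 3;	/* in effect 12 <= x && x <= 15 */
}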
6667
6668
6669 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6670 equality/inequality test, then return a simplified form of the test
6671    using a sign test.  Otherwise return NULL.  TYPE is the desired
6672 result type. */
6673
6674 static tree
6675 fold_single_bit_test_into_sign_test (location_t loc,
6676 enum tree_code code, tree arg0, tree arg1,
6677 tree result_type)
6678 {
6679 /* If this is testing a single bit, we can optimize the test. */
6680 if ((code == NE_EXPR || code == EQ_EXPR)
6681 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6682 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6683 {
6684 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6685 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6686 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6687
6688 if (arg00 != NULL_TREE
6689 /* This is only a win if casting to a signed type is cheap,
6690 i.e. when arg00's type is not a partial mode. */
6691 && TYPE_PRECISION (TREE_TYPE (arg00))
6692 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6693 {
6694 tree stype = signed_type_for (TREE_TYPE (arg00));
6695 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6696 result_type,
6697 fold_convert_loc (loc, stype, arg00),
6698 build_int_cst (stype, 0));
6699 }
6700 }
6701
6702 return NULL_TREE;
6703 }
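/* Editorial sketch (assuming a 32-bit int; mask value invented): the
   sign-test rewrite performed above.  */
static inline int
example_sign_bit (int x)
{
  /* Masking with the sign bit and testing against zero folds to a
     plain signed comparison: != 0 becomes x < 0, == 0 becomes
     x >= 0.  */
  return (x & 0x80000000u) != 0;	/* folds to x < 0 */
}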
6704
6705 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6706 equality/inequality test, then return a simplified form of
6707 the test using shifts and logical operations. Otherwise return
6708 NULL. TYPE is the desired result type. */
6709
6710 tree
6711 fold_single_bit_test (location_t loc, enum tree_code code,
6712 tree arg0, tree arg1, tree result_type)
6713 {
6714 /* If this is testing a single bit, we can optimize the test. */
6715 if ((code == NE_EXPR || code == EQ_EXPR)
6716 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6717 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6718 {
6719 tree inner = TREE_OPERAND (arg0, 0);
6720 tree type = TREE_TYPE (arg0);
6721 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6722 machine_mode operand_mode = TYPE_MODE (type);
6723 int ops_unsigned;
6724 tree signed_type, unsigned_type, intermediate_type;
6725 tree tem, one;
6726
6727 /* First, see if we can fold the single bit test into a sign-bit
6728 test. */
6729 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6730 result_type);
6731 if (tem)
6732 return tem;
6733
6734 /* Otherwise we have (A & C) != 0 where C is a single bit,
6735          convert that into ((A >> C2) & 1), where C2 = log2(C).
6736 Similarly for (A & C) == 0. */
6737
6738       /* If INNER is a right shift by a constant and that shift count plus
6739          BITNUM does not overflow, adjust BITNUM and INNER.  */
6740 if (TREE_CODE (inner) == RSHIFT_EXPR
6741 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6742 && bitnum < TYPE_PRECISION (type)
6743 && wi::ltu_p (TREE_OPERAND (inner, 1),
6744 TYPE_PRECISION (type) - bitnum))
6745 {
6746 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6747 inner = TREE_OPERAND (inner, 0);
6748 }
6749
6750 /* If we are going to be able to omit the AND below, we must do our
6751 operations as unsigned. If we must use the AND, we have a choice.
6752 Normally unsigned is faster, but for some machines signed is. */
6753 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6754 && !flag_syntax_only) ? 0 : 1;
6755
6756 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6757 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6758 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6759 inner = fold_convert_loc (loc, intermediate_type, inner);
6760
6761 if (bitnum != 0)
6762 inner = build2 (RSHIFT_EXPR, intermediate_type,
6763 inner, size_int (bitnum));
6764
6765 one = build_int_cst (intermediate_type, 1);
6766
6767 if (code == EQ_EXPR)
6768 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6769
6770 /* Put the AND last so it can combine with more things. */
6771 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6772
6773 /* Make sure to return the proper type. */
6774 inner = fold_convert_loc (loc, result_type, inner);
6775
6776 return inner;
6777 }
6778 return NULL_TREE;
6779 }
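/* Editorial sketch (hypothetical): the shift-and-mask form of a
   single-bit test.  */
static inline int
example_single_bit (int x)
{
  /* (x & 8) != 0 becomes ((unsigned) x >> 3) & 1, and
     (x & 8) == 0 becomes (((unsigned) x >> 3) ^ 1) & 1 -- the XOR is
     added only for the EQ case, and the AND is placed last so it can
     combine with surrounding code.  */
  return (x & 8) != 0;
}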
6780
6781 /* Test whether it is preferable to swap two operands, ARG0 and
6782 ARG1, for example because ARG0 is an integer constant and ARG1
6783 isn't. */
6784
6785 bool
6786 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6787 {
6788 if (CONSTANT_CLASS_P (arg1))
6789 return 0;
6790 if (CONSTANT_CLASS_P (arg0))
6791 return 1;
6792
6793 STRIP_NOPS (arg0);
6794 STRIP_NOPS (arg1);
6795
6796 if (TREE_CONSTANT (arg1))
6797 return 0;
6798 if (TREE_CONSTANT (arg0))
6799 return 1;
6800
6801 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6802 for commutative and comparison operators. Ensuring a canonical
6803 form allows the optimizers to find additional redundancies without
6804 having to explicitly check for both orderings. */
6805 if (TREE_CODE (arg0) == SSA_NAME
6806 && TREE_CODE (arg1) == SSA_NAME
6807 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6808 return 1;
6809
6810 /* Put SSA_NAMEs last. */
6811 if (TREE_CODE (arg1) == SSA_NAME)
6812 return 0;
6813 if (TREE_CODE (arg0) == SSA_NAME)
6814 return 1;
6815
6816 /* Put variables last. */
6817 if (DECL_P (arg1))
6818 return 0;
6819 if (DECL_P (arg0))
6820 return 1;
6821
6822 return 0;
6823 }
6824
6825
6826 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6827 means A >= Y && A != MAX, but in this case we know that
6828 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6829
6830 static tree
6831 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6832 {
6833 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6834
6835 if (TREE_CODE (bound) == LT_EXPR)
6836 a = TREE_OPERAND (bound, 0);
6837 else if (TREE_CODE (bound) == GT_EXPR)
6838 a = TREE_OPERAND (bound, 1);
6839 else
6840 return NULL_TREE;
6841
6842 typea = TREE_TYPE (a);
6843 if (!INTEGRAL_TYPE_P (typea)
6844 && !POINTER_TYPE_P (typea))
6845 return NULL_TREE;
6846
6847 if (TREE_CODE (ineq) == LT_EXPR)
6848 {
6849 a1 = TREE_OPERAND (ineq, 1);
6850 y = TREE_OPERAND (ineq, 0);
6851 }
6852 else if (TREE_CODE (ineq) == GT_EXPR)
6853 {
6854 a1 = TREE_OPERAND (ineq, 0);
6855 y = TREE_OPERAND (ineq, 1);
6856 }
6857 else
6858 return NULL_TREE;
6859
6860 if (TREE_TYPE (a1) != typea)
6861 return NULL_TREE;
6862
6863 if (POINTER_TYPE_P (typea))
6864 {
6865       /* Convert the pointers into integers before taking the difference.  */
6866 tree ta = fold_convert_loc (loc, ssizetype, a);
6867 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6868 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6869 }
6870 else
6871 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6872
6873 if (!diff || !integer_onep (diff))
6874 return NULL_TREE;
6875
6876 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6877 }
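/* Editorial sketch (hypothetical): the non-sharp inequality fold.  */
static inline int
example_nonsharp (int a, int x, int y)
{
  /* Given the bound a < x, the test a + 1 > y weakens to a >= y:
     a + 1 > y would otherwise also require a != INT_MAX, but a < x
     already guarantees a < INT_MAX.  */
  return a < x && a + 1 > y;	/* folds to a < x && a >= y */
}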
6878
6879 /* Fold a sum or difference of at least one multiplication.
6880 Returns the folded tree or NULL if no simplification could be made. */
6881
6882 static tree
6883 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6884 tree arg0, tree arg1)
6885 {
6886 tree arg00, arg01, arg10, arg11;
6887 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6888
6889 /* (A * C) +- (B * C) -> (A+-B) * C.
6890 (A * C) +- A -> A * (C+-1).
6891 We are most concerned about the case where C is a constant,
6892 but other combinations show up during loop reduction. Since
6893 it is not difficult, try all four possibilities. */
6894
6895 if (TREE_CODE (arg0) == MULT_EXPR)
6896 {
6897 arg00 = TREE_OPERAND (arg0, 0);
6898 arg01 = TREE_OPERAND (arg0, 1);
6899 }
6900 else if (TREE_CODE (arg0) == INTEGER_CST)
6901 {
6902 arg00 = build_one_cst (type);
6903 arg01 = arg0;
6904 }
6905 else
6906 {
6907 /* We cannot generate constant 1 for fract. */
6908 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6909 return NULL_TREE;
6910 arg00 = arg0;
6911 arg01 = build_one_cst (type);
6912 }
6913 if (TREE_CODE (arg1) == MULT_EXPR)
6914 {
6915 arg10 = TREE_OPERAND (arg1, 0);
6916 arg11 = TREE_OPERAND (arg1, 1);
6917 }
6918 else if (TREE_CODE (arg1) == INTEGER_CST)
6919 {
6920 arg10 = build_one_cst (type);
6921       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6922 the purpose of this canonicalization. */
6923 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6924 && negate_expr_p (arg1)
6925 && code == PLUS_EXPR)
6926 {
6927 arg11 = negate_expr (arg1);
6928 code = MINUS_EXPR;
6929 }
6930 else
6931 arg11 = arg1;
6932 }
6933 else
6934 {
6935 /* We cannot generate constant 1 for fract. */
6936 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6937 return NULL_TREE;
6938 arg10 = arg1;
6939 arg11 = build_one_cst (type);
6940 }
6941 same = NULL_TREE;
6942
6943 /* Prefer factoring a common non-constant. */
6944 if (operand_equal_p (arg00, arg10, 0))
6945 same = arg00, alt0 = arg01, alt1 = arg11;
6946 else if (operand_equal_p (arg01, arg11, 0))
6947 same = arg01, alt0 = arg00, alt1 = arg10;
6948 else if (operand_equal_p (arg00, arg11, 0))
6949 same = arg00, alt0 = arg01, alt1 = arg10;
6950 else if (operand_equal_p (arg01, arg10, 0))
6951 same = arg01, alt0 = arg00, alt1 = arg11;
6952
6953 /* No identical multiplicands; see if we can find a common
6954 power-of-two factor in non-power-of-two multiplies. This
6955 can help in multi-dimensional array access. */
6956 else if (tree_fits_shwi_p (arg01)
6957 && tree_fits_shwi_p (arg11))
6958 {
6959 HOST_WIDE_INT int01, int11, tmp;
6960 bool swap = false;
6961 tree maybe_same;
6962 int01 = tree_to_shwi (arg01);
6963 int11 = tree_to_shwi (arg11);
6964
6965 /* Move min of absolute values to int11. */
6966 if (absu_hwi (int01) < absu_hwi (int11))
6967 {
6968 tmp = int01, int01 = int11, int11 = tmp;
6969 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6970 maybe_same = arg01;
6971 swap = true;
6972 }
6973 else
6974 maybe_same = arg11;
6975
6976 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6977 /* The remainder should not be a constant, otherwise we
6978              end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6979              increase the number of multiplications necessary.  */
6980 && TREE_CODE (arg10) != INTEGER_CST)
6981 {
6982 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6983 build_int_cst (TREE_TYPE (arg00),
6984 int01 / int11));
6985 alt1 = arg10;
6986 same = maybe_same;
6987 if (swap)
6988 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6989 }
6990 }
6991
6992 if (!same)
6993 return NULL_TREE;
6994
6995 if (! INTEGRAL_TYPE_P (type)
6996 || TYPE_OVERFLOW_WRAPS (type)
6997 /* We are neither factoring zero nor minus one. */
6998 || TREE_CODE (same) == INTEGER_CST)
6999 return fold_build2_loc (loc, MULT_EXPR, type,
7000 fold_build2_loc (loc, code, type,
7001 fold_convert_loc (loc, type, alt0),
7002 fold_convert_loc (loc, type, alt1)),
7003 fold_convert_loc (loc, type, same));
7004
7005 /* SAME may be zero and thus the operation CODE may overflow. Likewise
7006 SAME may be minus one and thus the multiplication may overflow. Perform
7007 the operations in an unsigned type. */
7008 tree utype = unsigned_type_for (type);
7009 tree tem = fold_build2_loc (loc, code, utype,
7010 fold_convert_loc (loc, utype, alt0),
7011 fold_convert_loc (loc, utype, alt1));
7012 /* If the sum evaluated to a constant that is not -INF, the
7013 multiplication cannot overflow. */
7014 if (TREE_CODE (tem) == INTEGER_CST
7015 && ! wi::eq_p (tem, wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7016 return fold_build2_loc (loc, MULT_EXPR, type,
7017 fold_convert (type, tem), same);
7018
7019 return fold_convert_loc (loc, type,
7020 fold_build2_loc (loc, MULT_EXPR, utype, tem,
7021 fold_convert_loc (loc, utype, same)));
7022 }
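
/* Illustrative sketch (not GCC source, assuming 32-bit ints): the
   factoring above produces, for example,

       a * 8 + b * 8  ->  (a + b) * 8
       i * 4 + i      ->  i * 5
       i * 4 + j * 8  ->  (i + j * 2) * 4

   where the last case relies on the common power-of-two factor 4.
   For a signed type without -fwrapv, the inner addition is performed
   in the corresponding unsigned type because SAME may be zero or
   minus one. */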
7023
7024 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7025 specified by EXPR into the buffer PTR of length LEN bytes.
7026 Return the number of bytes placed in the buffer, or zero
7027 upon failure. */
7028
7029 static int
7030 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7031 {
7032 tree type = TREE_TYPE (expr);
7033 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7034 int byte, offset, word, words;
7035 unsigned char value;
7036
7037 if ((off == -1 && total_bytes > len)
7038 || off >= total_bytes)
7039 return 0;
7040 if (off == -1)
7041 off = 0;
7042 words = total_bytes / UNITS_PER_WORD;
7043
7044 for (byte = 0; byte < total_bytes; byte++)
7045 {
7046 int bitpos = byte * BITS_PER_UNIT;
7047 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7048 number of bytes. */
7049 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7050
7051 if (total_bytes > UNITS_PER_WORD)
7052 {
7053 word = byte / UNITS_PER_WORD;
7054 if (WORDS_BIG_ENDIAN)
7055 word = (words - 1) - word;
7056 offset = word * UNITS_PER_WORD;
7057 if (BYTES_BIG_ENDIAN)
7058 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7059 else
7060 offset += byte % UNITS_PER_WORD;
7061 }
7062 else
7063 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7064 if (offset >= off
7065 && offset - off < len)
7066 ptr[offset - off] = value;
7067 }
7068 return MIN (len, total_bytes - off);
7069 }
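
/* Illustrative sketch (not GCC source): stripped of the word-within-word
   bookkeeping above, encoding an integer into the target's byte order
   reduces to the following portable loop. The function and parameter
   names are made up for the illustration. */
#if 0
static void
encode_int_sketch (unsigned long long val, unsigned char *ptr,
                   int total_bytes, int bytes_big_endian)
{
  for (int byte = 0; byte < total_bytes; byte++)
    {
      /* Extract the BYTE'th least significant byte of VAL... */
      unsigned char value = (val >> (byte * 8)) & 0xff;
      /* ...and store it where the target's byte order expects it. */
      int offset = bytes_big_endian ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
}
#endif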
7070
7071
7072 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7073 specified by EXPR into the buffer PTR of length LEN bytes.
7074 Return the number of bytes placed in the buffer, or zero
7075 upon failure. */
7076
7077 static int
7078 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7079 {
7080 tree type = TREE_TYPE (expr);
7081 machine_mode mode = TYPE_MODE (type);
7082 int total_bytes = GET_MODE_SIZE (mode);
7083 FIXED_VALUE_TYPE value;
7084 tree i_value, i_type;
7085
7086 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7087 return 0;
7088
7089 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7090
7091 if (NULL_TREE == i_type
7092 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7093 return 0;
7094
7095 value = TREE_FIXED_CST (expr);
7096 i_value = double_int_to_tree (i_type, value.data);
7097
7098 return native_encode_int (i_value, ptr, len, off);
7099 }
7100
7101
7102 /* Subroutine of native_encode_expr. Encode the REAL_CST
7103 specified by EXPR into the buffer PTR of length LEN bytes.
7104 Return the number of bytes placed in the buffer, or zero
7105 upon failure. */
7106
7107 static int
7108 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7109 {
7110 tree type = TREE_TYPE (expr);
7111 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7112 int byte, offset, word, words, bitpos;
7113 unsigned char value;
7114
7115 /* There are always 32 bits in each long, no matter the size of
7116 the host's long. We handle floating point representations with
7117 up to 192 bits. */
7118 long tmp[6];
7119
7120 if ((off == -1 && total_bytes > len)
7121 || off >= total_bytes)
7122 return 0;
7123 if (off == -1)
7124 off = 0;
7125 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7126
7127 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7128
7129 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7130 bitpos += BITS_PER_UNIT)
7131 {
7132 byte = (bitpos / BITS_PER_UNIT) & 3;
7133 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7134
7135 if (UNITS_PER_WORD < 4)
7136 {
7137 word = byte / UNITS_PER_WORD;
7138 if (WORDS_BIG_ENDIAN)
7139 word = (words - 1) - word;
7140 offset = word * UNITS_PER_WORD;
7141 if (BYTES_BIG_ENDIAN)
7142 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7143 else
7144 offset += byte % UNITS_PER_WORD;
7145 }
7146 else
7147 {
7148 offset = byte;
7149 if (BYTES_BIG_ENDIAN)
7150 {
7151 /* Reverse bytes within each long, or within the entire float
7152 if it's smaller than a long (for HFmode). */
7153 offset = MIN (3, total_bytes - 1) - offset;
7154 gcc_assert (offset >= 0);
7155 }
7156 }
7157 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7158 if (offset >= off
7159 && offset - off < len)
7160 ptr[offset - off] = value;
7161 }
7162 return MIN (len, total_bytes - off);
7163 }
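
/* Illustrative note (not GCC source): for a 64-bit double,
   real_to_target fills tmp[0] and tmp[1] with 32 bits each; the loop
   above then lays the 8 bytes out in target order, reversing the bytes
   within each 4-byte group when BYTES_BIG_ENDIAN. */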
7164
7165 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7166 specified by EXPR into the buffer PTR of length LEN bytes.
7167 Return the number of bytes placed in the buffer, or zero
7168 upon failure. */
7169
7170 static int
7171 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7172 {
7173 int rsize, isize;
7174 tree part;
7175
7176 part = TREE_REALPART (expr);
7177 rsize = native_encode_expr (part, ptr, len, off);
7178 if (off == -1
7179 && rsize == 0)
7180 return 0;
7181 part = TREE_IMAGPART (expr);
7182 if (off != -1)
7183 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7184 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7185 if (off == -1
7186 && isize != rsize)
7187 return 0;
7188 return rsize + isize;
7189 }
7190
7191
7192 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7193 specified by EXPR into the buffer PTR of length LEN bytes.
7194 Return the number of bytes placed in the buffer, or zero
7195 upon failure. */
7196
7197 static int
7198 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7199 {
7200 unsigned i, count;
7201 int size, offset;
7202 tree itype, elem;
7203
7204 offset = 0;
7205 count = VECTOR_CST_NELTS (expr);
7206 itype = TREE_TYPE (TREE_TYPE (expr));
7207 size = GET_MODE_SIZE (TYPE_MODE (itype));
7208 for (i = 0; i < count; i++)
7209 {
7210 if (off >= size)
7211 {
7212 off -= size;
7213 continue;
7214 }
7215 elem = VECTOR_CST_ELT (expr, i);
7216 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7217 if ((off == -1 && res != size)
7218 || res == 0)
7219 return 0;
7220 offset += res;
7221 if (offset >= len)
7222 return offset;
7223 if (off != -1)
7224 off = 0;
7225 }
7226 return offset;
7227 }
7228
7229
7230 /* Subroutine of native_encode_expr. Encode the STRING_CST
7231 specified by EXPR into the buffer PTR of length LEN bytes.
7232 Return the number of bytes placed in the buffer, or zero
7233 upon failure. */
7234
7235 static int
7236 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7237 {
7238 tree type = TREE_TYPE (expr);
7239 HOST_WIDE_INT total_bytes;
7240
7241 if (TREE_CODE (type) != ARRAY_TYPE
7242 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7243 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7244 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7245 return 0;
7246 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7247 if ((off == -1 && total_bytes > len)
7248 || off >= total_bytes)
7249 return 0;
7250 if (off == -1)
7251 off = 0;
7252 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7253 {
7254 int written = 0;
7255 if (off < TREE_STRING_LENGTH (expr))
7256 {
7257 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7258 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7259 }
7260 memset (ptr + written, 0,
7261 MIN (total_bytes - written, len - written));
7262 }
7263 else
7264 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7265 return MIN (total_bytes - off, len);
7266 }
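
/* Illustrative example (not GCC source): for char a[8] = "hi", the
   STRING_CST carries 3 bytes ("hi" plus the terminating NUL) while
   TYPE_SIZE_UNIT is 8, so the code above copies those 3 bytes and
   zero-fills the remaining 5. */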
7267
7268
7269 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7270 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7271 buffer PTR of length LEN bytes. If OFF is not -1 then start
7272 the encoding at byte offset OFF and encode at most LEN bytes.
7273 Return the number of bytes placed in the buffer, or zero upon failure. */
7274
7275 int
7276 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7277 {
7278 /* We don't support starting at a negative offset, and -1 is special. */
7279 if (off < -1)
7280 return 0;
7281
7282 switch (TREE_CODE (expr))
7283 {
7284 case INTEGER_CST:
7285 return native_encode_int (expr, ptr, len, off);
7286
7287 case REAL_CST:
7288 return native_encode_real (expr, ptr, len, off);
7289
7290 case FIXED_CST:
7291 return native_encode_fixed (expr, ptr, len, off);
7292
7293 case COMPLEX_CST:
7294 return native_encode_complex (expr, ptr, len, off);
7295
7296 case VECTOR_CST:
7297 return native_encode_vector (expr, ptr, len, off);
7298
7299 case STRING_CST:
7300 return native_encode_string (expr, ptr, len, off);
7301
7302 default:
7303 return 0;
7304 }
7305 }
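
/* Illustrative sketch (not GCC source): a typical round trip through
   the native encode/interpret pair. The function name, constant and
   buffer size are made up for the illustration. */
#if 0
static void
native_roundtrip_sketch (void)
{
  unsigned char buf[16];
  tree cst = build_int_cst (integer_type_node, 0x01020304);
  /* Encode the whole constant (OFF == -1) into BUF. */
  int len = native_encode_expr (cst, buf, sizeof buf, -1);
  if (len != 0)
    {
      /* Reading the bytes back yields an equal INTEGER_CST. */
      tree back = native_interpret_expr (integer_type_node, buf, len);
      gcc_assert (back && tree_int_cst_equal (cst, back));
    }
}
#endif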
7306
7307
7308 /* Subroutine of native_interpret_expr. Interpret the contents of
7309 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7310 If the buffer cannot be interpreted, return NULL_TREE. */
7311
7312 static tree
7313 native_interpret_int (tree type, const unsigned char *ptr, int len)
7314 {
7315 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7316
7317 if (total_bytes > len
7318 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7319 return NULL_TREE;
7320
7321 wide_int result = wi::from_buffer (ptr, total_bytes);
7322
7323 return wide_int_to_tree (type, result);
7324 }
7325
7326
7327 /* Subroutine of native_interpret_expr. Interpret the contents of
7328 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7329 If the buffer cannot be interpreted, return NULL_TREE. */
7330
7331 static tree
7332 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7333 {
7334 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7335 double_int result;
7336 FIXED_VALUE_TYPE fixed_value;
7337
7338 if (total_bytes > len
7339 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7340 return NULL_TREE;
7341
7342 result = double_int::from_buffer (ptr, total_bytes);
7343 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7344
7345 return build_fixed (type, fixed_value);
7346 }
7347
7348
7349 /* Subroutine of native_interpret_expr. Interpret the contents of
7350 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7351 If the buffer cannot be interpreted, return NULL_TREE. */
7352
7353 static tree
7354 native_interpret_real (tree type, const unsigned char *ptr, int len)
7355 {
7356 machine_mode mode = TYPE_MODE (type);
7357 int total_bytes = GET_MODE_SIZE (mode);
7358 unsigned char value;
7359 /* There are always 32 bits in each long, no matter the size of
7360 the host's long. We handle floating point representations with
7361 up to 192 bits. */
7362 REAL_VALUE_TYPE r;
7363 long tmp[6];
7364
7366 if (total_bytes > len || total_bytes > 24)
7367 return NULL_TREE;
7368 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7369
7370 memset (tmp, 0, sizeof (tmp));
7371 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7372 bitpos += BITS_PER_UNIT)
7373 {
7374 /* Both OFFSET and BYTE index within a long;
7375 bitpos indexes the whole float. */
7376 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7377 if (UNITS_PER_WORD < 4)
7378 {
7379 int word = byte / UNITS_PER_WORD;
7380 if (WORDS_BIG_ENDIAN)
7381 word = (words - 1) - word;
7382 offset = word * UNITS_PER_WORD;
7383 if (BYTES_BIG_ENDIAN)
7384 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7385 else
7386 offset += byte % UNITS_PER_WORD;
7387 }
7388 else
7389 {
7390 offset = byte;
7391 if (BYTES_BIG_ENDIAN)
7392 {
7393 /* Reverse bytes within each long, or within the entire float
7394 if it's smaller than a long (for HFmode). */
7395 offset = MIN (3, total_bytes - 1) - offset;
7396 gcc_assert (offset >= 0);
7397 }
7398 }
7399 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7400
7401 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7402 }
7403
7404 real_from_target (&r, tmp, mode);
7405 return build_real (type, r);
7406 }
7407
7408
7409 /* Subroutine of native_interpret_expr. Interpret the contents of
7410 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7411 If the buffer cannot be interpreted, return NULL_TREE. */
7412
7413 static tree
7414 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7415 {
7416 tree etype, rpart, ipart;
7417 int size;
7418
7419 etype = TREE_TYPE (type);
7420 size = GET_MODE_SIZE (TYPE_MODE (etype));
7421 if (size * 2 > len)
7422 return NULL_TREE;
7423 rpart = native_interpret_expr (etype, ptr, size);
7424 if (!rpart)
7425 return NULL_TREE;
7426 ipart = native_interpret_expr (etype, ptr+size, size);
7427 if (!ipart)
7428 return NULL_TREE;
7429 return build_complex (type, rpart, ipart);
7430 }
7431
7432
7433 /* Subroutine of native_interpret_expr. Interpret the contents of
7434 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7435 If the buffer cannot be interpreted, return NULL_TREE. */
7436
7437 static tree
7438 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7439 {
7440 tree etype, elem;
7441 int i, size, count;
7442 tree *elements;
7443
7444 etype = TREE_TYPE (type);
7445 size = GET_MODE_SIZE (TYPE_MODE (etype));
7446 count = TYPE_VECTOR_SUBPARTS (type);
7447 if (size * count > len)
7448 return NULL_TREE;
7449
7450 elements = XALLOCAVEC (tree, count);
7451 for (i = count - 1; i >= 0; i--)
7452 {
7453 elem = native_interpret_expr (etype, ptr+(i*size), size);
7454 if (!elem)
7455 return NULL_TREE;
7456 elements[i] = elem;
7457 }
7458 return build_vector (type, elements);
7459 }
7460
7461
7462 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7463 the buffer PTR of length LEN as a constant of type TYPE. For
7464 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7465 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7466 return NULL_TREE. */
7467
7468 tree
7469 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7470 {
7471 switch (TREE_CODE (type))
7472 {
7473 case INTEGER_TYPE:
7474 case ENUMERAL_TYPE:
7475 case BOOLEAN_TYPE:
7476 case POINTER_TYPE:
7477 case REFERENCE_TYPE:
7478 return native_interpret_int (type, ptr, len);
7479
7480 case REAL_TYPE:
7481 return native_interpret_real (type, ptr, len);
7482
7483 case FIXED_POINT_TYPE:
7484 return native_interpret_fixed (type, ptr, len);
7485
7486 case COMPLEX_TYPE:
7487 return native_interpret_complex (type, ptr, len);
7488
7489 case VECTOR_TYPE:
7490 return native_interpret_vector (type, ptr, len);
7491
7492 default:
7493 return NULL_TREE;
7494 }
7495 }
7496
7497 /* Returns true if we can interpret the contents of a native encoding
7498 as TYPE. */
7499
7500 static bool
7501 can_native_interpret_type_p (tree type)
7502 {
7503 switch (TREE_CODE (type))
7504 {
7505 case INTEGER_TYPE:
7506 case ENUMERAL_TYPE:
7507 case BOOLEAN_TYPE:
7508 case POINTER_TYPE:
7509 case REFERENCE_TYPE:
7510 case FIXED_POINT_TYPE:
7511 case REAL_TYPE:
7512 case COMPLEX_TYPE:
7513 case VECTOR_TYPE:
7514 return true;
7515 default:
7516 return false;
7517 }
7518 }
7519
7520 /* Return true iff a constant of type TYPE is accepted by
7521 native_encode_expr. */
7522
7523 bool
7524 can_native_encode_type_p (tree type)
7525 {
7526 switch (TREE_CODE (type))
7527 {
7528 case INTEGER_TYPE:
7529 case REAL_TYPE:
7530 case FIXED_POINT_TYPE:
7531 case COMPLEX_TYPE:
7532 case VECTOR_TYPE:
7533 case POINTER_TYPE:
7534 return true;
7535 default:
7536 return false;
7537 }
7538 }
7539
7540 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7541 TYPE at compile-time. If we're unable to perform the conversion
7542 return NULL_TREE. */
7543
7544 static tree
7545 fold_view_convert_expr (tree type, tree expr)
7546 {
7547 /* We support up to 512-bit values (for V8DFmode). */
7548 unsigned char buffer[64];
7549 int len;
7550
7551 /* Check that the host and target are sane. */
7552 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7553 return NULL_TREE;
7554
7555 len = native_encode_expr (expr, buffer, sizeof (buffer));
7556 if (len == 0)
7557 return NULL_TREE;
7558
7559 return native_interpret_expr (type, buffer, len);
7560 }
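
/* Illustrative example (not GCC source, assuming 32-bit float and int):
   fold_view_convert_expr turns VIEW_CONVERT_EXPR<int>(1.0f) into the
   INTEGER_CST 0x3f800000; the result is independent of target byte
   order because the bytes are encoded and reinterpreted in the same
   order. */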
7561
7562 /* Build an expression for the address of T. Folds away INDIRECT_REF
7563 to avoid confusing the gimplify process. */
7564
7565 tree
7566 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7567 {
7568 /* The size of the object is not relevant when talking about its address. */
7569 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7570 t = TREE_OPERAND (t, 0);
7571
7572 if (TREE_CODE (t) == INDIRECT_REF)
7573 {
7574 t = TREE_OPERAND (t, 0);
7575
7576 if (TREE_TYPE (t) != ptrtype)
7577 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7578 }
7579 else if (TREE_CODE (t) == MEM_REF
7580 && integer_zerop (TREE_OPERAND (t, 1)))
7581 return TREE_OPERAND (t, 0);
7582 else if (TREE_CODE (t) == MEM_REF
7583 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7584 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7585 TREE_OPERAND (t, 0),
7586 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7587 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7588 {
7589 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7590
7591 if (TREE_TYPE (t) != ptrtype)
7592 t = fold_convert_loc (loc, ptrtype, t);
7593 }
7594 else
7595 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7596
7597 return t;
7598 }
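
/* Illustrative example (not GCC source): build_fold_addr_expr folds
   &*p back to p (inserting a cast if the pointer types differ), and
   &MEM_REF[p, 0] likewise folds to p instead of producing a fresh
   ADDR_EXPR. */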
7599
7600 /* Build an expression for the address of T. */
7601
7602 tree
7603 build_fold_addr_expr_loc (location_t loc, tree t)
7604 {
7605 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7606
7607 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7608 }
7609
7610 /* Fold a unary expression of code CODE and type TYPE with operand
7611 OP0. Return the folded expression if folding is successful.
7612 Otherwise, return NULL_TREE. */
7613
7614 tree
7615 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7616 {
7617 tree tem;
7618 tree arg0;
7619 enum tree_code_class kind = TREE_CODE_CLASS (code);
7620
7621 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7622 && TREE_CODE_LENGTH (code) == 1);
7623
7624 arg0 = op0;
7625 if (arg0)
7626 {
7627 if (CONVERT_EXPR_CODE_P (code)
7628 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7629 {
7630 /* Don't use STRIP_NOPS, because signedness of argument type
7631 matters. */
7632 STRIP_SIGN_NOPS (arg0);
7633 }
7634 else
7635 {
7636 /* Strip any conversions that don't change the mode. This
7637 is safe for every expression, except for a comparison
7638 expression because its signedness is derived from its
7639 operands.
7640
7641 Note that this is done as an internal manipulation within
7642 the constant folder, in order to find the simplest
7643 representation of the arguments so that their form can be
7644 studied. In any cases, the appropriate type conversions
7645 should be put back in the tree that will get out of the
7646 constant folder. */
7647 STRIP_NOPS (arg0);
7648 }
7649
7650 if (CONSTANT_CLASS_P (arg0))
7651 {
7652 tree tem = const_unop (code, type, arg0);
7653 if (tem)
7654 {
7655 if (TREE_TYPE (tem) != type)
7656 tem = fold_convert_loc (loc, type, tem);
7657 return tem;
7658 }
7659 }
7660 }
7661
7662 tem = generic_simplify (loc, code, type, op0);
7663 if (tem)
7664 return tem;
7665
7666 if (TREE_CODE_CLASS (code) == tcc_unary)
7667 {
7668 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7669 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7670 fold_build1_loc (loc, code, type,
7671 fold_convert_loc (loc, TREE_TYPE (op0),
7672 TREE_OPERAND (arg0, 1))));
7673 else if (TREE_CODE (arg0) == COND_EXPR)
7674 {
7675 tree arg01 = TREE_OPERAND (arg0, 1);
7676 tree arg02 = TREE_OPERAND (arg0, 2);
7677 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7678 arg01 = fold_build1_loc (loc, code, type,
7679 fold_convert_loc (loc,
7680 TREE_TYPE (op0), arg01));
7681 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7682 arg02 = fold_build1_loc (loc, code, type,
7683 fold_convert_loc (loc,
7684 TREE_TYPE (op0), arg02));
7685 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7686 arg01, arg02);
7687
7688 /* If this was a conversion, and all we did was to move it
7689 inside the COND_EXPR, bring it back out. But leave it if
7690 it is a conversion from integer to integer and the
7691 result precision is no wider than a word since such a
7692 conversion is cheap and may be optimized away by combine,
7693 while it couldn't if it were outside the COND_EXPR. Then return
7694 so we don't get into an infinite recursion loop taking the
7695 conversion out and then back in. */
7696
7697 if ((CONVERT_EXPR_CODE_P (code)
7698 || code == NON_LVALUE_EXPR)
7699 && TREE_CODE (tem) == COND_EXPR
7700 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7701 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7702 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7703 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7704 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7705 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7706 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7707 && (INTEGRAL_TYPE_P
7708 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7709 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7710 || flag_syntax_only))
7711 tem = build1_loc (loc, code, type,
7712 build3 (COND_EXPR,
7713 TREE_TYPE (TREE_OPERAND
7714 (TREE_OPERAND (tem, 1), 0)),
7715 TREE_OPERAND (tem, 0),
7716 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7717 TREE_OPERAND (TREE_OPERAND (tem, 2),
7718 0)));
7719 return tem;
7720 }
7721 }
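
/* Illustrative example (not GCC source): the COND_EXPR case above
   distributes the unary operation into both arms, so e.g.
   -(a ? b : c) becomes a ? -b : -c. */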
7722
7723 switch (code)
7724 {
7725 case NON_LVALUE_EXPR:
7726 if (!maybe_lvalue_p (op0))
7727 return fold_convert_loc (loc, type, op0);
7728 return NULL_TREE;
7729
7730 CASE_CONVERT:
7731 case FLOAT_EXPR:
7732 case FIX_TRUNC_EXPR:
7733 if (COMPARISON_CLASS_P (op0))
7734 {
7735 /* If we have (type) (a CMP b) and type is an integral type, return
7736 new expression involving the new type. Canonicalize
7737 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7738 non-integral type.
7739 Do not fold the result, as that would not simplify further; also,
7740 folding again results in recursion. */
7741 if (TREE_CODE (type) == BOOLEAN_TYPE)
7742 return build2_loc (loc, TREE_CODE (op0), type,
7743 TREE_OPERAND (op0, 0),
7744 TREE_OPERAND (op0, 1));
7745 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7746 && TREE_CODE (type) != VECTOR_TYPE)
7747 return build3_loc (loc, COND_EXPR, type, op0,
7748 constant_boolean_node (true, type),
7749 constant_boolean_node (false, type));
7750 }
7751
7752 /* Handle (T *)&A.B.C for A being of type T and B and C
7753 living at offset zero. This occurs frequently in
7754 C++ upcasting and then accessing the base. */
7755 if (TREE_CODE (op0) == ADDR_EXPR
7756 && POINTER_TYPE_P (type)
7757 && handled_component_p (TREE_OPERAND (op0, 0)))
7758 {
7759 HOST_WIDE_INT bitsize, bitpos;
7760 tree offset;
7761 machine_mode mode;
7762 int unsignedp, reversep, volatilep;
7763 tree base
7764 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7765 &offset, &mode, &unsignedp, &reversep,
7766 &volatilep);
7767 /* If the reference was to a (constant) zero offset, we can use
7768 the address of the base if it has the same base type
7769 as the result type and the pointer type is unqualified. */
7770 if (! offset && bitpos == 0
7771 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7772 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7773 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7774 return fold_convert_loc (loc, type,
7775 build_fold_addr_expr_loc (loc, base));
7776 }
7777
7778 if (TREE_CODE (op0) == MODIFY_EXPR
7779 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7780 /* Detect assigning a bitfield. */
7781 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7782 && DECL_BIT_FIELD
7783 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7784 {
7785 /* Don't leave an assignment inside a conversion
7786 unless assigning a bitfield. */
7787 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7788 /* First do the assignment, then return converted constant. */
7789 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7790 TREE_NO_WARNING (tem) = 1;
7791 TREE_USED (tem) = 1;
7792 return tem;
7793 }
7794
7795 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7796 constant (if x has signed type, the sign bit cannot be set
7797 in c). This folds extension into the BIT_AND_EXPR.
7798 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7799 very likely don't have maximal range for their precision and this
7800 transformation effectively doesn't preserve non-maximal ranges. */
7801 if (TREE_CODE (type) == INTEGER_TYPE
7802 && TREE_CODE (op0) == BIT_AND_EXPR
7803 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7804 {
7805 tree and_expr = op0;
7806 tree and0 = TREE_OPERAND (and_expr, 0);
7807 tree and1 = TREE_OPERAND (and_expr, 1);
7808 int change = 0;
7809
7810 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7811 || (TYPE_PRECISION (type)
7812 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7813 change = 1;
7814 else if (TYPE_PRECISION (TREE_TYPE (and1))
7815 <= HOST_BITS_PER_WIDE_INT
7816 && tree_fits_uhwi_p (and1))
7817 {
7818 unsigned HOST_WIDE_INT cst;
7819
7820 cst = tree_to_uhwi (and1);
7821 cst &= HOST_WIDE_INT_M1U
7822 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7823 change = (cst == 0);
7824 if (change
7825 && !flag_syntax_only
7826 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7827 == ZERO_EXTEND))
7828 {
7829 tree uns = unsigned_type_for (TREE_TYPE (and0));
7830 and0 = fold_convert_loc (loc, uns, and0);
7831 and1 = fold_convert_loc (loc, uns, and1);
7832 }
7833 }
7834 if (change)
7835 {
7836 tem = force_fit_type (type, wi::to_widest (and1), 0,
7837 TREE_OVERFLOW (and1));
7838 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7839 fold_convert_loc (loc, type, and0), tem);
7840 }
7841 }
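
/* Illustrative example (not GCC source): the BIT_AND_EXPR case above
   folds e.g. (unsigned long) (x & 0xff) for unsigned int x into
   (unsigned long) x & 0xff, moving the widening conversion inside
   the masking operation. */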
7842
7843 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7844 cast (T1)X will fold away. We assume that this happens when X itself
7845 is a cast. */
7846 if (POINTER_TYPE_P (type)
7847 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7848 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7849 {
7850 tree arg00 = TREE_OPERAND (arg0, 0);
7851 tree arg01 = TREE_OPERAND (arg0, 1);
7852
7853 return fold_build_pointer_plus_loc
7854 (loc, fold_convert_loc (loc, type, arg00), arg01);
7855 }
7856
7857 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7858 of the same precision, and X is an integer type not narrower than
7859 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7860 if (INTEGRAL_TYPE_P (type)
7861 && TREE_CODE (op0) == BIT_NOT_EXPR
7862 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7863 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7864 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7865 {
7866 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7867 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7868 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7869 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7870 fold_convert_loc (loc, type, tem));
7871 }
7872
7873 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7874 type of X and Y (integer types only). */
7875 if (INTEGRAL_TYPE_P (type)
7876 && TREE_CODE (op0) == MULT_EXPR
7877 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7878 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7879 {
7880 /* Be careful not to introduce new overflows. */
7881 tree mult_type;
7882 if (TYPE_OVERFLOW_WRAPS (type))
7883 mult_type = type;
7884 else
7885 mult_type = unsigned_type_for (type);
7886
7887 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7888 {
7889 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7890 fold_convert_loc (loc, mult_type,
7891 TREE_OPERAND (op0, 0)),
7892 fold_convert_loc (loc, mult_type,
7893 TREE_OPERAND (op0, 1)));
7894 return fold_convert_loc (loc, type, tem);
7895 }
7896 }
7897
7898 return NULL_TREE;
7899
7900 case VIEW_CONVERT_EXPR:
7901 if (TREE_CODE (op0) == MEM_REF)
7902 {
7903 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7904 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7905 tem = fold_build2_loc (loc, MEM_REF, type,
7906 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7907 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7908 return tem;
7909 }
7910
7911 return NULL_TREE;
7912
7913 case NEGATE_EXPR:
7914 tem = fold_negate_expr (loc, arg0);
7915 if (tem)
7916 return fold_convert_loc (loc, type, tem);
7917 return NULL_TREE;
7918
7919 case ABS_EXPR:
7920 /* Convert fabs((double)float) into (double)fabsf(float). */
7921 if (TREE_CODE (arg0) == NOP_EXPR
7922 && TREE_CODE (type) == REAL_TYPE)
7923 {
7924 tree targ0 = strip_float_extensions (arg0);
7925 if (targ0 != arg0)
7926 return fold_convert_loc (loc, type,
7927 fold_build1_loc (loc, ABS_EXPR,
7928 TREE_TYPE (targ0),
7929 targ0));
7930 }
7931 return NULL_TREE;
7932
7933 case BIT_NOT_EXPR:
7934 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7935 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7936 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7937 fold_convert_loc (loc, type,
7938 TREE_OPERAND (arg0, 0)))))
7939 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7940 fold_convert_loc (loc, type,
7941 TREE_OPERAND (arg0, 1)));
7942 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7943 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7944 fold_convert_loc (loc, type,
7945 TREE_OPERAND (arg0, 1)))))
7946 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7947 fold_convert_loc (loc, type,
7948 TREE_OPERAND (arg0, 0)), tem);
7949
7950 return NULL_TREE;
7951
7952 case TRUTH_NOT_EXPR:
7953 /* Note that the operand of this must be an int
7954 and its values must be 0 or 1.
7955 ("true" is a fixed value perhaps depending on the language,
7956 but we don't handle values other than 1 correctly yet.) */
7957 tem = fold_truth_not_expr (loc, arg0);
7958 if (!tem)
7959 return NULL_TREE;
7960 return fold_convert_loc (loc, type, tem);
7961
7962 case INDIRECT_REF:
7963 /* Fold *&X to X if X is an lvalue. */
7964 if (TREE_CODE (op0) == ADDR_EXPR)
7965 {
7966 tree op00 = TREE_OPERAND (op0, 0);
7967 if ((VAR_P (op00)
7968 || TREE_CODE (op00) == PARM_DECL
7969 || TREE_CODE (op00) == RESULT_DECL)
7970 && !TREE_READONLY (op00))
7971 return op00;
7972 }
7973 return NULL_TREE;
7974
7975 default:
7976 return NULL_TREE;
7977 } /* switch (code) */
7978 }
7979
7980
7981 /* If the operation was a conversion do _not_ mark a resulting constant
7982 with TREE_OVERFLOW if the original constant was not. These conversions
7983 have implementation defined behavior and retaining the TREE_OVERFLOW
7984 flag here would confuse later passes such as VRP. */
7985 tree
7986 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7987 tree type, tree op0)
7988 {
7989 tree res = fold_unary_loc (loc, code, type, op0);
7990 if (res
7991 && TREE_CODE (res) == INTEGER_CST
7992 && TREE_CODE (op0) == INTEGER_CST
7993 && CONVERT_EXPR_CODE_P (code))
7994 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7995
7996 return res;
7997 }
7998
7999 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8000 operands OP0 and OP1. LOC is the location of the resulting expression.
8001 ARG0 and ARG1 are the results of applying STRIP_NOPS to OP0 and OP1.
8002 Return the folded expression if folding is successful. Otherwise,
8003 return NULL_TREE. */
8004 static tree
8005 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8006 tree arg0, tree arg1, tree op0, tree op1)
8007 {
8008 tree tem;
8009
8010 /* We only do these simplifications if we are optimizing. */
8011 if (!optimize)
8012 return NULL_TREE;
8013
8014 /* Check for things like (A || B) && (A || C). We can convert this
8015 to A || (B && C). Note that either operator can be any of the four
8016 truth and/or operations and the transformation will still be
8017 valid. Also note that we only care about order for the
8018 ANDIF and ORIF operators. If B contains side effects, this
8019 might change the truth-value of A. */
8020 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8021 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8022 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8023 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8024 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8025 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8026 {
8027 tree a00 = TREE_OPERAND (arg0, 0);
8028 tree a01 = TREE_OPERAND (arg0, 1);
8029 tree a10 = TREE_OPERAND (arg1, 0);
8030 tree a11 = TREE_OPERAND (arg1, 1);
8031 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8032 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8033 && (code == TRUTH_AND_EXPR
8034 || code == TRUTH_OR_EXPR));
8035
8036 if (operand_equal_p (a00, a10, 0))
8037 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8038 fold_build2_loc (loc, code, type, a01, a11));
8039 else if (commutative && operand_equal_p (a00, a11, 0))
8040 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8041 fold_build2_loc (loc, code, type, a01, a10));
8042 else if (commutative && operand_equal_p (a01, a10, 0))
8043 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8044 fold_build2_loc (loc, code, type, a00, a11));
8045
8046 /* This case is tricky because we must either have commutative
8047 operators or else A10 must not have side-effects. */
8048
8049 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8050 && operand_equal_p (a01, a11, 0))
8051 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8052 fold_build2_loc (loc, code, type, a00, a10),
8053 a01);
8054 }
8055
8056 /* See if we can build a range comparison. */
8057 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8058 return tem;
8059
8060 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8061 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8062 {
8063 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8064 if (tem)
8065 return fold_build2_loc (loc, code, type, tem, arg1);
8066 }
8067
8068 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8069 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8070 {
8071 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8072 if (tem)
8073 return fold_build2_loc (loc, code, type, arg0, tem);
8074 }
8075
8076 /* Check for the possibility of merging component references. If our
8077 lhs is another similar operation, try to merge its rhs with our
8078 rhs. Then try to merge our lhs and rhs. */
8079 if (TREE_CODE (arg0) == code
8080 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8081 TREE_OPERAND (arg0, 1), arg1)))
8082 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8083
8084 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8085 return tem;
8086
8087 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8088 && (code == TRUTH_AND_EXPR
8089 || code == TRUTH_ANDIF_EXPR
8090 || code == TRUTH_OR_EXPR
8091 || code == TRUTH_ORIF_EXPR))
8092 {
8093 enum tree_code ncode, icode;
8094
8095 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8096 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8097 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8098
8099 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8100 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8101 We don't want to pack more than two leaves into a non-IF
8102 AND/OR expression.
8103 If the tree code of the left-hand operand isn't an AND/OR-IF code
8104 and not equal to IF-CODE, then we don't want to add the right-hand
8105 operand. If the inner right-hand side of the left-hand operand
8106 has side-effects, or isn't simple, then we can't add to it, as
8107 otherwise we might destroy the if-sequence. */
8108 if (TREE_CODE (arg0) == icode
8109 && simple_operand_p_2 (arg1)
8110 /* Needed for sequence points to handle trapping and
8111 side-effects. */
8112 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8113 {
8114 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8115 arg1);
8116 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8117 tem);
8118 }
8119 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8120 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8121 else if (TREE_CODE (arg1) == icode
8122 && simple_operand_p_2 (arg0)
8123 /* Needed for sequence points to handle trapping and
8124 side-effects. */
8125 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8126 {
8127 tem = fold_build2_loc (loc, ncode, type,
8128 arg0, TREE_OPERAND (arg1, 0));
8129 return fold_build2_loc (loc, icode, type, tem,
8130 TREE_OPERAND (arg1, 1));
8131 }
8132 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8133 into (A OR B).
8134 For sequence point consistency, we need to check for trapping
8135 and side-effects. */
8136 else if (code == icode && simple_operand_p_2 (arg0)
8137 && simple_operand_p_2 (arg1))
8138 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8139 }
8140
8141 return NULL_TREE;
8142 }
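
/* Illustrative example (not GCC source): fold_truth_andor rewrites
   (a || b) && (a || c) as a || (b && c), and when
   LOGICAL_OP_NON_SHORT_CIRCUIT holds it turns a && b with simple,
   trap-free operands into a non-short-circuit TRUTH_AND_EXPR. */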
8143
8144 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8145 by changing CODE to reduce the magnitude of constants involved in
8146 ARG0 of the comparison.
8147 Returns a canonicalized comparison tree if a simplification was
8148 possible, otherwise returns NULL_TREE.
8149 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8150 valid if signed overflow is undefined. */
8151
8152 static tree
8153 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8154 tree arg0, tree arg1,
8155 bool *strict_overflow_p)
8156 {
8157 enum tree_code code0 = TREE_CODE (arg0);
8158 tree t, cst0 = NULL_TREE;
8159 int sgn0;
8160
8161 /* Match A +- CST code arg1. We can change this only if overflow
8162 is undefined. */
8163 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8164 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8165 /* In principle pointers also have undefined overflow behavior,
8166 but that causes problems elsewhere. */
8167 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8168 && (code0 == MINUS_EXPR
8169 || code0 == PLUS_EXPR)
8170 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8171 return NULL_TREE;
8172
8173 /* Identify the constant in arg0 and its sign. */
8174 cst0 = TREE_OPERAND (arg0, 1);
8175 sgn0 = tree_int_cst_sgn (cst0);
8176
8177 /* Overflowed constants and zero will cause problems. */
8178 if (integer_zerop (cst0)
8179 || TREE_OVERFLOW (cst0))
8180 return NULL_TREE;
8181
8182 /* See if we can reduce the magnitude of the constant in
8183 arg0 by changing the comparison code. */
8184 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8185 if (code == LT_EXPR
8186 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8187 code = LE_EXPR;
8188 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8189 else if (code == GT_EXPR
8190 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8191 code = GE_EXPR;
8192 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8193 else if (code == LE_EXPR
8194 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8195 code = LT_EXPR;
8196 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8197 else if (code == GE_EXPR
8198 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8199 code = GT_EXPR;
8200 else
8201 return NULL_TREE;
8202 *strict_overflow_p = true;
8203
8204 /* Now build the constant reduced in magnitude. But not if that
8205 would produce one outside of its type's range. */
8206 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8207 && ((sgn0 == 1
8208 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8209 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8210 || (sgn0 == -1
8211 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8212 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8213 return NULL_TREE;
8214
8215 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8216 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8217 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8218 t = fold_convert (TREE_TYPE (arg1), t);
8219
8220 return fold_build2_loc (loc, code, type, t, arg1);
8221 }
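
/* Illustrative example (not GCC source): for signed x with undefined
   overflow, x - 3 < y is canonicalized above to x - 2 <= y, reducing
   the magnitude of the constant by one. */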
8222
8223 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8224 overflow further. Try to decrease the magnitude of constants involved
8225 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8226 and put sole constants at the second argument position.
8227 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8228
8229 static tree
8230 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8231 tree arg0, tree arg1)
8232 {
8233 tree t;
8234 bool strict_overflow_p;
8235 const char * const warnmsg = G_("assuming signed overflow does not occur "
8236 "when reducing constant in comparison");
8237
8238 /* Try canonicalization by simplifying arg0. */
8239 strict_overflow_p = false;
8240 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8241 &strict_overflow_p);
8242 if (t)
8243 {
8244 if (strict_overflow_p)
8245 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8246 return t;
8247 }
8248
8249 /* Try canonicalization by simplifying arg1 using the swapped
8250 comparison. */
8251 code = swap_tree_comparison (code);
8252 strict_overflow_p = false;
8253 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8254 &strict_overflow_p);
8255 if (t && strict_overflow_p)
8256 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8257 return t;
8258 }
8259
8260 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8261 space. This is used to avoid issuing overflow warnings for
8262 expressions like &p->x which cannot wrap. */
8263
8264 static bool
8265 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8266 {
8267 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8268 return true;
8269
8270 if (bitpos < 0)
8271 return true;
8272
8273 wide_int wi_offset;
8274 int precision = TYPE_PRECISION (TREE_TYPE (base));
8275 if (offset == NULL_TREE)
8276 wi_offset = wi::zero (precision);
8277 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8278 return true;
8279 else
8280 wi_offset = offset;
8281
8282 bool overflow;
8283 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8284 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8285 if (overflow)
8286 return true;
8287
8288 if (!wi::fits_uhwi_p (total))
8289 return true;
8290
8291 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8292 if (size <= 0)
8293 return true;
8294
8295 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8296 array. */
8297 if (TREE_CODE (base) == ADDR_EXPR)
8298 {
8299 HOST_WIDE_INT base_size;
8300
8301 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8302 if (base_size > 0 && size < base_size)
8303 size = base_size;
8304 }
8305
8306 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8307 }
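
/* Illustrative example (not GCC source, assuming 4-byte int): for
   int *p, p + 1 yields TOTAL == 4, which does not exceed the 4-byte
   pointed-to size, so it cannot wrap; p + 2 yields TOTAL == 8, which
   does, so folding p + 2 > p is flagged as assuming no wraparound. */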
8308
8309 /* Return a positive integer when the symbol DECL is known to have
8310 a nonzero address, zero when it's known not to (e.g., it's a weak
8311 symbol), and a negative integer when the symbol is not yet in the
8312 symbol table and so whether or not its address is zero is unknown.
8313 For function-local objects, always return a positive integer. */
8314 static int
8315 maybe_nonzero_address (tree decl)
8316 {
8317 if (DECL_P (decl) && decl_in_symtab_p (decl))
8318 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8319 return symbol->nonzero_address ();
8320
8321 /* Function local objects are never NULL. */
8322 if (DECL_P (decl)
8323 && (DECL_CONTEXT (decl)
8324 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8325 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8326 return 1;
8327
8328 return -1;
8329 }
8330
8331 /* Subroutine of fold_binary. This routine performs all of the
8332 transformations that are common to the equality/inequality
8333 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8334 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8335 fold_binary should call fold_binary. Fold a comparison with
8336 tree code CODE and type TYPE with operands OP0 and OP1. Return
8337 the folded comparison or NULL_TREE. */
8338
8339 static tree
8340 fold_comparison (location_t loc, enum tree_code code, tree type,
8341 tree op0, tree op1)
8342 {
8343 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8344 tree arg0, arg1, tem;
8345
8346 arg0 = op0;
8347 arg1 = op1;
8348
8349 STRIP_SIGN_NOPS (arg0);
8350 STRIP_SIGN_NOPS (arg1);
8351
8352 /* For comparisons of pointers we can decompose it to a compile time
8353 comparison of the base objects and the offsets into the object.
8354 This requires at least one operand being an ADDR_EXPR or a
8355 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8356 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8357 && (TREE_CODE (arg0) == ADDR_EXPR
8358 || TREE_CODE (arg1) == ADDR_EXPR
8359 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8360 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8361 {
8362 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8363 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8364 machine_mode mode;
8365 int volatilep, reversep, unsignedp;
8366 bool indirect_base0 = false, indirect_base1 = false;
8367
8368 /* Get base and offset for the access. Strip ADDR_EXPR for
8369 get_inner_reference, but put it back by stripping INDIRECT_REF
8370 off the base object if possible. indirect_baseN will be true
8371 if baseN is not an address but refers to the object itself. */
8372 base0 = arg0;
8373 if (TREE_CODE (arg0) == ADDR_EXPR)
8374 {
8375 base0
8376 = get_inner_reference (TREE_OPERAND (arg0, 0),
8377 &bitsize, &bitpos0, &offset0, &mode,
8378 &unsignedp, &reversep, &volatilep);
8379 if (TREE_CODE (base0) == INDIRECT_REF)
8380 base0 = TREE_OPERAND (base0, 0);
8381 else
8382 indirect_base0 = true;
8383 }
8384 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8385 {
8386 base0 = TREE_OPERAND (arg0, 0);
8387 STRIP_SIGN_NOPS (base0);
8388 if (TREE_CODE (base0) == ADDR_EXPR)
8389 {
8390 base0
8391 = get_inner_reference (TREE_OPERAND (base0, 0),
8392 &bitsize, &bitpos0, &offset0, &mode,
8393 &unsignedp, &reversep, &volatilep);
8394 if (TREE_CODE (base0) == INDIRECT_REF)
8395 base0 = TREE_OPERAND (base0, 0);
8396 else
8397 indirect_base0 = true;
8398 }
8399 if (offset0 == NULL_TREE || integer_zerop (offset0))
8400 offset0 = TREE_OPERAND (arg0, 1);
8401 else
8402 offset0 = size_binop (PLUS_EXPR, offset0,
8403 TREE_OPERAND (arg0, 1));
8404 if (TREE_CODE (offset0) == INTEGER_CST)
8405 {
8406 offset_int tem = wi::sext (wi::to_offset (offset0),
8407 TYPE_PRECISION (sizetype));
8408 tem <<= LOG2_BITS_PER_UNIT;
8409 tem += bitpos0;
8410 if (wi::fits_shwi_p (tem))
8411 {
8412 bitpos0 = tem.to_shwi ();
8413 offset0 = NULL_TREE;
8414 }
8415 }
8416 }
8417
8418 base1 = arg1;
8419 if (TREE_CODE (arg1) == ADDR_EXPR)
8420 {
8421 base1
8422 = get_inner_reference (TREE_OPERAND (arg1, 0),
8423 &bitsize, &bitpos1, &offset1, &mode,
8424 &unsignedp, &reversep, &volatilep);
8425 if (TREE_CODE (base1) == INDIRECT_REF)
8426 base1 = TREE_OPERAND (base1, 0);
8427 else
8428 indirect_base1 = true;
8429 }
8430 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8431 {
8432 base1 = TREE_OPERAND (arg1, 0);
8433 STRIP_SIGN_NOPS (base1);
8434 if (TREE_CODE (base1) == ADDR_EXPR)
8435 {
8436 base1
8437 = get_inner_reference (TREE_OPERAND (base1, 0),
8438 &bitsize, &bitpos1, &offset1, &mode,
8439 &unsignedp, &reversep, &volatilep);
8440 if (TREE_CODE (base1) == INDIRECT_REF)
8441 base1 = TREE_OPERAND (base1, 0);
8442 else
8443 indirect_base1 = true;
8444 }
8445 if (offset1 == NULL_TREE || integer_zerop (offset1))
8446 offset1 = TREE_OPERAND (arg1, 1);
8447 else
8448 offset1 = size_binop (PLUS_EXPR, offset1,
8449 TREE_OPERAND (arg1, 1));
8450 if (TREE_CODE (offset1) == INTEGER_CST)
8451 {
8452 offset_int tem = wi::sext (wi::to_offset (offset1),
8453 TYPE_PRECISION (sizetype));
8454 tem <<= LOG2_BITS_PER_UNIT;
8455 tem += bitpos1;
8456 if (wi::fits_shwi_p (tem))
8457 {
8458 bitpos1 = tem.to_shwi ();
8459 offset1 = NULL_TREE;
8460 }
8461 }
8462 }
8463
8464 /* If we have equivalent bases we might be able to simplify. */
8465 if (indirect_base0 == indirect_base1
8466 && operand_equal_p (base0, base1,
8467 indirect_base0 ? OEP_ADDRESS_OF : 0))
8468 {
8469 /* We can fold this expression to a constant if the non-constant
8470 offset parts are equal. */
8471 if ((offset0 == offset1
8472 || (offset0 && offset1
8473 && operand_equal_p (offset0, offset1, 0)))
8474 && (equality_code
8475 || (indirect_base0
8476 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8477 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8478
8479 {
8480 if (!equality_code
8481 && bitpos0 != bitpos1
8482 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8483 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8484 fold_overflow_warning (("assuming pointer wraparound does not "
8485 "occur when comparing P +- C1 with "
8486 "P +- C2"),
8487 WARN_STRICT_OVERFLOW_CONDITIONAL);
8488
8489 switch (code)
8490 {
8491 case EQ_EXPR:
8492 return constant_boolean_node (bitpos0 == bitpos1, type);
8493 case NE_EXPR:
8494 return constant_boolean_node (bitpos0 != bitpos1, type);
8495 case LT_EXPR:
8496 return constant_boolean_node (bitpos0 < bitpos1, type);
8497 case LE_EXPR:
8498 return constant_boolean_node (bitpos0 <= bitpos1, type);
8499 case GE_EXPR:
8500 return constant_boolean_node (bitpos0 >= bitpos1, type);
8501 case GT_EXPR:
8502 return constant_boolean_node (bitpos0 > bitpos1, type);
8503 default:;
8504 }
8505 }
8506 /* We can simplify the comparison to a comparison of the variable
8507 offset parts if the constant offset parts are equal.
8508 Be careful to use signed sizetype here because otherwise we
8509 mess with array offsets in the wrong way. This is possible
8510 because pointer arithmetic is restricted to remain within an
8511 object and overflow on pointer differences is undefined as of
8512 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8513 else if (bitpos0 == bitpos1
8514 && (equality_code
8515 || (indirect_base0
8516 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8517 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8518 {
8519 /* By converting to signed sizetype we cover middle-end pointer
8520 arithmetic which operates on unsigned pointer types of size
8521 type size and ARRAY_REF offsets which are properly sign or
8522 zero extended from their type in case it is narrower than
8523 sizetype. */
8524 if (offset0 == NULL_TREE)
8525 offset0 = build_int_cst (ssizetype, 0);
8526 else
8527 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8528 if (offset1 == NULL_TREE)
8529 offset1 = build_int_cst (ssizetype, 0);
8530 else
8531 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8532
8533 if (!equality_code
8534 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8535 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8536 fold_overflow_warning (("assuming pointer wraparound does not "
8537 "occur when comparing P +- C1 with "
8538 "P +- C2"),
8539 WARN_STRICT_OVERFLOW_COMPARISON);
8540
8541 return fold_build2_loc (loc, code, type, offset0, offset1);
8542 }
8543 }
8544 /* For equal offsets we can simplify to a comparison of the
8545 base addresses. */
8546 else if (bitpos0 == bitpos1
8547 && (indirect_base0
8548 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8549 && (indirect_base1
8550 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8551 && ((offset0 == offset1)
8552 || (offset0 && offset1
8553 && operand_equal_p (offset0, offset1, 0))))
8554 {
8555 if (indirect_base0)
8556 base0 = build_fold_addr_expr_loc (loc, base0);
8557 if (indirect_base1)
8558 base1 = build_fold_addr_expr_loc (loc, base1);
8559 return fold_build2_loc (loc, code, type, base0, base1);
8560 }
8561 /* Comparison between an ordinary (non-weak) symbol and a null
8562 pointer can be eliminated since such symbols must have a
8563 non-null address. In C, relational expressions between pointers
8564 to objects and null pointers are undefined. The results
8565 below follow the C++ rules with the additional property that
8566 every object pointer compares greater than a null pointer.
8567 */
8568 else if (((DECL_P (base0)
8569 && maybe_nonzero_address (base0) > 0
8570 /* Avoid folding references to struct members at offset 0 to
8571 prevent tests like '&ptr->firstmember == 0' from getting
8572 eliminated. When ptr is null, although the -> expression
8573 is strictly speaking invalid, GCC retains it as a matter
8574 of QoI. See PR c/44555. */
8575 && (offset0 == NULL_TREE && bitpos0 != 0))
8576 || CONSTANT_CLASS_P (base0))
8577 && indirect_base0
8578 /* The caller guarantees that when one of the arguments is
8579 constant (i.e., null in this case) it is second. */
8580 && integer_zerop (arg1))
8581 {
8582 switch (code)
8583 {
8584 case EQ_EXPR:
8585 case LE_EXPR:
8586 case LT_EXPR:
8587 return constant_boolean_node (false, type);
8588 case GE_EXPR:
8589 case GT_EXPR:
8590 case NE_EXPR:
8591 return constant_boolean_node (true, type);
8592 default:
8593 gcc_unreachable ();
8594 }
8595 }
8596 }
8597
8598 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8599 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8600 the resulting offset is smaller in absolute value than the
8601 original one and has the same sign. */
8602 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8603 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8604 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8605 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8606 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8607 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8608 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8609 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8610 {
8611 tree const1 = TREE_OPERAND (arg0, 1);
8612 tree const2 = TREE_OPERAND (arg1, 1);
8613 tree variable1 = TREE_OPERAND (arg0, 0);
8614 tree variable2 = TREE_OPERAND (arg1, 0);
8615 tree cst;
8616 const char * const warnmsg = G_("assuming signed overflow does not "
8617 "occur when combining constants around "
8618 "a comparison");
8619
8620 /* Put the constant on the side where it doesn't overflow and is
8621 of lower absolute value and of the same sign as before. */
8622 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8623 ? MINUS_EXPR : PLUS_EXPR,
8624 const2, const1);
8625 if (!TREE_OVERFLOW (cst)
8626 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8627 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8628 {
8629 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8630 return fold_build2_loc (loc, code, type,
8631 variable1,
8632 fold_build2_loc (loc, TREE_CODE (arg1),
8633 TREE_TYPE (arg1),
8634 variable2, cst));
8635 }
8636
8637 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8638 ? MINUS_EXPR : PLUS_EXPR,
8639 const1, const2);
8640 if (!TREE_OVERFLOW (cst)
8641 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8642 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8643 {
8644 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8645 return fold_build2_loc (loc, code, type,
8646 fold_build2_loc (loc, TREE_CODE (arg0),
8647 TREE_TYPE (arg0),
8648 variable1, cst),
8649 variable2);
8650 }
8651 }
8652
8653 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8654 if (tem)
8655 return tem;
8656
8657 /* If we are comparing an expression that just has comparisons
8658 of two integer values, arithmetic expressions of those comparisons,
8659 and constants, we can simplify it. There are only three cases
8660 to check: the two values can either be equal, the first can be
8661 greater, or the second can be greater. Fold the expression for
8662 those three values. Since each value must be 0 or 1, we have
8663 eight possibilities, each of which corresponds to the constant 0
8664 or 1 or one of the six possible comparisons.
8665
8666 This handles common cases like (a > b) == 0 but also handles
8667 expressions like ((x > y) - (y > x)) > 0, which supposedly
8668 occur in macroized code. */
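/* A worked example (illustrative): for ((x > y) - (y > x)) > 0 the
   three trial evaluations below yield 1, 0 and 0, i.e. the 3-bit
   mask 4, so the whole expression folds to x > y. */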
8669
8670 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8671 {
8672 tree cval1 = 0, cval2 = 0;
8673 int save_p = 0;
8674
8675 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8676 /* Don't handle degenerate cases here; they should already
8677 have been handled anyway. */
8678 && cval1 != 0 && cval2 != 0
8679 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8680 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8681 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8682 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8683 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8684 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8685 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8686 {
8687 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8688 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8689
8690 /* We can't just pass T to eval_subst in case cval1 or cval2
8691 was the same as ARG1. */
8692
8693 tree high_result
8694 = fold_build2_loc (loc, code, type,
8695 eval_subst (loc, arg0, cval1, maxval,
8696 cval2, minval),
8697 arg1);
8698 tree equal_result
8699 = fold_build2_loc (loc, code, type,
8700 eval_subst (loc, arg0, cval1, maxval,
8701 cval2, maxval),
8702 arg1);
8703 tree low_result
8704 = fold_build2_loc (loc, code, type,
8705 eval_subst (loc, arg0, cval1, minval,
8706 cval2, maxval),
8707 arg1);
8708
8709 /* All three of these results should be 0 or 1. Confirm they are.
8710 Then use those values to select the proper code to use. */
8711
8712 if (TREE_CODE (high_result) == INTEGER_CST
8713 && TREE_CODE (equal_result) == INTEGER_CST
8714 && TREE_CODE (low_result) == INTEGER_CST)
8715 {
8716 /* Make a 3-bit mask with the high-order bit being the
8717 value for `>', the next for `=', and the low for `<'. */
8718 switch ((integer_onep (high_result) * 4)
8719 + (integer_onep (equal_result) * 2)
8720 + integer_onep (low_result))
8721 {
8722 case 0:
8723 /* Always false. */
8724 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8725 case 1:
8726 code = LT_EXPR;
8727 break;
8728 case 2:
8729 code = EQ_EXPR;
8730 break;
8731 case 3:
8732 code = LE_EXPR;
8733 break;
8734 case 4:
8735 code = GT_EXPR;
8736 break;
8737 case 5:
8738 code = NE_EXPR;
8739 break;
8740 case 6:
8741 code = GE_EXPR;
8742 break;
8743 case 7:
8744 /* Always true. */
8745 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8746 }
8747
8748 if (save_p)
8749 {
8750 tem = save_expr (build2 (code, type, cval1, cval2));
8751 protected_set_expr_location (tem, loc);
8752 return tem;
8753 }
8754 return fold_build2_loc (loc, code, type, cval1, cval2);
8755 }
8756 }
8757 }
8758
8759 return NULL_TREE;
8760 }
8761
8762
8763 /* Subroutine of fold_binary. Optimize complex multiplications of the
8764 form z * conj(z) into pow(realpart(z),2) + pow(imagpart(z),2). The
8765 argument EXPR represents the expression "z" of type TYPE. */
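/* For example (illustrative): for z = 3 + 4i this folds
   z * conj(z) = (3 + 4i)*(3 - 4i) into 3*3 + 4*4 = 25, built as
   the complex value 25 + 0i. */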
8766
8767 static tree
8768 fold_mult_zconjz (location_t loc, tree type, tree expr)
8769 {
8770 tree itype = TREE_TYPE (type);
8771 tree rpart, ipart, tem;
8772
8773 if (TREE_CODE (expr) == COMPLEX_EXPR)
8774 {
8775 rpart = TREE_OPERAND (expr, 0);
8776 ipart = TREE_OPERAND (expr, 1);
8777 }
8778 else if (TREE_CODE (expr) == COMPLEX_CST)
8779 {
8780 rpart = TREE_REALPART (expr);
8781 ipart = TREE_IMAGPART (expr);
8782 }
8783 else
8784 {
8785 expr = save_expr (expr);
8786 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8787 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8788 }
8789
8790 rpart = save_expr (rpart);
8791 ipart = save_expr (ipart);
8792 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8793 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8794 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8795 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8796 build_zero_cst (itype));
8797 }
8798
8799
8800 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8801 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8802
8803 static bool
8804 vec_cst_ctor_to_array (tree arg, tree *elts)
8805 {
8806 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8807
8808 if (TREE_CODE (arg) == VECTOR_CST)
8809 {
8810 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8811 elts[i] = VECTOR_CST_ELT (arg, i);
8812 }
8813 else if (TREE_CODE (arg) == CONSTRUCTOR)
8814 {
8815 constructor_elt *elt;
8816
8817 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8818 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8819 return false;
8820 else
8821 elts[i] = elt->value;
8822 }
8823 else
8824 return false;
8825 for (; i < nelts; i++)
8826 elts[i]
8827 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8828 return true;
8829 }
8830
8831 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8832 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8833 NULL_TREE otherwise. */
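/* For example (illustrative): with four-element vectors
   ARG0 = {a, b, c, d} and ARG1 = {e, f, g, h}, the selector
   SEL = {0, 5, 2, 7} indexes into the concatenation of the two
   inputs and yields {a, f, c, h}. */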
8834
8835 static tree
8836 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8837 {
8838 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8839 tree *elts;
8840 bool need_ctor = false;
8841
8842 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8843 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8844 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8845 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8846 return NULL_TREE;
8847
8848 elts = XALLOCAVEC (tree, nelts * 3);
8849 if (!vec_cst_ctor_to_array (arg0, elts)
8850 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8851 return NULL_TREE;
8852
8853 for (i = 0; i < nelts; i++)
8854 {
8855 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8856 need_ctor = true;
8857 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8858 }
8859
8860 if (need_ctor)
8861 {
8862 vec<constructor_elt, va_gc> *v;
8863 vec_alloc (v, nelts);
8864 for (i = 0; i < nelts; i++)
8865 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8866 return build_constructor (type, v);
8867 }
8868 else
8869 return build_vector (type, &elts[2 * nelts]);
8870 }
8871
8872 /* Try to fold a pointer difference of type TYPE between two address
8873 expressions of array references AREF0 and AREF1 using location LOC.
8874 Return a simplified expression for the difference or NULL_TREE. */
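/* For example (illustrative): for &a[i] - &a[j] with a common base
   the result is (i - j) scaled by the element size, the base-offset
   contribution being zero. */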
8875
8876 static tree
8877 fold_addr_of_array_ref_difference (location_t loc, tree type,
8878 tree aref0, tree aref1)
8879 {
8880 tree base0 = TREE_OPERAND (aref0, 0);
8881 tree base1 = TREE_OPERAND (aref1, 0);
8882 tree base_offset = build_int_cst (type, 0);
8883
8884 /* If the bases are array references as well, recurse. If the bases
8885 are pointer indirections compute the difference of the pointers.
8886 If the bases are equal, we are set. */
8887 if ((TREE_CODE (base0) == ARRAY_REF
8888 && TREE_CODE (base1) == ARRAY_REF
8889 && (base_offset
8890 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8891 || (INDIRECT_REF_P (base0)
8892 && INDIRECT_REF_P (base1)
8893 && (base_offset
8894 = fold_binary_loc (loc, MINUS_EXPR, type,
8895 fold_convert (type, TREE_OPERAND (base0, 0)),
8896 fold_convert (type,
8897 TREE_OPERAND (base1, 0)))))
8898 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8899 {
8900 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8901 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8902 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8903 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
8904 return fold_build2_loc (loc, PLUS_EXPR, type,
8905 base_offset,
8906 fold_build2_loc (loc, MULT_EXPR, type,
8907 diff, esz));
8908 }
8909 return NULL_TREE;
8910 }
8911
8912 /* If the real or vector real constant CST of type TYPE has an exact
8913 inverse, return it, else return NULL_TREE. */
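/* For example (illustrative): 4.0 yields the exact inverse 0.25,
   while 3.0 yields NULL_TREE because 1/3 has no exact binary
   representation. */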
8914
8915 tree
8916 exact_inverse (tree type, tree cst)
8917 {
8918 REAL_VALUE_TYPE r;
8919 tree unit_type, *elts;
8920 machine_mode mode;
8921 unsigned vec_nelts, i;
8922
8923 switch (TREE_CODE (cst))
8924 {
8925 case REAL_CST:
8926 r = TREE_REAL_CST (cst);
8927
8928 if (exact_real_inverse (TYPE_MODE (type), &r))
8929 return build_real (type, r);
8930
8931 return NULL_TREE;
8932
8933 case VECTOR_CST:
8934 vec_nelts = VECTOR_CST_NELTS (cst);
8935 elts = XALLOCAVEC (tree, vec_nelts);
8936 unit_type = TREE_TYPE (type);
8937 mode = TYPE_MODE (unit_type);
8938
8939 for (i = 0; i < vec_nelts; i++)
8940 {
8941 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8942 if (!exact_real_inverse (mode, &r))
8943 return NULL_TREE;
8944 elts[i] = build_real (unit_type, r);
8945 }
8946
8947 return build_vector (type, elts);
8948
8949 default:
8950 return NULL_TREE;
8951 }
8952 }
8953
8954 /* Mask out the tz least significant bits of X of type TYPE where
8955 tz is the number of trailing zeroes in Y. */
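/* For example (illustrative): for Y = 24 (binary 11000, three
   trailing zeroes) the three least significant bits of X are
   cleared, so X = 23 (binary 10111) becomes 16 (binary 10000). */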
8956 static wide_int
8957 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8958 {
8959 int tz = wi::ctz (y);
8960 if (tz > 0)
8961 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8962 return x;
8963 }
8964
8965 /* Return true when T is an address and is known to be nonzero.
8966 Floating-point types are not handled here (doing something useful
8967 would need more work). Similar logic is present in nonzero_address_p in rtlanal.c.
8968
8969 If the return value is based on the assumption that signed overflow
8970 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8971 change *STRICT_OVERFLOW_P. */
8972
8973 static bool
8974 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8975 {
8976 tree type = TREE_TYPE (t);
8977 enum tree_code code;
8978
8979 /* Doing something useful for floating point would need more work. */
8980 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8981 return false;
8982
8983 code = TREE_CODE (t);
8984 switch (TREE_CODE_CLASS (code))
8985 {
8986 case tcc_unary:
8987 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8988 strict_overflow_p);
8989 case tcc_binary:
8990 case tcc_comparison:
8991 return tree_binary_nonzero_warnv_p (code, type,
8992 TREE_OPERAND (t, 0),
8993 TREE_OPERAND (t, 1),
8994 strict_overflow_p);
8995 case tcc_constant:
8996 case tcc_declaration:
8997 case tcc_reference:
8998 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8999
9000 default:
9001 break;
9002 }
9003
9004 switch (code)
9005 {
9006 case TRUTH_NOT_EXPR:
9007 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9008 strict_overflow_p);
9009
9010 case TRUTH_AND_EXPR:
9011 case TRUTH_OR_EXPR:
9012 case TRUTH_XOR_EXPR:
9013 return tree_binary_nonzero_warnv_p (code, type,
9014 TREE_OPERAND (t, 0),
9015 TREE_OPERAND (t, 1),
9016 strict_overflow_p);
9017
9018 case COND_EXPR:
9019 case CONSTRUCTOR:
9020 case OBJ_TYPE_REF:
9021 case ASSERT_EXPR:
9022 case ADDR_EXPR:
9023 case WITH_SIZE_EXPR:
9024 case SSA_NAME:
9025 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9026
9027 case COMPOUND_EXPR:
9028 case MODIFY_EXPR:
9029 case BIND_EXPR:
9030 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9031 strict_overflow_p);
9032
9033 case SAVE_EXPR:
9034 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9035 strict_overflow_p);
9036
9037 case CALL_EXPR:
9038 {
9039 tree fndecl = get_callee_fndecl (t);
9040 if (!fndecl) return false;
9041 if (flag_delete_null_pointer_checks && !flag_check_new
9042 && DECL_IS_OPERATOR_NEW (fndecl)
9043 && !TREE_NOTHROW (fndecl))
9044 return true;
9045 if (flag_delete_null_pointer_checks
9046 && lookup_attribute ("returns_nonnull",
9047 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9048 return true;
9049 return alloca_call_p (t);
9050 }
9051
9052 default:
9053 break;
9054 }
9055 return false;
9056 }
9057
9058 /* Return true when T is an address and is known to be nonzero.
9059 Handle warnings about undefined signed overflow. */
9060
9061 bool
9062 tree_expr_nonzero_p (tree t)
9063 {
9064 bool ret, strict_overflow_p;
9065
9066 strict_overflow_p = false;
9067 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9068 if (strict_overflow_p)
9069 fold_overflow_warning (("assuming signed overflow does not occur when "
9070 "determining that expression is always "
9071 "non-zero"),
9072 WARN_STRICT_OVERFLOW_MISC);
9073 return ret;
9074 }
9075
9076 /* Return true if T is known not to be equal to an integer W. */
9077
9078 bool
9079 expr_not_equal_to (tree t, const wide_int &w)
9080 {
9081 wide_int min, max, nz;
9082 value_range_type rtype;
9083 switch (TREE_CODE (t))
9084 {
9085 case INTEGER_CST:
9086 return wi::ne_p (t, w);
9087
9088 case SSA_NAME:
9089 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9090 return false;
9091 rtype = get_range_info (t, &min, &max);
9092 if (rtype == VR_RANGE)
9093 {
9094 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9095 return true;
9096 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9097 return true;
9098 }
9099 else if (rtype == VR_ANTI_RANGE
9100 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9101 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9102 return true;
9103 /* If T has some known zero bits and W has any of those bits set,
9104 then T is known not to be equal to W. */
9105 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9106 TYPE_PRECISION (TREE_TYPE (t))), 0))
9107 return true;
9108 return false;
9109
9110 default:
9111 return false;
9112 }
9113 }
9114
9115 /* Fold a binary expression of code CODE and type TYPE with operands
9116 OP0 and OP1. LOC is the location of the resulting expression.
9117 Return the folded expression if folding is successful. Otherwise,
9118 return NULL_TREE. */
9119
9120 tree
9121 fold_binary_loc (location_t loc,
9122 enum tree_code code, tree type, tree op0, tree op1)
9123 {
9124 enum tree_code_class kind = TREE_CODE_CLASS (code);
9125 tree arg0, arg1, tem;
9126 tree t1 = NULL_TREE;
9127 bool strict_overflow_p;
9128 unsigned int prec;
9129
9130 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9131 && TREE_CODE_LENGTH (code) == 2
9132 && op0 != NULL_TREE
9133 && op1 != NULL_TREE);
9134
9135 arg0 = op0;
9136 arg1 = op1;
9137
9138 /* Strip any conversions that don't change the mode. This is
9139 safe for every expression, except for a comparison expression
9140 because its signedness is derived from its operands. So, in
9141 the latter case, only strip conversions that don't change the
9142 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9143 preserved.
9144
9145 Note that this is done as an internal manipulation within the
9146 constant folder, in order to find the simplest representation
9147 of the arguments so that their form can be studied. In any
9148 case, the appropriate type conversions should be put back in
9149 the tree that will get out of the constant folder. */
9150
9151 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9152 {
9153 STRIP_SIGN_NOPS (arg0);
9154 STRIP_SIGN_NOPS (arg1);
9155 }
9156 else
9157 {
9158 STRIP_NOPS (arg0);
9159 STRIP_NOPS (arg1);
9160 }
9161
9162 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9163 constant but we can't do arithmetic on them. */
9164 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9165 {
9166 tem = const_binop (code, type, arg0, arg1);
9167 if (tem != NULL_TREE)
9168 {
9169 if (TREE_TYPE (tem) != type)
9170 tem = fold_convert_loc (loc, type, tem);
9171 return tem;
9172 }
9173 }
9174
9175 /* If this is a commutative operation, and ARG0 is a constant, move it
9176 to ARG1 to reduce the number of tests below. */
9177 if (commutative_tree_code (code)
9178 && tree_swap_operands_p (arg0, arg1))
9179 return fold_build2_loc (loc, code, type, op1, op0);
9180
9181 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9182 to ARG1 to reduce the number of tests below. */
9183 if (kind == tcc_comparison
9184 && tree_swap_operands_p (arg0, arg1))
9185 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9186
9187 tem = generic_simplify (loc, code, type, op0, op1);
9188 if (tem)
9189 return tem;
9190
9191 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9192
9193 First check for cases where an arithmetic operation is applied to a
9194 compound, conditional, or comparison operation. Push the arithmetic
9195 operation inside the compound or conditional to see if any folding
9196 can then be done. Convert comparison to conditional for this purpose.
9197 This also optimizes non-constant cases that used to be done in
9198 expand_expr.
9199
9200 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9201 one of the operands is a comparison and the other is a comparison, a
9202 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9203 code below would make the expression more complex. Change it to a
9204 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9205 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9206
9207 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9208 || code == EQ_EXPR || code == NE_EXPR)
9209 && TREE_CODE (type) != VECTOR_TYPE
9210 && ((truth_value_p (TREE_CODE (arg0))
9211 && (truth_value_p (TREE_CODE (arg1))
9212 || (TREE_CODE (arg1) == BIT_AND_EXPR
9213 && integer_onep (TREE_OPERAND (arg1, 1)))))
9214 || (truth_value_p (TREE_CODE (arg1))
9215 && (truth_value_p (TREE_CODE (arg0))
9216 || (TREE_CODE (arg0) == BIT_AND_EXPR
9217 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9218 {
9219 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9220 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9221 : TRUTH_XOR_EXPR,
9222 boolean_type_node,
9223 fold_convert_loc (loc, boolean_type_node, arg0),
9224 fold_convert_loc (loc, boolean_type_node, arg1));
9225
9226 if (code == EQ_EXPR)
9227 tem = invert_truthvalue_loc (loc, tem);
9228
9229 return fold_convert_loc (loc, type, tem);
9230 }
9231
9232 if (TREE_CODE_CLASS (code) == tcc_binary
9233 || TREE_CODE_CLASS (code) == tcc_comparison)
9234 {
9235 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9236 {
9237 tem = fold_build2_loc (loc, code, type,
9238 fold_convert_loc (loc, TREE_TYPE (op0),
9239 TREE_OPERAND (arg0, 1)), op1);
9240 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9241 tem);
9242 }
9243 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9244 {
9245 tem = fold_build2_loc (loc, code, type, op0,
9246 fold_convert_loc (loc, TREE_TYPE (op1),
9247 TREE_OPERAND (arg1, 1)));
9248 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9249 tem);
9250 }
9251
9252 if (TREE_CODE (arg0) == COND_EXPR
9253 || TREE_CODE (arg0) == VEC_COND_EXPR
9254 || COMPARISON_CLASS_P (arg0))
9255 {
9256 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9257 arg0, arg1,
9258 /*cond_first_p=*/1);
9259 if (tem != NULL_TREE)
9260 return tem;
9261 }
9262
9263 if (TREE_CODE (arg1) == COND_EXPR
9264 || TREE_CODE (arg1) == VEC_COND_EXPR
9265 || COMPARISON_CLASS_P (arg1))
9266 {
9267 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9268 arg1, arg0,
9269 /*cond_first_p=*/0);
9270 if (tem != NULL_TREE)
9271 return tem;
9272 }
9273 }
9274
9275 switch (code)
9276 {
9277 case MEM_REF:
9278 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9279 if (TREE_CODE (arg0) == ADDR_EXPR
9280 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9281 {
9282 tree iref = TREE_OPERAND (arg0, 0);
9283 return fold_build2 (MEM_REF, type,
9284 TREE_OPERAND (iref, 0),
9285 int_const_binop (PLUS_EXPR, arg1,
9286 TREE_OPERAND (iref, 1)));
9287 }
9288
9289 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9290 if (TREE_CODE (arg0) == ADDR_EXPR
9291 && handled_component_p (TREE_OPERAND (arg0, 0)))
9292 {
9293 tree base;
9294 HOST_WIDE_INT coffset;
9295 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9296 &coffset);
9297 if (!base)
9298 return NULL_TREE;
9299 return fold_build2 (MEM_REF, type,
9300 build_fold_addr_expr (base),
9301 int_const_binop (PLUS_EXPR, arg1,
9302 size_int (coffset)));
9303 }
9304
9305 return NULL_TREE;
9306
9307 case POINTER_PLUS_EXPR:
9308 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9309 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9310 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9311 return fold_convert_loc (loc, type,
9312 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9313 fold_convert_loc (loc, sizetype,
9314 arg1),
9315 fold_convert_loc (loc, sizetype,
9316 arg0)));
9317
9318 return NULL_TREE;
9319
9320 case PLUS_EXPR:
9321 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9322 {
9323 /* X + (X / CST) * -CST is X % CST. */
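/* For example (illustrative): x + (x / 7) * -7 folds to x % 7,
   by the identity x % 7 == x - (x / 7) * 7. */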
9324 if (TREE_CODE (arg1) == MULT_EXPR
9325 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9326 && operand_equal_p (arg0,
9327 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9328 {
9329 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9330 tree cst1 = TREE_OPERAND (arg1, 1);
9331 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9332 cst1, cst0);
9333 if (sum && integer_zerop (sum))
9334 return fold_convert_loc (loc, type,
9335 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9336 TREE_TYPE (arg0), arg0,
9337 cst0));
9338 }
9339 }
9340
9341 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same,
9342 or one of them 1. Make sure the type is not saturating and has the signedness of
9343 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9344 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9345 if ((TREE_CODE (arg0) == MULT_EXPR
9346 || TREE_CODE (arg1) == MULT_EXPR)
9347 && !TYPE_SATURATING (type)
9348 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9349 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9350 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9351 {
9352 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9353 if (tem)
9354 return tem;
9355 }
9356
9357 if (! FLOAT_TYPE_P (type))
9358 {
9359 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9360 (plus (plus (mult) (mult)) (foo)) so that we can
9361 take advantage of the factoring cases below. */
9362 if (ANY_INTEGRAL_TYPE_P (type)
9363 && TYPE_OVERFLOW_WRAPS (type)
9364 && (((TREE_CODE (arg0) == PLUS_EXPR
9365 || TREE_CODE (arg0) == MINUS_EXPR)
9366 && TREE_CODE (arg1) == MULT_EXPR)
9367 || ((TREE_CODE (arg1) == PLUS_EXPR
9368 || TREE_CODE (arg1) == MINUS_EXPR)
9369 && TREE_CODE (arg0) == MULT_EXPR)))
9370 {
9371 tree parg0, parg1, parg, marg;
9372 enum tree_code pcode;
9373
9374 if (TREE_CODE (arg1) == MULT_EXPR)
9375 parg = arg0, marg = arg1;
9376 else
9377 parg = arg1, marg = arg0;
9378 pcode = TREE_CODE (parg);
9379 parg0 = TREE_OPERAND (parg, 0);
9380 parg1 = TREE_OPERAND (parg, 1);
9381 STRIP_NOPS (parg0);
9382 STRIP_NOPS (parg1);
9383
9384 if (TREE_CODE (parg0) == MULT_EXPR
9385 && TREE_CODE (parg1) != MULT_EXPR)
9386 return fold_build2_loc (loc, pcode, type,
9387 fold_build2_loc (loc, PLUS_EXPR, type,
9388 fold_convert_loc (loc, type,
9389 parg0),
9390 fold_convert_loc (loc, type,
9391 marg)),
9392 fold_convert_loc (loc, type, parg1));
9393 if (TREE_CODE (parg0) != MULT_EXPR
9394 && TREE_CODE (parg1) == MULT_EXPR)
9395 return
9396 fold_build2_loc (loc, PLUS_EXPR, type,
9397 fold_convert_loc (loc, type, parg0),
9398 fold_build2_loc (loc, pcode, type,
9399 fold_convert_loc (loc, type, marg),
9400 fold_convert_loc (loc, type,
9401 parg1)));
9402 }
9403 }
9404 else
9405 {
9406 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9407 to __complex__ ( x, y ). This is not the same for SNaNs or
9408 if signed zeros are involved. */
9409 if (!HONOR_SNANS (element_mode (arg0))
9410 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9411 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9412 {
9413 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9414 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9415 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9416 bool arg0rz = false, arg0iz = false;
9417 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9418 || (arg0i && (arg0iz = real_zerop (arg0i))))
9419 {
9420 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9421 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9422 if (arg0rz && arg1i && real_zerop (arg1i))
9423 {
9424 tree rp = arg1r ? arg1r
9425 : build1 (REALPART_EXPR, rtype, arg1);
9426 tree ip = arg0i ? arg0i
9427 : build1 (IMAGPART_EXPR, rtype, arg0);
9428 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9429 }
9430 else if (arg0iz && arg1r && real_zerop (arg1r))
9431 {
9432 tree rp = arg0r ? arg0r
9433 : build1 (REALPART_EXPR, rtype, arg0);
9434 tree ip = arg1i ? arg1i
9435 : build1 (IMAGPART_EXPR, rtype, arg1);
9436 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9437 }
9438 }
9439 }
9440
9441 if (flag_unsafe_math_optimizations
9442 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9443 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9444 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9445 return tem;
9446
9447 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9448 We associate floats only if the user has specified
9449 -fassociative-math. */
9450 if (flag_associative_math
9451 && TREE_CODE (arg1) == PLUS_EXPR
9452 && TREE_CODE (arg0) != MULT_EXPR)
9453 {
9454 tree tree10 = TREE_OPERAND (arg1, 0);
9455 tree tree11 = TREE_OPERAND (arg1, 1);
9456 if (TREE_CODE (tree11) == MULT_EXPR
9457 && TREE_CODE (tree10) == MULT_EXPR)
9458 {
9459 tree tree0;
9460 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9461 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9462 }
9463 }
9464 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9465 We associate floats only if the user has specified
9466 -fassociative-math. */
9467 if (flag_associative_math
9468 && TREE_CODE (arg0) == PLUS_EXPR
9469 && TREE_CODE (arg1) != MULT_EXPR)
9470 {
9471 tree tree00 = TREE_OPERAND (arg0, 0);
9472 tree tree01 = TREE_OPERAND (arg0, 1);
9473 if (TREE_CODE (tree01) == MULT_EXPR
9474 && TREE_CODE (tree00) == MULT_EXPR)
9475 {
9476 tree tree0;
9477 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9478 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9479 }
9480 }
9481 }
9482
9483 bit_rotate:
9484 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
9485 is a rotate of A by C1 bits. */
9486 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
9487 is a rotate of A by B bits. */
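/* For example (illustrative): for a 32-bit unsigned A,
   (A << 8) + (A >> 24) is recognized below as A rotated left by 8,
   since 8 + 24 == 32; likewise (A << B) + (A >> (32 - B)) is a
   rotate of A by B bits. */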
9488 {
9489 enum tree_code code0, code1;
9490 tree rtype;
9491 code0 = TREE_CODE (arg0);
9492 code1 = TREE_CODE (arg1);
9493 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9494 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9495 && operand_equal_p (TREE_OPERAND (arg0, 0),
9496 TREE_OPERAND (arg1, 0), 0)
9497 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9498 TYPE_UNSIGNED (rtype))
9499 /* Only create rotates in complete modes. Other cases are not
9500 expanded properly. */
9501 && (element_precision (rtype)
9502 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9503 {
9504 tree tree01, tree11;
9505 enum tree_code code01, code11;
9506
9507 tree01 = TREE_OPERAND (arg0, 1);
9508 tree11 = TREE_OPERAND (arg1, 1);
9509 STRIP_NOPS (tree01);
9510 STRIP_NOPS (tree11);
9511 code01 = TREE_CODE (tree01);
9512 code11 = TREE_CODE (tree11);
9513 if (code01 == INTEGER_CST
9514 && code11 == INTEGER_CST
9515 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9516 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9517 {
9518 tem = build2_loc (loc, LROTATE_EXPR,
9519 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9520 TREE_OPERAND (arg0, 0),
9521 code0 == LSHIFT_EXPR
9522 ? TREE_OPERAND (arg0, 1)
9523 : TREE_OPERAND (arg1, 1));
9524 return fold_convert_loc (loc, type, tem);
9525 }
9526 else if (code11 == MINUS_EXPR)
9527 {
9528 tree tree110, tree111;
9529 tree110 = TREE_OPERAND (tree11, 0);
9530 tree111 = TREE_OPERAND (tree11, 1);
9531 STRIP_NOPS (tree110);
9532 STRIP_NOPS (tree111);
9533 if (TREE_CODE (tree110) == INTEGER_CST
9534 && 0 == compare_tree_int (tree110,
9535 element_precision
9536 (TREE_TYPE (TREE_OPERAND
9537 (arg0, 0))))
9538 && operand_equal_p (tree01, tree111, 0))
9539 return
9540 fold_convert_loc (loc, type,
9541 build2 ((code0 == LSHIFT_EXPR
9542 ? LROTATE_EXPR
9543 : RROTATE_EXPR),
9544 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9545 TREE_OPERAND (arg0, 0),
9546 TREE_OPERAND (arg0, 1)));
9547 }
9548 else if (code01 == MINUS_EXPR)
9549 {
9550 tree tree010, tree011;
9551 tree010 = TREE_OPERAND (tree01, 0);
9552 tree011 = TREE_OPERAND (tree01, 1);
9553 STRIP_NOPS (tree010);
9554 STRIP_NOPS (tree011);
9555 if (TREE_CODE (tree010) == INTEGER_CST
9556 && 0 == compare_tree_int (tree010,
9557 element_precision
9558 (TREE_TYPE (TREE_OPERAND
9559 (arg0, 0))))
9560 && operand_equal_p (tree11, tree011, 0))
9561 return fold_convert_loc
9562 (loc, type,
9563 build2 ((code0 != LSHIFT_EXPR
9564 ? LROTATE_EXPR
9565 : RROTATE_EXPR),
9566 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9567 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9568 }
9569 }
9570 }
9571
9572 associate:
9573 /* In most languages, we can't associate operations on floats through
9574 parentheses. Rather than remember where the parentheses were, we
9575 don't associate floats at all, unless the user has specified
9576 -fassociative-math.
9577 And we need to make sure the type is not saturating. */
9578
9579 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9580 && !TYPE_SATURATING (type))
9581 {
9582 tree var0, con0, lit0, minus_lit0;
9583 tree var1, con1, lit1, minus_lit1;
9584 tree atype = type;
9585 bool ok = true;
9586
9587 /* Split both trees into variables, constants, and literals. Then
9588 associate each group together, the constants with literals,
9589 then the result with variables. This increases the chances of
9590 literals being recombined later and of generating relocatable
9591 expressions for the sum of a constant and literal. */
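/* For example (illustrative): for unsigned x and y,
   (x + 3) + (y + 5) splits into variables {x, y} and literals
   {3, 5} and reassociates below into (x + y) + 8. (For signed
   types the undefined-overflow checks below restrict this.) */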
9592 var0 = split_tree (loc, arg0, type, code,
9593 &con0, &lit0, &minus_lit0, 0);
9594 var1 = split_tree (loc, arg1, type, code,
9595 &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9596
9597 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9598 if (code == MINUS_EXPR)
9599 code = PLUS_EXPR;
9600
9601 /* With undefined overflow prefer doing association in a type
9602 which wraps on overflow, if that is one of the operand types. */
9603 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9604 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9605 {
9606 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9607 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9608 atype = TREE_TYPE (arg0);
9609 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9610 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9611 atype = TREE_TYPE (arg1);
9612 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9613 }
9614
9615 /* With undefined overflow we can only associate constants with one
9616 variable, and constants whose association doesn't overflow. */
9617 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9618 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9619 {
9620 if (var0 && var1)
9621 {
9622 tree tmp0 = var0;
9623 tree tmp1 = var1;
9624 bool one_neg = false;
9625
9626 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9627 {
9628 tmp0 = TREE_OPERAND (tmp0, 0);
9629 one_neg = !one_neg;
9630 }
9631 if (CONVERT_EXPR_P (tmp0)
9632 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9633 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9634 <= TYPE_PRECISION (atype)))
9635 tmp0 = TREE_OPERAND (tmp0, 0);
9636 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9637 {
9638 tmp1 = TREE_OPERAND (tmp1, 0);
9639 one_neg = !one_neg;
9640 }
9641 if (CONVERT_EXPR_P (tmp1)
9642 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9643 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9644 <= TYPE_PRECISION (atype)))
9645 tmp1 = TREE_OPERAND (tmp1, 0);
9646 /* The only case we can still associate with two variables
9647 is if they cancel out. */
9648 if (!one_neg
9649 || !operand_equal_p (tmp0, tmp1, 0))
9650 ok = false;
9651 }
9652 }
9653
9654 /* Only do something if we found more than two objects. Otherwise,
9655 nothing has changed and we risk infinite recursion. */
9656 if (ok
9657 && (2 < ((var0 != 0) + (var1 != 0)
9658 + (con0 != 0) + (con1 != 0)
9659 + (lit0 != 0) + (lit1 != 0)
9660 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9661 {
9662 var0 = associate_trees (loc, var0, var1, code, atype);
9663 con0 = associate_trees (loc, con0, con1, code, atype);
9664 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9665 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9666 code, atype);
9667
9668 /* Preserve the MINUS_EXPR if the negative part of the literal is
9669 greater than the positive part. Otherwise, the multiplicative
9670 folding code (i.e. extract_muldiv) may be fooled in case
9671 unsigned constants are subtracted, like in the following
9672 example: ((X*2 + 4) - 8U)/2. */
9673 if (minus_lit0 && lit0)
9674 {
9675 if (TREE_CODE (lit0) == INTEGER_CST
9676 && TREE_CODE (minus_lit0) == INTEGER_CST
9677 && tree_int_cst_lt (lit0, minus_lit0))
9678 {
9679 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9680 MINUS_EXPR, atype);
9681 lit0 = 0;
9682 }
9683 else
9684 {
9685 lit0 = associate_trees (loc, lit0, minus_lit0,
9686 MINUS_EXPR, atype);
9687 minus_lit0 = 0;
9688 }
9689 }
9690
9691 /* Don't introduce overflows through reassociation. */
9692 if ((lit0 && TREE_OVERFLOW_P (lit0))
9693 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9694 return NULL_TREE;
9695
9696 if (minus_lit0)
9697 {
9698 if (con0 == 0)
9699 return
9700 fold_convert_loc (loc, type,
9701 associate_trees (loc, var0, minus_lit0,
9702 MINUS_EXPR, atype));
9703 else
9704 {
9705 con0 = associate_trees (loc, con0, minus_lit0,
9706 MINUS_EXPR, atype);
9707 return
9708 fold_convert_loc (loc, type,
9709 associate_trees (loc, var0, con0,
9710 PLUS_EXPR, atype));
9711 }
9712 }
9713
9714 con0 = associate_trees (loc, con0, lit0, code, atype);
9715 return
9716 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9717 code, atype));
9718 }
9719 }
9720
9721 return NULL_TREE;
9722
9723 case MINUS_EXPR:
9724 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9725 if (TREE_CODE (arg0) == NEGATE_EXPR
9726 && negate_expr_p (op1))
9727 return fold_build2_loc (loc, MINUS_EXPR, type,
9728 negate_expr (op1),
9729 fold_convert_loc (loc, type,
9730 TREE_OPERAND (arg0, 0)));
9731
9732 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9733 __complex__ ( x, -y ). This is not the same for SNaNs or if
9734 signed zeros are involved. */
9735 if (!HONOR_SNANS (element_mode (arg0))
9736 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9737 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9738 {
9739 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9740 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9741 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9742 bool arg0rz = false, arg0iz = false;
9743 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9744 || (arg0i && (arg0iz = real_zerop (arg0i))))
9745 {
9746 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9747 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9748 if (arg0rz && arg1i && real_zerop (arg1i))
9749 {
9750 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9751 arg1r ? arg1r
9752 : build1 (REALPART_EXPR, rtype, arg1));
9753 tree ip = arg0i ? arg0i
9754 : build1 (IMAGPART_EXPR, rtype, arg0);
9755 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9756 }
9757 else if (arg0iz && arg1r && real_zerop (arg1r))
9758 {
9759 tree rp = arg0r ? arg0r
9760 : build1 (REALPART_EXPR, rtype, arg0);
9761 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9762 arg1i ? arg1i
9763 : build1 (IMAGPART_EXPR, rtype, arg1));
9764 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9765 }
9766 }
9767 }
9768
9769 /* A - B -> A + (-B) if B is easily negatable. */
9770 if (negate_expr_p (op1)
9771 && ! TYPE_OVERFLOW_SANITIZED (type)
9772 && ((FLOAT_TYPE_P (type)
9773 /* Avoid this transformation if B is a positive REAL_CST. */
9774 && (TREE_CODE (op1) != REAL_CST
9775 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9776 || INTEGRAL_TYPE_P (type)))
9777 return fold_build2_loc (loc, PLUS_EXPR, type,
9778 fold_convert_loc (loc, type, arg0),
9779 negate_expr (op1));
9780
9781 /* Fold &a[i] - &a[j] to i-j. */
9782 if (TREE_CODE (arg0) == ADDR_EXPR
9783 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9784 && TREE_CODE (arg1) == ADDR_EXPR
9785 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9786 {
9787 tree tem = fold_addr_of_array_ref_difference (loc, type,
9788 TREE_OPERAND (arg0, 0),
9789 TREE_OPERAND (arg1, 0));
9790 if (tem)
9791 return tem;
9792 }
9793
9794 if (FLOAT_TYPE_P (type)
9795 && flag_unsafe_math_optimizations
9796 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9797 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9798 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9799 return tem;
9800
9801 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same,
9802 or one of them 1. Make sure the type is not saturating and has the signedness of
9803 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9804 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9805 if ((TREE_CODE (arg0) == MULT_EXPR
9806 || TREE_CODE (arg1) == MULT_EXPR)
9807 && !TYPE_SATURATING (type)
9808 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9809 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9810 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9811 {
9812 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9813 if (tem)
9814 return tem;
9815 }
9816
9817 goto associate;
9818
9819 case MULT_EXPR:
9820 if (! FLOAT_TYPE_P (type))
9821 {
9822 /* Transform x * -C into -x * C if x is easily negatable. */
9823 if (TREE_CODE (op1) == INTEGER_CST
9824 && tree_int_cst_sgn (op1) == -1
9825 && negate_expr_p (op0)
9826 && negate_expr_p (op1)
9827 && (tem = negate_expr (op1)) != op1
9828 && ! TREE_OVERFLOW (tem))
9829 return fold_build2_loc (loc, MULT_EXPR, type,
9830 fold_convert_loc (loc, type,
9831 negate_expr (op0)), tem);
9832
9833 strict_overflow_p = false;
9834 if (TREE_CODE (arg1) == INTEGER_CST
9835 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9836 &strict_overflow_p)))
9837 {
9838 if (strict_overflow_p)
9839 fold_overflow_warning (("assuming signed overflow does not "
9840 "occur when simplifying "
9841 "multiplication"),
9842 WARN_STRICT_OVERFLOW_MISC);
9843 return fold_convert_loc (loc, type, tem);
9844 }
9845
9846 /* Optimize z * conj(z) for integer complex numbers. */
9847 if (TREE_CODE (arg0) == CONJ_EXPR
9848 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9849 return fold_mult_zconjz (loc, type, arg1);
9850 if (TREE_CODE (arg1) == CONJ_EXPR
9851 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9852 return fold_mult_zconjz (loc, type, arg0);
9853 }
9854 else
9855 {
9856 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9857 This is not the same for NaNs or if signed zeros are
9858 involved. */
9859 if (!HONOR_NANS (arg0)
9860 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9861 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9862 && TREE_CODE (arg1) == COMPLEX_CST
9863 && real_zerop (TREE_REALPART (arg1)))
9864 {
9865 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9866 if (real_onep (TREE_IMAGPART (arg1)))
9867 return
9868 fold_build2_loc (loc, COMPLEX_EXPR, type,
9869 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9870 rtype, arg0)),
9871 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9872 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9873 return
9874 fold_build2_loc (loc, COMPLEX_EXPR, type,
9875 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9876 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9877 rtype, arg0)));
9878 }
9879
9880 /* Optimize z * conj(z) for floating point complex numbers.
9881 Guarded by flag_unsafe_math_optimizations as non-finite
9882 imaginary components don't produce scalar results. */
9883 if (flag_unsafe_math_optimizations
9884 && TREE_CODE (arg0) == CONJ_EXPR
9885 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9886 return fold_mult_zconjz (loc, type, arg1);
9887 if (flag_unsafe_math_optimizations
9888 && TREE_CODE (arg1) == CONJ_EXPR
9889 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9890 return fold_mult_zconjz (loc, type, arg0);
9891 }
9892 goto associate;
9893
9894 case BIT_IOR_EXPR:
9895 /* Canonicalize (X & C1) | C2. */
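/* For example (illustrative): (x & 0x03) | 0x0F folds to 0x0F
   since C1 & C2 == C1, while (x & 0x13) | 0x0F becomes
   (x & 0x10) | 0x0F by dropping the C1 bits already covered
   by C2. */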
9896 if (TREE_CODE (arg0) == BIT_AND_EXPR
9897 && TREE_CODE (arg1) == INTEGER_CST
9898 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9899 {
9900 int width = TYPE_PRECISION (type), w;
9901 wide_int c1 = TREE_OPERAND (arg0, 1);
9902 wide_int c2 = arg1;
9903
9904 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9905 if ((c1 & c2) == c1)
9906 return omit_one_operand_loc (loc, type, arg1,
9907 TREE_OPERAND (arg0, 0));
9908
9909 wide_int msk = wi::mask (width, false,
9910 TYPE_PRECISION (TREE_TYPE (arg1)));
9911
9912 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9913 if (msk.and_not (c1 | c2) == 0)
9914 {
9915 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9916 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9917 }
9918
9919 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9920 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9921 mode which allows further optimizations. */
9922 c1 &= msk;
9923 c2 &= msk;
9924 wide_int c3 = c1.and_not (c2);
9925 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9926 {
9927 wide_int mask = wi::mask (w, false,
9928 TYPE_PRECISION (type));
9929 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9930 {
9931 c3 = mask;
9932 break;
9933 }
9934 }
9935
9936 if (c3 != c1)
9937 {
9938 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9939 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
9940 wide_int_to_tree (type, c3));
9941 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9942 }
9943 }
9944
9945 /* See if this can be simplified into a rotate first. If that
9946 is unsuccessful continue in the association code. */
9947 goto bit_rotate;
9948
9949 case BIT_XOR_EXPR:
9950 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9951 if (TREE_CODE (arg0) == BIT_AND_EXPR
9952 && INTEGRAL_TYPE_P (type)
9953 && integer_onep (TREE_OPERAND (arg0, 1))
9954 && integer_onep (arg1))
9955 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9956 build_zero_cst (TREE_TYPE (arg0)));
9957
9958 /* See if this can be simplified into a rotate first. If that
9959 is unsuccessful continue in the association code. */
9960 goto bit_rotate;
9961
9962 case BIT_AND_EXPR:
9963 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9964 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9965 && INTEGRAL_TYPE_P (type)
9966 && integer_onep (TREE_OPERAND (arg0, 1))
9967 && integer_onep (arg1))
9968 {
9969 tree tem2;
9970 tem = TREE_OPERAND (arg0, 0);
9971 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9972 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9973 tem, tem2);
9974 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9975 build_zero_cst (TREE_TYPE (tem)));
9976 }
9977 /* Fold ~X & 1 as (X & 1) == 0. */
9978 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9979 && INTEGRAL_TYPE_P (type)
9980 && integer_onep (arg1))
9981 {
9982 tree tem2;
9983 tem = TREE_OPERAND (arg0, 0);
9984 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9985 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9986 tem, tem2);
9987 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9988 build_zero_cst (TREE_TYPE (tem)));
9989 }
9990 /* Fold !X & 1 as X == 0. */
9991 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9992 && integer_onep (arg1))
9993 {
9994 tem = TREE_OPERAND (arg0, 0);
9995 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9996 build_zero_cst (TREE_TYPE (tem)));
9997 }
9998
9999 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10000 multiple of 1 << CST. */
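/* For example (illustrative): (x * 16) & -8 folds to x * 16,
   since 16 is a multiple of 8 and the low three bits of the
   product are already zero. */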
10001 if (TREE_CODE (arg1) == INTEGER_CST)
10002 {
10003 wide_int cst1 = arg1;
10004 wide_int ncst1 = -cst1;
10005 if ((cst1 & ncst1) == ncst1
10006 && multiple_of_p (type, arg0,
10007 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10008 return fold_convert_loc (loc, type, arg0);
10009 }
10010
10011 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10012 bits from CST2. */
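/* For example (illustrative): (x * 8) & 7 folds to 0, because the
   factor 8 forces the three low bits to zero; (x * 8) & 0x1F drops
   those known-zero bits and becomes (x * 8) & 0x18. */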
10013 if (TREE_CODE (arg1) == INTEGER_CST
10014 && TREE_CODE (arg0) == MULT_EXPR
10015 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10016 {
10017 wide_int warg1 = arg1;
10018 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10019
10020 if (masked == 0)
10021 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10022 arg0, arg1);
10023 else if (masked != warg1)
10024 {
10025 /* Avoid the transform if arg1 is a mask of some
10026 mode which allows further optimizations. */
10027 int pop = wi::popcount (warg1);
10028 if (!(pop >= BITS_PER_UNIT
10029 && pow2p_hwi (pop)
10030 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10031 return fold_build2_loc (loc, code, type, op0,
10032 wide_int_to_tree (type, masked));
10033 }
10034 }
10035
10036 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10037 ((A & N) + B) & M -> (A + B) & M
10038 Similarly if (N & M) == 0,
10039 ((A | N) + B) & M -> (A + B) & M
10040 and for - instead of + (or unary - instead of +)
10041 and/or ^ instead of |.
10042 If B is constant and (B & M) == 0, fold into A & M. */
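/* For example (illustrative): with M == 0xFF,
   ((a & 0x1FF) + b) & 0xFF folds to (a + b) & 0xFF, and so does
   ((a | 0x100) + b) & 0xFF, since bit 8 cannot affect the low
   eight bits of the sum. */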
10043 if (TREE_CODE (arg1) == INTEGER_CST)
10044 {
10045 wide_int cst1 = arg1;
10046 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10047 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10048 && (TREE_CODE (arg0) == PLUS_EXPR
10049 || TREE_CODE (arg0) == MINUS_EXPR
10050 || TREE_CODE (arg0) == NEGATE_EXPR)
10051 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10052 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10053 {
10054 tree pmop[2];
10055 int which = 0;
10056 wide_int cst0;
10057
10058 /* Now we know that arg0 is (C + D) or (C - D) or
10059 -C, and arg1 (M) == (1LL << cst) - 1.
10060 Store C into PMOP[0] and D into PMOP[1]. */
10061 pmop[0] = TREE_OPERAND (arg0, 0);
10062 pmop[1] = NULL;
10063 if (TREE_CODE (arg0) != NEGATE_EXPR)
10064 {
10065 pmop[1] = TREE_OPERAND (arg0, 1);
10066 which = 1;
10067 }
10068
10069 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10070 which = -1;
10071
10072 for (; which >= 0; which--)
10073 switch (TREE_CODE (pmop[which]))
10074 {
10075 case BIT_AND_EXPR:
10076 case BIT_IOR_EXPR:
10077 case BIT_XOR_EXPR:
10078 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10079 != INTEGER_CST)
10080 break;
10081 cst0 = TREE_OPERAND (pmop[which], 1);
10082 cst0 &= cst1;
10083 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10084 {
10085 if (cst0 != cst1)
10086 break;
10087 }
10088 else if (cst0 != 0)
10089 break;
10090 /* If C or D is of the form (A & N) where
10091 (N & M) == M, or of the form (A | N) or
10092 (A ^ N) where (N & M) == 0, replace it with A. */
10093 pmop[which] = TREE_OPERAND (pmop[which], 0);
10094 break;
10095 case INTEGER_CST:
10096 /* If C or D is a constant N where (N & M) == 0, it can be
10097 omitted (assumed 0). */
10098 if ((TREE_CODE (arg0) == PLUS_EXPR
10099 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10100 && (cst1 & pmop[which]) == 0)
10101 pmop[which] = NULL;
10102 break;
10103 default:
10104 break;
10105 }
10106
10107 /* Only build anything new if we optimized one or both arguments
10108 above. */
10109 if (pmop[0] != TREE_OPERAND (arg0, 0)
10110 || (TREE_CODE (arg0) != NEGATE_EXPR
10111 && pmop[1] != TREE_OPERAND (arg0, 1)))
10112 {
10113 tree utype = TREE_TYPE (arg0);
10114 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10115 {
10116 /* Perform the operations in a type that has defined
10117 overflow behavior. */
10118 utype = unsigned_type_for (TREE_TYPE (arg0));
10119 if (pmop[0] != NULL)
10120 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10121 if (pmop[1] != NULL)
10122 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10123 }
10124
10125 if (TREE_CODE (arg0) == NEGATE_EXPR)
10126 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10127 else if (TREE_CODE (arg0) == PLUS_EXPR)
10128 {
10129 if (pmop[0] != NULL && pmop[1] != NULL)
10130 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10131 pmop[0], pmop[1]);
10132 else if (pmop[0] != NULL)
10133 tem = pmop[0];
10134 else if (pmop[1] != NULL)
10135 tem = pmop[1];
10136 else
10137 return build_int_cst (type, 0);
10138 }
10139 else if (pmop[0] == NULL)
10140 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10141 else
10142 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10143 pmop[0], pmop[1]);
10144 /* TEM is now the new binary +, - or unary - replacement. */
10145 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10146 fold_convert_loc (loc, utype, arg1));
10147 return fold_convert_loc (loc, type, tem);
10148 }
10149 }
10150 }
10151
10152 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10153 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10154 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10155 {
10156 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10157
10158 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10159 if (mask == -1)
10160 return
10161 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10162 }
10163
10164 goto associate;
10165
10166 case RDIV_EXPR:
10167 /* Don't touch a floating-point divide by zero unless the mode
10168 of the constant can represent infinity. */
10169 if (TREE_CODE (arg1) == REAL_CST
10170 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10171 && real_zerop (arg1))
10172 return NULL_TREE;
10173
10174 /* (-A) / (-B) -> A / B */
10175 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10176 return fold_build2_loc (loc, RDIV_EXPR, type,
10177 TREE_OPERAND (arg0, 0),
10178 negate_expr (arg1));
10179 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10180 return fold_build2_loc (loc, RDIV_EXPR, type,
10181 negate_expr (arg0),
10182 TREE_OPERAND (arg1, 0));
10183 return NULL_TREE;
10184
10185 case TRUNC_DIV_EXPR:
10186 /* Fall through */
10187
10188 case FLOOR_DIV_EXPR:
10189 /* Simplify A / (B << N) where A and B are positive and B is
10190 a power of 2, to A >> (N + log2(B)). */
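/* For example (illustrative): for unsigned x,
   x / (4 << n) folds to x >> (n + 2), since log2(4) == 2. */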
10191 strict_overflow_p = false;
10192 if (TREE_CODE (arg1) == LSHIFT_EXPR
10193 && (TYPE_UNSIGNED (type)
10194 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10195 {
10196 tree sval = TREE_OPERAND (arg1, 0);
10197 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10198 {
10199 tree sh_cnt = TREE_OPERAND (arg1, 1);
10200 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10201 wi::exact_log2 (sval));
10202
10203 if (strict_overflow_p)
10204 fold_overflow_warning (("assuming signed overflow does not "
10205 "occur when simplifying A / (B << N)"),
10206 WARN_STRICT_OVERFLOW_MISC);
10207
10208 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10209 sh_cnt, pow2);
10210 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10211 fold_convert_loc (loc, type, arg0), sh_cnt);
10212 }
10213 }
10214
10215 /* Fall through */
10216
10217 case ROUND_DIV_EXPR:
10218 case CEIL_DIV_EXPR:
10219 case EXACT_DIV_EXPR:
10220 if (integer_zerop (arg1))
10221 return NULL_TREE;
10222
10223 /* Convert -A / -B to A / B when the type is signed and overflow is
10224 undefined. */
10225 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10226 && TREE_CODE (op0) == NEGATE_EXPR
10227 && negate_expr_p (op1))
10228 {
10229 if (INTEGRAL_TYPE_P (type))
10230 fold_overflow_warning (("assuming signed overflow does not occur "
10231 "when distributing negation across "
10232 "division"),
10233 WARN_STRICT_OVERFLOW_MISC);
10234 return fold_build2_loc (loc, code, type,
10235 fold_convert_loc (loc, type,
10236 TREE_OPERAND (arg0, 0)),
10237 negate_expr (op1));
10238 }
10239 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10240 && TREE_CODE (arg1) == NEGATE_EXPR
10241 && negate_expr_p (op0))
10242 {
10243 if (INTEGRAL_TYPE_P (type))
10244 fold_overflow_warning (("assuming signed overflow does not occur "
10245 "when distributing negation across "
10246 "division"),
10247 WARN_STRICT_OVERFLOW_MISC);
10248 return fold_build2_loc (loc, code, type,
10249 negate_expr (op0),
10250 fold_convert_loc (loc, type,
10251 TREE_OPERAND (arg1, 0)));
10252 }
10253
10254 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10255 operation, EXACT_DIV_EXPR.
10256
10257 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10258 At one time others generated faster code, but it's not clear if they
10259 do after the last round of changes to the DIV code in expmed.c. */
10260 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10261 && multiple_of_p (type, arg0, arg1))
10262 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10263 fold_convert (type, arg0),
10264 fold_convert (type, arg1));
10265
10266 strict_overflow_p = false;
10267 if (TREE_CODE (arg1) == INTEGER_CST
10268 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10269 &strict_overflow_p)))
10270 {
10271 if (strict_overflow_p)
10272 fold_overflow_warning (("assuming signed overflow does not occur "
10273 "when simplifying division"),
10274 WARN_STRICT_OVERFLOW_MISC);
10275 return fold_convert_loc (loc, type, tem);
10276 }
10277
10278 return NULL_TREE;
10279
10280 case CEIL_MOD_EXPR:
10281 case FLOOR_MOD_EXPR:
10282 case ROUND_MOD_EXPR:
10283 case TRUNC_MOD_EXPR:
10284 strict_overflow_p = false;
10285 if (TREE_CODE (arg1) == INTEGER_CST
10286 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10287 &strict_overflow_p)))
10288 {
10289 if (strict_overflow_p)
10290 fold_overflow_warning (("assuming signed overflow does not occur "
10291 "when simplifying modulus"),
10292 WARN_STRICT_OVERFLOW_MISC);
10293 return fold_convert_loc (loc, type, tem);
10294 }
10295
10296 return NULL_TREE;
10297
10298 case LROTATE_EXPR:
10299 case RROTATE_EXPR:
10300 case RSHIFT_EXPR:
10301 case LSHIFT_EXPR:
10302 /* Since a negative shift count is not well-defined,
10303 don't try to compute it in the compiler. */
10304 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10305 return NULL_TREE;
10306
10307 prec = element_precision (type);
10308
10309 /* If we have a rotate of a bit operation with the rotate count and
10310 the second operand of the bit operation both constant,
10311 permute the two operations. */
10312 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10313 && (TREE_CODE (arg0) == BIT_AND_EXPR
10314 || TREE_CODE (arg0) == BIT_IOR_EXPR
10315 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10316 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10317 {
10318 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10319 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10320 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10321 fold_build2_loc (loc, code, type,
10322 arg00, arg1),
10323 fold_build2_loc (loc, code, type,
10324 arg01, arg1));
10325 }
10326
10327 /* Two consecutive rotates adding up to some integer
10328 multiple of the precision of the type can be ignored. */
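/* For example (illustrative): in a 32-bit type, rotating right by 22
   a value already rotated right by 10 is the identity, since
   10 + 22 == 32. */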
10329 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10330 && TREE_CODE (arg0) == RROTATE_EXPR
10331 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10332 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10333 prec) == 0)
10334 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10335
10336 return NULL_TREE;
10337
10338 case MIN_EXPR:
10339 case MAX_EXPR:
10340 goto associate;
10341
10342 case TRUTH_ANDIF_EXPR:
10343 /* Note that the operands of this must be ints
10344 and their values must be 0 or 1.
10345 ("true" is a fixed value perhaps depending on the language.) */
10346 /* If first arg is constant zero, return it. */
10347 if (integer_zerop (arg0))
10348 return fold_convert_loc (loc, type, arg0);
10349 /* FALLTHRU */
10350 case TRUTH_AND_EXPR:
10351 /* If either arg is constant true, drop it. */
10352 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10353 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10354 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10355 /* Preserve sequence points. */
10356 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10357 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10358 /* If second arg is constant zero, result is zero, but first arg
10359 must be evaluated. */
10360 if (integer_zerop (arg1))
10361 return omit_one_operand_loc (loc, type, arg1, arg0);
10362 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10363 case will be handled here. */
10364 if (integer_zerop (arg0))
10365 return omit_one_operand_loc (loc, type, arg0, arg1);
10366
10367 /* !X && X is always false. */
10368 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10369 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10370 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10371 /* X && !X is always false. */
10372 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10373 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10374 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10375
10376 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10377 means A >= Y && A != MAX, but in this case we know that
10378 A < X <= MAX. */
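/* Worked example (hypothetical 8-bit signed values): A + 1 > Y by
   itself only implies A >= Y when A != 127, because 127 + 1 wraps;
   the conjunct A < X already rules out A == 127, so the rewrite to
   A >= Y is safe here.  */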
10379
10380 if (!TREE_SIDE_EFFECTS (arg0)
10381 && !TREE_SIDE_EFFECTS (arg1))
10382 {
10383 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10384 if (tem && !operand_equal_p (tem, arg0, 0))
10385 return fold_build2_loc (loc, code, type, tem, arg1);
10386
10387 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10388 if (tem && !operand_equal_p (tem, arg1, 0))
10389 return fold_build2_loc (loc, code, type, arg0, tem);
10390 }
10391
10392 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10393 != NULL_TREE)
10394 return tem;
10395
10396 return NULL_TREE;
10397
10398 case TRUTH_ORIF_EXPR:
10399 /* Note that the operands of this must be ints
10400 and their values must be 0 or 1.
10401 ("true" is a fixed value perhaps depending on the language.) */
10402 /* If first arg is constant true, return it. */
10403 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10404 return fold_convert_loc (loc, type, arg0);
10405 /* FALLTHRU */
10406 case TRUTH_OR_EXPR:
10407 /* If either arg is constant zero, drop it. */
10408 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10409 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10410 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10411 /* Preserve sequence points. */
10412 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10413 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10414 /* If second arg is constant true, result is true, but we must
10415 evaluate first arg. */
10416 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10417 return omit_one_operand_loc (loc, type, arg1, arg0);
10418 /* Likewise for first arg, but note this only occurs here for
10419 TRUTH_OR_EXPR. */
10420 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10421 return omit_one_operand_loc (loc, type, arg0, arg1);
10422
10423 /* !X || X is always true. */
10424 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10425 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10426 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10427 /* X || !X is always true. */
10428 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10429 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10430 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10431
10432 /* (X && !Y) || (!X && Y) is X ^ Y */
10433 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10434 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10435 {
10436 tree a0, a1, l0, l1, n0, n1;
10437
10438 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10439 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10440
10441 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10442 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10443
10444 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10445 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10446
10447 if ((operand_equal_p (n0, a0, 0)
10448 && operand_equal_p (n1, a1, 0))
10449 || (operand_equal_p (n0, a1, 0)
10450 && operand_equal_p (n1, a0, 0)))
10451 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10452 }
10453
10454 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10455 != NULL_TREE)
10456 return tem;
10457
10458 return NULL_TREE;
10459
10460 case TRUTH_XOR_EXPR:
10461 /* If the second arg is constant zero, drop it. */
10462 if (integer_zerop (arg1))
10463 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10464 /* If the second arg is constant true, this is a logical inversion. */
10465 if (integer_onep (arg1))
10466 {
10467 tem = invert_truthvalue_loc (loc, arg0);
10468 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10469 }
10470 /* Identical arguments cancel to zero. */
10471 if (operand_equal_p (arg0, arg1, 0))
10472 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10473
10474 /* !X ^ X is always true. */
10475 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10476 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10477 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10478
10479 /* X ^ !X is always true. */
10480 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10481 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10482 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10483
10484 return NULL_TREE;
10485
10486 case EQ_EXPR:
10487 case NE_EXPR:
10488 STRIP_NOPS (arg0);
10489 STRIP_NOPS (arg1);
10490
10491 tem = fold_comparison (loc, code, type, op0, op1);
10492 if (tem != NULL_TREE)
10493 return tem;
10494
10495 /* bool_var != 1 becomes !bool_var. */
10496 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10497 && code == NE_EXPR)
10498 return fold_convert_loc (loc, type,
10499 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10500 TREE_TYPE (arg0), arg0));
10501
10502 /* bool_var == 0 becomes !bool_var. */
10503 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10504 && code == EQ_EXPR)
10505 return fold_convert_loc (loc, type,
10506 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10507 TREE_TYPE (arg0), arg0));
10508
10509 /* !exp != 0 becomes !exp */
10510 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10511 && code == NE_EXPR)
10512 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10513
10514 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
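/* E.g. (X + Y) == X folds to Y == 0 and (X - Y) != X folds to
   Y != 0; omit_two_operands_loc below keeps any side effects of the
   discarded operands.  */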
10515 if ((TREE_CODE (arg0) == PLUS_EXPR
10516 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10517 || TREE_CODE (arg0) == MINUS_EXPR)
10518 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10519 0)),
10520 arg1, 0)
10521 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10522 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10523 {
10524 tree val = TREE_OPERAND (arg0, 1);
10525 val = fold_build2_loc (loc, code, type, val,
10526 build_int_cst (TREE_TYPE (val), 0));
10527 return omit_two_operands_loc (loc, type, val,
10528 TREE_OPERAND (arg0, 0), arg1);
10529 }
10530
10531 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10532 if ((TREE_CODE (arg1) == PLUS_EXPR
10533 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10534 || TREE_CODE (arg1) == MINUS_EXPR)
10535 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10536 0)),
10537 arg0, 0)
10538 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10539 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10540 {
10541 tree val = TREE_OPERAND (arg1, 1);
10542 val = fold_build2_loc (loc, code, type, val,
10543 build_int_cst (TREE_TYPE (val), 0));
10544 return omit_two_operands_loc (loc, type, val,
10545 TREE_OPERAND (arg1, 0), arg0);
10546 }
10547
10548 /* If this is an EQ or NE comparison with zero and ARG0 is
10549 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10550 two operations, but the latter can be done in one less insn
10551 on machines that have only two-operand insns or on which a
10552 constant cannot be the first operand. */
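/* For instance, ((1 << foo) & bar) != 0 tests bit FOO of BAR and is
   rewritten as ((bar >> foo) & 1) != 0, which can save an
   instruction on two-operand machines.  */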
10553 if (TREE_CODE (arg0) == BIT_AND_EXPR
10554 && integer_zerop (arg1))
10555 {
10556 tree arg00 = TREE_OPERAND (arg0, 0);
10557 tree arg01 = TREE_OPERAND (arg0, 1);
10558 if (TREE_CODE (arg00) == LSHIFT_EXPR
10559 && integer_onep (TREE_OPERAND (arg00, 0)))
10560 {
10561 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10562 arg01, TREE_OPERAND (arg00, 1));
10563 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10564 build_int_cst (TREE_TYPE (arg0), 1));
10565 return fold_build2_loc (loc, code, type,
10566 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10567 arg1);
10568 }
10569 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10570 && integer_onep (TREE_OPERAND (arg01, 0)))
10571 {
10572 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10573 arg00, TREE_OPERAND (arg01, 1));
10574 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10575 build_int_cst (TREE_TYPE (arg0), 1));
10576 return fold_build2_loc (loc, code, type,
10577 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10578 arg1);
10579 }
10580 }
10581
10582 /* If this is an NE or EQ comparison of zero against the result of a
10583 signed MOD operation whose second operand is a power of 2, make
10584 the MOD operation unsigned since it is simpler and equivalent. */
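/* E.g. for signed X, X % 4 == 0 is equivalent to
   (unsigned) X % 4 == 0: divisibility by a power of two depends only
   on the low bits, not on the sign, so the cheaper unsigned modulus
   suffices.  */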
10585 if (integer_zerop (arg1)
10586 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10587 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10588 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10589 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10590 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10591 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10592 {
10593 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10594 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10595 fold_convert_loc (loc, newtype,
10596 TREE_OPERAND (arg0, 0)),
10597 fold_convert_loc (loc, newtype,
10598 TREE_OPERAND (arg0, 1)));
10599
10600 return fold_build2_loc (loc, code, type, newmod,
10601 fold_convert_loc (loc, newtype, arg1));
10602 }
10603
10604 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10605 C1 is a valid shift constant, and C2 is a power of two, i.e.
10606 a single bit. */
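/* Sketch with assumed constants: for C1 == 3 and C2 == 16 (bit 4),
   ((X >> 3) & 16) != 0 becomes (X & (16 << 3)) != 0, i.e. a direct
   test of bit 7 of X; the checks below guard the case where
   C2 << C1 would fall outside the precision.  */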
10607 if (TREE_CODE (arg0) == BIT_AND_EXPR
10608 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10609 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10610 == INTEGER_CST
10611 && integer_pow2p (TREE_OPERAND (arg0, 1))
10612 && integer_zerop (arg1))
10613 {
10614 tree itype = TREE_TYPE (arg0);
10615 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10616 prec = TYPE_PRECISION (itype);
10617
10618 /* Check for a valid shift count. */
10619 if (wi::ltu_p (arg001, prec))
10620 {
10621 tree arg01 = TREE_OPERAND (arg0, 1);
10622 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10623 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10624 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10625 can be rewritten as (X & (C2 << C1)) != 0. */
10626 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10627 {
10628 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10629 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10630 return fold_build2_loc (loc, code, type, tem,
10631 fold_convert_loc (loc, itype, arg1));
10632 }
10633 /* Otherwise, for signed (arithmetic) shifts,
10634 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10635 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10636 else if (!TYPE_UNSIGNED (itype))
10637 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10638 arg000, build_int_cst (itype, 0));
10639 /* Otherwise, for unsigned (logical) shifts,
10640 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10641 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10642 else
10643 return omit_one_operand_loc (loc, type,
10644 code == EQ_EXPR ? integer_one_node
10645 : integer_zero_node,
10646 arg000);
10647 }
10648 }
10649
10650 /* If this is a comparison of a field, we may be able to simplify it. */
10651 if ((TREE_CODE (arg0) == COMPONENT_REF
10652 || TREE_CODE (arg0) == BIT_FIELD_REF)
10653 /* Handle the constant case even without -O
10654 to make sure the warnings are given. */
10655 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10656 {
10657 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10658 if (t1)
10659 return t1;
10660 }
10661
10662 /* Optimize comparisons of strlen vs zero to a compare of the
10663 first character of the string vs zero. To wit,
10664 strlen(ptr) == 0 => *ptr == 0
10665 strlen(ptr) != 0 => *ptr != 0
10666 Other cases should reduce to one of these two (or a constant)
10667 due to the return value of strlen being unsigned. */
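/* E.g. strlen (ptr) > 0 is the same as strlen (ptr) != 0, since an
   unsigned value cannot be negative, and thus reduces to
   *ptr != 0 here.  */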
10668 if (TREE_CODE (arg0) == CALL_EXPR
10669 && integer_zerop (arg1))
10670 {
10671 tree fndecl = get_callee_fndecl (arg0);
10672
10673 if (fndecl
10674 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10675 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10676 && call_expr_nargs (arg0) == 1
10677 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10678 {
10679 tree iref = build_fold_indirect_ref_loc (loc,
10680 CALL_EXPR_ARG (arg0, 0));
10681 return fold_build2_loc (loc, code, type, iref,
10682 build_int_cst (TREE_TYPE (iref), 0));
10683 }
10684 }
10685
10686 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10687 of X. Similarly fold (X >> C) == 0 into X >= 0. */
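/* Illustration (assuming 32-bit X): X >> 31 is -1 or 0 for signed X,
   so (X >> 31) != 0 is exactly X < 0; for unsigned X the code below
   first converts to the corresponding signed type.  */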
10688 if (TREE_CODE (arg0) == RSHIFT_EXPR
10689 && integer_zerop (arg1)
10690 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10691 {
10692 tree arg00 = TREE_OPERAND (arg0, 0);
10693 tree arg01 = TREE_OPERAND (arg0, 1);
10694 tree itype = TREE_TYPE (arg00);
10695 if (wi::eq_p (arg01, element_precision (itype) - 1))
10696 {
10697 if (TYPE_UNSIGNED (itype))
10698 {
10699 itype = signed_type_for (itype);
10700 arg00 = fold_convert_loc (loc, itype, arg00);
10701 }
10702 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10703 type, arg00, build_zero_cst (itype));
10704 }
10705 }
10706
10707 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10708 (X & C) == 0 when C is a single bit. */
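/* E.g. with C == 8: (~X & 8) == 0 holds exactly when bit 3 of X is
   set, i.e. when (X & 8) != 0, so the BIT_NOT_EXPR is dropped by
   inverting the comparison.  */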
10709 if (TREE_CODE (arg0) == BIT_AND_EXPR
10710 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10711 && integer_zerop (arg1)
10712 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10713 {
10714 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10715 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10716 TREE_OPERAND (arg0, 1));
10717 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10718 type, tem,
10719 fold_convert_loc (loc, TREE_TYPE (arg0),
10720 arg1));
10721 }
10722
10723 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10724 constant C is a power of two, i.e. a single bit. */
10725 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10726 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10727 && integer_zerop (arg1)
10728 && integer_pow2p (TREE_OPERAND (arg0, 1))
10729 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10730 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10731 {
10732 tree arg00 = TREE_OPERAND (arg0, 0);
10733 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10734 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10735 }
10736
10737 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10738 when C is a power of two, i.e. a single bit. */
10739 if (TREE_CODE (arg0) == BIT_AND_EXPR
10740 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10741 && integer_zerop (arg1)
10742 && integer_pow2p (TREE_OPERAND (arg0, 1))
10743 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10744 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10745 {
10746 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10747 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10748 arg000, TREE_OPERAND (arg0, 1));
10749 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10750 tem, build_int_cst (TREE_TYPE (tem), 0));
10751 }
10752
10753 if (integer_zerop (arg1)
10754 && tree_expr_nonzero_p (arg0))
10755 {
10756 tree res = constant_boolean_node (code == NE_EXPR, type);
10757 return omit_one_operand_loc (loc, type, res, arg0);
10758 }
10759
10760 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
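/* Rationale sketch: the bits of X and Y selected by C agree exactly
   when their XOR has no bit inside C, hence (X & C) == (Y & C) iff
   ((X ^ Y) & C) == 0, and likewise for !=.  */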
10761 if (TREE_CODE (arg0) == BIT_AND_EXPR
10762 && TREE_CODE (arg1) == BIT_AND_EXPR)
10763 {
10764 tree arg00 = TREE_OPERAND (arg0, 0);
10765 tree arg01 = TREE_OPERAND (arg0, 1);
10766 tree arg10 = TREE_OPERAND (arg1, 0);
10767 tree arg11 = TREE_OPERAND (arg1, 1);
10768 tree itype = TREE_TYPE (arg0);
10769
10770 if (operand_equal_p (arg01, arg11, 0))
10771 {
10772 tem = fold_convert_loc (loc, itype, arg10);
10773 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10774 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10775 return fold_build2_loc (loc, code, type, tem,
10776 build_zero_cst (itype));
10777 }
10778 if (operand_equal_p (arg01, arg10, 0))
10779 {
10780 tem = fold_convert_loc (loc, itype, arg11);
10781 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10782 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10783 return fold_build2_loc (loc, code, type, tem,
10784 build_zero_cst (itype));
10785 }
10786 if (operand_equal_p (arg00, arg11, 0))
10787 {
10788 tem = fold_convert_loc (loc, itype, arg10);
10789 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10790 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10791 return fold_build2_loc (loc, code, type, tem,
10792 build_zero_cst (itype));
10793 }
10794 if (operand_equal_p (arg00, arg10, 0))
10795 {
10796 tem = fold_convert_loc (loc, itype, arg11);
10797 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10798 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10799 return fold_build2_loc (loc, code, type, tem,
10800 build_zero_cst (itype));
10801 }
10802 }
10803
10804 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10805 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10806 {
10807 tree arg00 = TREE_OPERAND (arg0, 0);
10808 tree arg01 = TREE_OPERAND (arg0, 1);
10809 tree arg10 = TREE_OPERAND (arg1, 0);
10810 tree arg11 = TREE_OPERAND (arg1, 1);
10811 tree itype = TREE_TYPE (arg0);
10812
10813 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10814 operand_equal_p guarantees no side-effects so we don't need
10815 to use omit_one_operand on Z. */
10816 if (operand_equal_p (arg01, arg11, 0))
10817 return fold_build2_loc (loc, code, type, arg00,
10818 fold_convert_loc (loc, TREE_TYPE (arg00),
10819 arg10));
10820 if (operand_equal_p (arg01, arg10, 0))
10821 return fold_build2_loc (loc, code, type, arg00,
10822 fold_convert_loc (loc, TREE_TYPE (arg00),
10823 arg11));
10824 if (operand_equal_p (arg00, arg11, 0))
10825 return fold_build2_loc (loc, code, type, arg01,
10826 fold_convert_loc (loc, TREE_TYPE (arg01),
10827 arg10));
10828 if (operand_equal_p (arg00, arg10, 0))
10829 return fold_build2_loc (loc, code, type, arg01,
10830 fold_convert_loc (loc, TREE_TYPE (arg01),
10831 arg11));
10832
10833 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10834 if (TREE_CODE (arg01) == INTEGER_CST
10835 && TREE_CODE (arg11) == INTEGER_CST)
10836 {
10837 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10838 fold_convert_loc (loc, itype, arg11));
10839 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10840 return fold_build2_loc (loc, code, type, tem,
10841 fold_convert_loc (loc, itype, arg10));
10842 }
10843 }
10844
10845 /* Attempt to simplify equality/inequality comparisons of complex
10846 values. Only lower the comparison if the result is known or
10847 can be simplified to a single scalar comparison. */
10848 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10849 || TREE_CODE (arg0) == COMPLEX_CST)
10850 && (TREE_CODE (arg1) == COMPLEX_EXPR
10851 || TREE_CODE (arg1) == COMPLEX_CST))
10852 {
10853 tree real0, imag0, real1, imag1;
10854 tree rcond, icond;
10855
10856 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10857 {
10858 real0 = TREE_OPERAND (arg0, 0);
10859 imag0 = TREE_OPERAND (arg0, 1);
10860 }
10861 else
10862 {
10863 real0 = TREE_REALPART (arg0);
10864 imag0 = TREE_IMAGPART (arg0);
10865 }
10866
10867 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10868 {
10869 real1 = TREE_OPERAND (arg1, 0);
10870 imag1 = TREE_OPERAND (arg1, 1);
10871 }
10872 else
10873 {
10874 real1 = TREE_REALPART (arg1);
10875 imag1 = TREE_IMAGPART (arg1);
10876 }
10877
10878 rcond = fold_binary_loc (loc, code, type, real0, real1);
10879 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10880 {
10881 if (integer_zerop (rcond))
10882 {
10883 if (code == EQ_EXPR)
10884 return omit_two_operands_loc (loc, type, boolean_false_node,
10885 imag0, imag1);
10886 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10887 }
10888 else
10889 {
10890 if (code == NE_EXPR)
10891 return omit_two_operands_loc (loc, type, boolean_true_node,
10892 imag0, imag1);
10893 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10894 }
10895 }
10896
10897 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10898 if (icond && TREE_CODE (icond) == INTEGER_CST)
10899 {
10900 if (integer_zerop (icond))
10901 {
10902 if (code == EQ_EXPR)
10903 return omit_two_operands_loc (loc, type, boolean_false_node,
10904 real0, real1);
10905 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10906 }
10907 else
10908 {
10909 if (code == NE_EXPR)
10910 return omit_two_operands_loc (loc, type, boolean_true_node,
10911 real0, real1);
10912 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10913 }
10914 }
10915 }
10916
10917 return NULL_TREE;
10918
10919 case LT_EXPR:
10920 case GT_EXPR:
10921 case LE_EXPR:
10922 case GE_EXPR:
10923 tem = fold_comparison (loc, code, type, op0, op1);
10924 if (tem != NULL_TREE)
10925 return tem;
10926
10927 /* Transform comparisons of the form X +- C CMP X. */
10928 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10929 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10930 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10931 && !HONOR_SNANS (arg0))
10932 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10933 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10934 {
10935 tree arg01 = TREE_OPERAND (arg0, 1);
10936 enum tree_code code0 = TREE_CODE (arg0);
10937 int is_positive;
10938
10939 if (TREE_CODE (arg01) == REAL_CST)
10940 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10941 else
10942 is_positive = tree_int_cst_sgn (arg01);
10943
10944 /* (X - c) > X becomes false. */
10945 if (code == GT_EXPR
10946 && ((code0 == MINUS_EXPR && is_positive >= 0)
10947 || (code0 == PLUS_EXPR && is_positive <= 0)))
10948 {
10949 if (TREE_CODE (arg01) == INTEGER_CST
10950 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10951 fold_overflow_warning (("assuming signed overflow does not "
10952 "occur when assuming that (X - c) > X "
10953 "is always false"),
10954 WARN_STRICT_OVERFLOW_ALL);
10955 return constant_boolean_node (0, type);
10956 }
10957
10958 /* Likewise (X + c) < X becomes false. */
10959 if (code == LT_EXPR
10960 && ((code0 == PLUS_EXPR && is_positive >= 0)
10961 || (code0 == MINUS_EXPR && is_positive <= 0)))
10962 {
10963 if (TREE_CODE (arg01) == INTEGER_CST
10964 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10965 fold_overflow_warning (("assuming signed overflow does not "
10966 "occur when assuming that "
10967 "(X + c) < X is always false"),
10968 WARN_STRICT_OVERFLOW_ALL);
10969 return constant_boolean_node (0, type);
10970 }
10971
10972 /* Convert (X - c) <= X to true. */
10973 if (!HONOR_NANS (arg1)
10974 && code == LE_EXPR
10975 && ((code0 == MINUS_EXPR && is_positive >= 0)
10976 || (code0 == PLUS_EXPR && is_positive <= 0)))
10977 {
10978 if (TREE_CODE (arg01) == INTEGER_CST
10979 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10980 fold_overflow_warning (("assuming signed overflow does not "
10981 "occur when assuming that "
10982 "(X - c) <= X is always true"),
10983 WARN_STRICT_OVERFLOW_ALL);
10984 return constant_boolean_node (1, type);
10985 }
10986
10987 /* Convert (X + c) >= X to true. */
10988 if (!HONOR_NANS (arg1)
10989 && code == GE_EXPR
10990 && ((code0 == PLUS_EXPR && is_positive >= 0)
10991 || (code0 == MINUS_EXPR && is_positive <= 0)))
10992 {
10993 if (TREE_CODE (arg01) == INTEGER_CST
10994 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10995 fold_overflow_warning (("assuming signed overflow does not "
10996 "occur when assuming that "
10997 "(X + c) >= X is always true"),
10998 WARN_STRICT_OVERFLOW_ALL);
10999 return constant_boolean_node (1, type);
11000 }
11001
11002 if (TREE_CODE (arg01) == INTEGER_CST)
11003 {
11004 /* Convert X + c > X and X - c < X to true for integers. */
11005 if (code == GT_EXPR
11006 && ((code0 == PLUS_EXPR && is_positive > 0)
11007 || (code0 == MINUS_EXPR && is_positive < 0)))
11008 {
11009 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11010 fold_overflow_warning (("assuming signed overflow does "
11011 "not occur when assuming that "
11012 "(X + c) > X is always true"),
11013 WARN_STRICT_OVERFLOW_ALL);
11014 return constant_boolean_node (1, type);
11015 }
11016
11017 if (code == LT_EXPR
11018 && ((code0 == MINUS_EXPR && is_positive > 0)
11019 || (code0 == PLUS_EXPR && is_positive < 0)))
11020 {
11021 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11022 fold_overflow_warning (("assuming signed overflow does "
11023 "not occur when assuming that "
11024 "(X - c) < X is always true"),
11025 WARN_STRICT_OVERFLOW_ALL);
11026 return constant_boolean_node (1, type);
11027 }
11028
11029 /* Convert X + c <= X and X - c >= X to false for integers. */
11030 if (code == LE_EXPR
11031 && ((code0 == PLUS_EXPR && is_positive > 0)
11032 || (code0 == MINUS_EXPR && is_positive < 0)))
11033 {
11034 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11035 fold_overflow_warning (("assuming signed overflow does "
11036 "not occur when assuming that "
11037 "(X + c) <= X is always false"),
11038 WARN_STRICT_OVERFLOW_ALL);
11039 return constant_boolean_node (0, type);
11040 }
11041
11042 if (code == GE_EXPR
11043 && ((code0 == MINUS_EXPR && is_positive > 0)
11044 || (code0 == PLUS_EXPR && is_positive < 0)))
11045 {
11046 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11047 fold_overflow_warning (("assuming signed overflow does "
11048 "not occur when assuming that "
11049 "(X - c) >= X is always false"),
11050 WARN_STRICT_OVERFLOW_ALL);
11051 return constant_boolean_node (0, type);
11052 }
11053 }
11054 }
11055
11056 /* If we are comparing an ABS_EXPR with a constant, we can
11057 convert all the cases into explicit comparisons, but they may
11058 well not be faster than doing the ABS and one comparison.
11059 But ABS (X) <= C is a range comparison, which becomes a subtraction
11060 and a comparison, and is probably faster. */
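/* E.g. ABS (X) <= 5 becomes X >= -5 && X <= 5, which the range
   check machinery can in turn express as the single unsigned
   comparison (unsigned) (X + 5) <= 10.  */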
11061 if (code == LE_EXPR
11062 && TREE_CODE (arg1) == INTEGER_CST
11063 && TREE_CODE (arg0) == ABS_EXPR
11064 && ! TREE_SIDE_EFFECTS (arg0)
11065 && (0 != (tem = negate_expr (arg1)))
11066 && TREE_CODE (tem) == INTEGER_CST
11067 && !TREE_OVERFLOW (tem))
11068 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11069 build2 (GE_EXPR, type,
11070 TREE_OPERAND (arg0, 0), tem),
11071 build2 (LE_EXPR, type,
11072 TREE_OPERAND (arg0, 0), arg1));
11073
11074 /* Convert ABS_EXPR<x> >= 0 to true. */
11075 strict_overflow_p = false;
11076 if (code == GE_EXPR
11077 && (integer_zerop (arg1)
11078 || (! HONOR_NANS (arg0)
11079 && real_zerop (arg1)))
11080 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11081 {
11082 if (strict_overflow_p)
11083 fold_overflow_warning (("assuming signed overflow does not occur "
11084 "when simplifying comparison of "
11085 "absolute value and zero"),
11086 WARN_STRICT_OVERFLOW_CONDITIONAL);
11087 return omit_one_operand_loc (loc, type,
11088 constant_boolean_node (true, type),
11089 arg0);
11090 }
11091
11092 /* Convert ABS_EXPR<x> < 0 to false. */
11093 strict_overflow_p = false;
11094 if (code == LT_EXPR
11095 && (integer_zerop (arg1) || real_zerop (arg1))
11096 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11097 {
11098 if (strict_overflow_p)
11099 fold_overflow_warning (("assuming signed overflow does not occur "
11100 "when simplifying comparison of "
11101 "absolute value and zero"),
11102 WARN_STRICT_OVERFLOW_CONDITIONAL);
11103 return omit_one_operand_loc (loc, type,
11104 constant_boolean_node (false, type),
11105 arg0);
11106 }
11107
11108 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11109 and similarly for >= into !=. */
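/* E.g. for unsigned X, X < (1 << Y) says that no bit of X at
   position Y or above is set, which is precisely (X >> Y) == 0, and
   avoids materializing the shifted constant.  */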
11110 if ((code == LT_EXPR || code == GE_EXPR)
11111 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11112 && TREE_CODE (arg1) == LSHIFT_EXPR
11113 && integer_onep (TREE_OPERAND (arg1, 0)))
11114 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11115 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11116 TREE_OPERAND (arg1, 1)),
11117 build_zero_cst (TREE_TYPE (arg0)));
11118
11119 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11120 otherwise Y might be >= # of bits in X's type and thus e.g.
11121 (unsigned char) (1 << Y) for Y == 15 might be 0.
11122 If the cast is widening, then 1 << Y should have unsigned type,
11123 otherwise if Y is the number of bits in the signed shift type minus 1,
11124 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11125 Y == 31 might be 0xffffffff80000000. */
11126 if ((code == LT_EXPR || code == GE_EXPR)
11127 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11128 && CONVERT_EXPR_P (arg1)
11129 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11130 && (element_precision (TREE_TYPE (arg1))
11131 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11132 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11133 || (element_precision (TREE_TYPE (arg1))
11134 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11135 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11136 {
11137 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11138 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11139 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11140 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11141 build_zero_cst (TREE_TYPE (arg0)));
11142 }
11143
11144 return NULL_TREE;
11145
11146 case UNORDERED_EXPR:
11147 case ORDERED_EXPR:
11148 case UNLT_EXPR:
11149 case UNLE_EXPR:
11150 case UNGT_EXPR:
11151 case UNGE_EXPR:
11152 case UNEQ_EXPR:
11153 case LTGT_EXPR:
11154 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11155 {
11156 tree targ0 = strip_float_extensions (arg0);
11157 tree targ1 = strip_float_extensions (arg1);
11158 tree newtype = TREE_TYPE (targ0);
11159
11160 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11161 newtype = TREE_TYPE (targ1);
11162
11163 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11164 return fold_build2_loc (loc, code, type,
11165 fold_convert_loc (loc, newtype, targ0),
11166 fold_convert_loc (loc, newtype, targ1));
11167 }
11168
11169 return NULL_TREE;
11170
11171 case COMPOUND_EXPR:
11172 /* When pedantic, a compound expression can be neither an lvalue
11173 nor an integer constant expression. */
11174 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11175 return NULL_TREE;
11176 /* Don't let (0, 0) be a null pointer constant. */
11177 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11178 : fold_convert_loc (loc, type, arg1);
11179 return pedantic_non_lvalue_loc (loc, tem);
11180
11181 case ASSERT_EXPR:
11182 /* An ASSERT_EXPR should never be passed to fold_binary. */
11183 gcc_unreachable ();
11184
11185 default:
11186 return NULL_TREE;
11187 } /* switch (code) */
11188 }
11189
11190 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11191 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11192 of GOTO_EXPR. */
11193
11194 static tree
11195 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11196 {
11197 switch (TREE_CODE (*tp))
11198 {
11199 case LABEL_EXPR:
11200 return *tp;
11201
11202 case GOTO_EXPR:
11203 *walk_subtrees = 0;
11204
11205 /* fall through */
11206
11207 default:
11208 return NULL_TREE;
11209 }
11210 }
11211
11212 /* Return whether the sub-tree ST contains a label which is accessible from
11213 outside the sub-tree. */
11214
11215 static bool
11216 contains_label_p (tree st)
11217 {
11218 return
11219 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11220 }
11221
11222 /* Fold a ternary expression of code CODE and type TYPE with operands
11223 OP0, OP1, and OP2. Return the folded expression if folding is
11224 successful. Otherwise, return NULL_TREE. */
11225
11226 tree
11227 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11228 tree op0, tree op1, tree op2)
11229 {
11230 tree tem;
11231 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11232 enum tree_code_class kind = TREE_CODE_CLASS (code);
11233
11234 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11235 && TREE_CODE_LENGTH (code) == 3);
11236
11237 /* If this is a commutative operation, and OP0 is a constant, move it
11238 to OP1 to reduce the number of tests below. */
11239 if (commutative_ternary_tree_code (code)
11240 && tree_swap_operands_p (op0, op1))
11241 return fold_build3_loc (loc, code, type, op1, op0, op2);
11242
11243 tem = generic_simplify (loc, code, type, op0, op1, op2);
11244 if (tem)
11245 return tem;
11246
11247 /* Strip any conversions that don't change the mode. This is safe
11248 for every expression, except for a comparison expression because
11249 its signedness is derived from its operands. So, in the latter
11250 case, only strip conversions that don't change the signedness.
11251
11252 Note that this is done as an internal manipulation within the
11253 constant folder, in order to find the simplest representation of
11254 the arguments so that their form can be studied. In any cases,
11255 the appropriate type conversions should be put back in the tree
11256 that will get out of the constant folder. */
11257 if (op0)
11258 {
11259 arg0 = op0;
11260 STRIP_NOPS (arg0);
11261 }
11262
11263 if (op1)
11264 {
11265 arg1 = op1;
11266 STRIP_NOPS (arg1);
11267 }
11268
11269 if (op2)
11270 {
11271 arg2 = op2;
11272 STRIP_NOPS (arg2);
11273 }
11274
11275 switch (code)
11276 {
11277 case COMPONENT_REF:
11278 if (TREE_CODE (arg0) == CONSTRUCTOR
11279 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11280 {
11281 unsigned HOST_WIDE_INT idx;
11282 tree field, value;
11283 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11284 if (field == arg1)
11285 return value;
11286 }
11287 return NULL_TREE;
11288
11289 case COND_EXPR:
11290 case VEC_COND_EXPR:
11291 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11292 so all simple results must be passed through pedantic_non_lvalue. */
11293 if (TREE_CODE (arg0) == INTEGER_CST)
11294 {
11295 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11296 tem = integer_zerop (arg0) ? op2 : op1;
11297 /* Only optimize constant conditions when the selected branch
11298 has the same type as the COND_EXPR. This avoids optimizing
11299 away "c ? x : throw", where the throw has a void type.
11300 Also avoid throwing away an operand that contains a label. */
11301 if ((!TREE_SIDE_EFFECTS (unused_op)
11302 || !contains_label_p (unused_op))
11303 && (! VOID_TYPE_P (TREE_TYPE (tem))
11304 || VOID_TYPE_P (type)))
11305 return pedantic_non_lvalue_loc (loc, tem);
11306 return NULL_TREE;
11307 }
11308 else if (TREE_CODE (arg0) == VECTOR_CST)
11309 {
11310 if ((TREE_CODE (arg1) == VECTOR_CST
11311 || TREE_CODE (arg1) == CONSTRUCTOR)
11312 && (TREE_CODE (arg2) == VECTOR_CST
11313 || TREE_CODE (arg2) == CONSTRUCTOR))
11314 {
11315 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11316 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11317 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11318 for (i = 0; i < nelts; i++)
11319 {
11320 tree val = VECTOR_CST_ELT (arg0, i);
11321 if (integer_all_onesp (val))
11322 sel[i] = i;
11323 else if (integer_zerop (val))
11324 sel[i] = nelts + i;
11325 else /* Currently unreachable. */
11326 return NULL_TREE;
11327 }
11328 tree t = fold_vec_perm (type, arg1, arg2, sel);
11329 if (t != NULL_TREE)
11330 return t;
11331 }
11332 }
11333
11334 /* If we have A op B ? A : C, we may be able to convert this to a
11335 simpler expression, depending on the operation and the values
11336 of B and C. Signed zeros prevent all of these transformations,
11337 for reasons given above each one.
11338
11339 Also try swapping the arguments and inverting the conditional. */
11340 if (COMPARISON_CLASS_P (arg0)
11341 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11342 arg1, TREE_OPERAND (arg0, 1))
11343 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11344 {
11345 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11346 if (tem)
11347 return tem;
11348 }
11349
11350 if (COMPARISON_CLASS_P (arg0)
11351 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11352 op2,
11353 TREE_OPERAND (arg0, 1))
11354 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11355 {
11356 location_t loc0 = expr_location_or (arg0, loc);
11357 tem = fold_invert_truthvalue (loc0, arg0);
11358 if (tem && COMPARISON_CLASS_P (tem))
11359 {
11360 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11361 if (tem)
11362 return tem;
11363 }
11364 }
11365
11366 /* If the second operand is simpler than the third, swap them
11367 since that produces better jump optimization results. */
11368 if (truth_value_p (TREE_CODE (arg0))
11369 && tree_swap_operands_p (op1, op2))
11370 {
11371 location_t loc0 = expr_location_or (arg0, loc);
11372 /* See if this can be inverted. If it can't, possibly because
11373 it was a floating-point inequality comparison, don't do
11374 anything. */
11375 tem = fold_invert_truthvalue (loc0, arg0);
11376 if (tem)
11377 return fold_build3_loc (loc, code, type, tem, op2, op1);
11378 }
11379
11380 /* Convert A ? 1 : 0 to simply A. */
11381 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11382 : (integer_onep (op1)
11383 && !VECTOR_TYPE_P (type)))
11384 && integer_zerop (op2)
11385 /* If we try to convert OP0 to our type, the
11386 call to fold will try to move the conversion inside
11387 a COND, which will recurse. In that case, the COND_EXPR
11388 is probably the best choice, so leave it alone. */
11389 && type == TREE_TYPE (arg0))
11390 return pedantic_non_lvalue_loc (loc, arg0);
11391
11392 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11393 over COND_EXPR in cases such as floating point comparisons. */
11394 if (integer_zerop (op1)
11395 && code == COND_EXPR
11396 && integer_onep (op2)
11397 && !VECTOR_TYPE_P (type)
11398 && truth_value_p (TREE_CODE (arg0)))
11399 return pedantic_non_lvalue_loc (loc,
11400 fold_convert_loc (loc, type,
11401 invert_truthvalue_loc (loc,
11402 arg0)));
11403
11404 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
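/* E.g. for 32-bit A: A < 0 ? 0x80000000 : 0 selects exactly the
   sign bit of A, so it equals A & 0x80000000.  */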
11405 if (TREE_CODE (arg0) == LT_EXPR
11406 && integer_zerop (TREE_OPERAND (arg0, 1))
11407 && integer_zerop (op2)
11408 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11409 {
11410 /* sign_bit_p looks through both zero and sign extensions,
11411 but for this optimization only sign extensions are
11412 usable. */
11413 tree tem2 = TREE_OPERAND (arg0, 0);
11414 while (tem != tem2)
11415 {
11416 if (TREE_CODE (tem2) != NOP_EXPR
11417 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11418 {
11419 tem = NULL_TREE;
11420 break;
11421 }
11422 tem2 = TREE_OPERAND (tem2, 0);
11423 }
11424 /* sign_bit_p only checks ARG1 bits within A's precision.
11425 If <sign bit of A> has wider type than A, bits outside
11426 of A's precision in <sign bit of A> need to be checked.
11427 If they are all 0, this optimization needs to be done
11428 in unsigned A's type; if they are all 1, in signed A's type;
11429 otherwise this can't be done. */
11430 if (tem
11431 && TYPE_PRECISION (TREE_TYPE (tem))
11432 < TYPE_PRECISION (TREE_TYPE (arg1))
11433 && TYPE_PRECISION (TREE_TYPE (tem))
11434 < TYPE_PRECISION (type))
11435 {
11436 int inner_width, outer_width;
11437 tree tem_type;
11438
11439 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11440 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11441 if (outer_width > TYPE_PRECISION (type))
11442 outer_width = TYPE_PRECISION (type);
11443
11444 wide_int mask = wi::shifted_mask
11445 (inner_width, outer_width - inner_width, false,
11446 TYPE_PRECISION (TREE_TYPE (arg1)));
11447
11448 wide_int common = mask & arg1;
11449 if (common == mask)
11450 {
11451 tem_type = signed_type_for (TREE_TYPE (tem));
11452 tem = fold_convert_loc (loc, tem_type, tem);
11453 }
11454 else if (common == 0)
11455 {
11456 tem_type = unsigned_type_for (TREE_TYPE (tem));
11457 tem = fold_convert_loc (loc, tem_type, tem);
11458 }
11459 else
11460 tem = NULL;
11461 }
11462
11463 if (tem)
11464 return
11465 fold_convert_loc (loc, type,
11466 fold_build2_loc (loc, BIT_AND_EXPR,
11467 TREE_TYPE (tem), tem,
11468 fold_convert_loc (loc,
11469 TREE_TYPE (tem),
11470 arg1)));
11471 }
11472
11473 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11474 already handled above. */
11475 if (TREE_CODE (arg0) == BIT_AND_EXPR
11476 && integer_onep (TREE_OPERAND (arg0, 1))
11477 && integer_zerop (op2)
11478 && integer_pow2p (arg1))
11479 {
11480 tree tem = TREE_OPERAND (arg0, 0);
11481 STRIP_NOPS (tem);
11482 if (TREE_CODE (tem) == RSHIFT_EXPR
11483 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11484 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11485 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11486 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11487 fold_convert_loc (loc, type,
11488 TREE_OPERAND (tem, 0)),
11489 op1);
11490 }
11491
11492 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11493 is probably obsolete because the first operand should be a
11494 truth value (that's why we have the two cases above), but let's
11495 leave it in until we can confirm this for all front-ends. */
11496 if (integer_zerop (op2)
11497 && TREE_CODE (arg0) == NE_EXPR
11498 && integer_zerop (TREE_OPERAND (arg0, 1))
11499 && integer_pow2p (arg1)
11500 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11501 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11502 arg1, OEP_ONLY_CONST))
11503 return pedantic_non_lvalue_loc (loc,
11504 fold_convert_loc (loc, type,
11505 TREE_OPERAND (arg0, 0)));
11506
11507 /* Disable the transformations below for vectors, since
11508 fold_binary_op_with_conditional_arg may undo them immediately,
11509 yielding an infinite loop. */
11510 if (code == VEC_COND_EXPR)
11511 return NULL_TREE;
11512
11513 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11514 if (integer_zerop (op2)
11515 && truth_value_p (TREE_CODE (arg0))
11516 && truth_value_p (TREE_CODE (arg1))
11517 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11518 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11519 : TRUTH_ANDIF_EXPR,
11520 type, fold_convert_loc (loc, type, arg0), op1);
11521
11522 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11523 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11524 && truth_value_p (TREE_CODE (arg0))
11525 && truth_value_p (TREE_CODE (arg1))
11526 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11527 {
11528 location_t loc0 = expr_location_or (arg0, loc);
11529 /* Only perform transformation if ARG0 is easily inverted. */
11530 tem = fold_invert_truthvalue (loc0, arg0);
11531 if (tem)
11532 return fold_build2_loc (loc, code == VEC_COND_EXPR
11533 ? BIT_IOR_EXPR
11534 : TRUTH_ORIF_EXPR,
11535 type, fold_convert_loc (loc, type, tem),
11536 op1);
11537 }
11538
11539 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11540 if (integer_zerop (arg1)
11541 && truth_value_p (TREE_CODE (arg0))
11542 && truth_value_p (TREE_CODE (op2))
11543 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11544 {
11545 location_t loc0 = expr_location_or (arg0, loc);
11546 /* Only perform transformation if ARG0 is easily inverted. */
11547 tem = fold_invert_truthvalue (loc0, arg0);
11548 if (tem)
11549 return fold_build2_loc (loc, code == VEC_COND_EXPR
11550 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11551 type, fold_convert_loc (loc, type, tem),
11552 op2);
11553 }
11554
11555 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11556 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11557 && truth_value_p (TREE_CODE (arg0))
11558 && truth_value_p (TREE_CODE (op2))
11559 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11560 return fold_build2_loc (loc, code == VEC_COND_EXPR
11561 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11562 type, fold_convert_loc (loc, type, arg0), op2);
11563
11564 return NULL_TREE;
11565
11566 case CALL_EXPR:
11567 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11568 of fold_ternary on them. */
11569 gcc_unreachable ();
11570
11571 case BIT_FIELD_REF:
11572 if (TREE_CODE (arg0) == VECTOR_CST
11573 && (type == TREE_TYPE (TREE_TYPE (arg0))
11574 || (TREE_CODE (type) == VECTOR_TYPE
11575 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11576 {
11577 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11578 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11579 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11580 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11581
11582 if (n != 0
11583 && (idx % width) == 0
11584 && (n % width) == 0
11585 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11586 {
11587 idx = idx / width;
11588 n = n / width;
11589
11590 if (TREE_CODE (arg0) == VECTOR_CST)
11591 {
11592 if (n == 1)
11593 return VECTOR_CST_ELT (arg0, idx);
11594
11595 tree *vals = XALLOCAVEC (tree, n);
11596 for (unsigned i = 0; i < n; ++i)
11597 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11598 return build_vector (type, vals);
11599 }
11600 }
11601 }
11602
11603 /* On constants we can use native encode/interpret to constant
11604 fold (nearly) all BIT_FIELD_REFs. */
11605 if (CONSTANT_CLASS_P (arg0)
11606 && can_native_interpret_type_p (type)
11607 && BITS_PER_UNIT == 8)
11608 {
11609 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11610 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11611 /* Limit us to a reasonable amount of work. To relax the
11612 other limitations we need bit-shifting of the buffer
11613 and rounding up the size. */
11614 if (bitpos % BITS_PER_UNIT == 0
11615 && bitsize % BITS_PER_UNIT == 0
11616 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11617 {
11618 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11619 unsigned HOST_WIDE_INT len
11620 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11621 bitpos / BITS_PER_UNIT);
11622 if (len > 0
11623 && len * BITS_PER_UNIT >= bitsize)
11624 {
11625 tree v = native_interpret_expr (type, b,
11626 bitsize / BITS_PER_UNIT);
11627 if (v)
11628 return v;
11629 }
11630 }
11631 }
11632
11633 return NULL_TREE;
11634
11635 case FMA_EXPR:
11636 /* For integers we can decompose the FMA if possible. */
11637 if (TREE_CODE (arg0) == INTEGER_CST
11638 && TREE_CODE (arg1) == INTEGER_CST)
11639 return fold_build2_loc (loc, PLUS_EXPR, type,
11640 const_binop (MULT_EXPR, arg0, arg1), arg2);
11641 if (integer_zerop (arg2))
11642 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11643
11644 return fold_fma (loc, type, arg0, arg1, arg2);
11645
11646 case VEC_PERM_EXPR:
11647 if (TREE_CODE (arg2) == VECTOR_CST)
11648 {
11649 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11650 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11651 unsigned char *sel2 = sel + nelts;
11652 bool need_mask_canon = false;
11653 bool need_mask_canon2 = false;
11654 bool all_in_vec0 = true;
11655 bool all_in_vec1 = true;
11656 bool maybe_identity = true;
11657 bool single_arg = (op0 == op1);
11658 bool changed = false;
11659
11660 mask2 = 2 * nelts - 1;
11661 mask = single_arg ? (nelts - 1) : mask2;
11662 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11663 for (i = 0; i < nelts; i++)
11664 {
11665 tree val = VECTOR_CST_ELT (arg2, i);
11666 if (TREE_CODE (val) != INTEGER_CST)
11667 return NULL_TREE;
11668
11669 /* Make sure that the perm value is in an acceptable
11670 range. */
11671 wide_int t = val;
11672 need_mask_canon |= wi::gtu_p (t, mask);
11673 need_mask_canon2 |= wi::gtu_p (t, mask2);
11674 sel[i] = t.to_uhwi () & mask;
11675 sel2[i] = t.to_uhwi () & mask2;
11676
11677 if (sel[i] < nelts)
11678 all_in_vec1 = false;
11679 else
11680 all_in_vec0 = false;
11681
11682 if ((sel[i] & (nelts - 1)) != i)
11683 maybe_identity = false;
11684 }
11685
11686 if (maybe_identity)
11687 {
11688 if (all_in_vec0)
11689 return op0;
11690 if (all_in_vec1)
11691 return op1;
11692 }
11693
11694 if (all_in_vec0)
11695 op1 = op0;
11696 else if (all_in_vec1)
11697 {
11698 op0 = op1;
11699 for (i = 0; i < nelts; i++)
11700 sel[i] -= nelts;
11701 need_mask_canon = true;
11702 }
11703
11704 if ((TREE_CODE (op0) == VECTOR_CST
11705 || TREE_CODE (op0) == CONSTRUCTOR)
11706 && (TREE_CODE (op1) == VECTOR_CST
11707 || TREE_CODE (op1) == CONSTRUCTOR))
11708 {
11709 tree t = fold_vec_perm (type, op0, op1, sel);
11710 if (t != NULL_TREE)
11711 return t;
11712 }
11713
11714 if (op0 == op1 && !single_arg)
11715 changed = true;
11716
11717 /* Some targets are deficient and fail to expand a single
11718 argument permutation while still allowing an equivalent
11719 2-argument version. */
11720 if (need_mask_canon && arg2 == op2
11721 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11722 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11723 {
11724 need_mask_canon = need_mask_canon2;
11725 sel = sel2;
11726 }
11727
11728 if (need_mask_canon && arg2 == op2)
11729 {
11730 tree *tsel = XALLOCAVEC (tree, nelts);
11731 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11732 for (i = 0; i < nelts; i++)
11733 tsel[i] = build_int_cst (eltype, sel[i]);
11734 op2 = build_vector (TREE_TYPE (arg2), tsel);
11735 changed = true;
11736 }
11737
11738 if (changed)
11739 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11740 }
11741 return NULL_TREE;
11742
11743 case BIT_INSERT_EXPR:
11744 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
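/* Illustrative example (hypothetical operands): inserting the 8-bit
   constant 0xAB at bit position 8 into the 32-bit constant 0x1234
   clears bits 8..15 and ORs in 0xAB << 8, yielding 0xAB34.  */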
11745 if (TREE_CODE (arg0) == INTEGER_CST
11746 && TREE_CODE (arg1) == INTEGER_CST)
11747 {
11748 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11749 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11750 wide_int tem = wi::bit_and (arg0,
11751 wi::shifted_mask (bitpos, bitsize, true,
11752 TYPE_PRECISION (type)));
11753 wide_int tem2
11754 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11755 bitsize), bitpos);
11756 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11757 }
11758 else if (TREE_CODE (arg0) == VECTOR_CST
11759 && CONSTANT_CLASS_P (arg1)
11760 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11761 TREE_TYPE (arg1)))
11762 {
11763 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11764 unsigned HOST_WIDE_INT elsize
11765 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11766 if (bitpos % elsize == 0)
11767 {
11768 unsigned k = bitpos / elsize;
11769 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11770 return arg0;
11771 else
11772 {
11773 tree *elts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
11774 memcpy (elts, VECTOR_CST_ELTS (arg0),
11775 sizeof (tree) * TYPE_VECTOR_SUBPARTS (type));
11776 elts[k] = arg1;
11777 return build_vector (type, elts);
11778 }
11779 }
11780 }
11781 return NULL_TREE;
11782
11783 default:
11784 return NULL_TREE;
11785 } /* switch (code) */
11786 }
11787
11788 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11789 of an array (or vector). */
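/* E.g. for the C initializer { [0 ... 3] = 1, 7 }, index 2 is
   matched by the RANGE_EXPR [0, 3] below, while index 4 gets the
   value 7 via the implicit next-index rule.  */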
11790
11791 tree
11792 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11793 {
11794 tree index_type = NULL_TREE;
11795 offset_int low_bound = 0;
11796
11797 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11798 {
11799 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11800 if (domain_type && TYPE_MIN_VALUE (domain_type))
11801 {
11802 /* Static constructors for variably sized objects make no sense. */
11803 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11804 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11805 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11806 }
11807 }
11808
11809 if (index_type)
11810 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11811 TYPE_SIGN (index_type));
11812
11813 offset_int index = low_bound - 1;
11814 if (index_type)
11815 index = wi::ext (index, TYPE_PRECISION (index_type),
11816 TYPE_SIGN (index_type));
11817
11818 offset_int max_index;
11819 unsigned HOST_WIDE_INT cnt;
11820 tree cfield, cval;
11821
11822 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11823 {
11824 /* An array constructor might explicitly set an index, specify a range,
11825 or leave the index NULL, meaning that it is the next index after the
11826 previous one. */
11827 if (cfield)
11828 {
11829 if (TREE_CODE (cfield) == INTEGER_CST)
11830 max_index = index = wi::to_offset (cfield);
11831 else
11832 {
11833 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11834 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11835 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11836 }
11837 }
11838 else
11839 {
11840 index += 1;
11841 if (index_type)
11842 index = wi::ext (index, TYPE_PRECISION (index_type),
11843 TYPE_SIGN (index_type));
11844 max_index = index;
11845 }
11846
11847 /* Do we have a match? */
11848 if (wi::cmpu (access_index, index) >= 0
11849 && wi::cmpu (access_index, max_index) <= 0)
11850 return cval;
11851 }
11852 return NULL_TREE;
11853 }
11854
11855 /* Perform constant folding and related simplification of EXPR.
11856 The related simplifications include x*1 => x, x*0 => 0, etc.,
11857 and application of the associative law.
11858 NOP_EXPR conversions may be removed freely (as long as we
11859 are careful not to change the type of the overall expression).
11860 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11861 but we can constant-fold them if they have constant operands. */
11862
11863 #ifdef ENABLE_FOLD_CHECKING
11864 # define fold(x) fold_1 (x)
11865 static tree fold_1 (tree);
11866 static
11867 #endif
11868 tree
11869 fold (tree expr)
11870 {
11871 const tree t = expr;
11872 enum tree_code code = TREE_CODE (t);
11873 enum tree_code_class kind = TREE_CODE_CLASS (code);
11874 tree tem;
11875 location_t loc = EXPR_LOCATION (expr);
11876
11877 /* Return right away if a constant. */
11878 if (kind == tcc_constant)
11879 return t;
11880
11881 /* CALL_EXPR-like objects with variable numbers of operands are
11882 treated specially. */
11883 if (kind == tcc_vl_exp)
11884 {
11885 if (code == CALL_EXPR)
11886 {
11887 tem = fold_call_expr (loc, expr, false);
11888 return tem ? tem : expr;
11889 }
11890 return expr;
11891 }
11892
11893 if (IS_EXPR_CODE_CLASS (kind))
11894 {
11895 tree type = TREE_TYPE (t);
11896 tree op0, op1, op2;
11897
11898 switch (TREE_CODE_LENGTH (code))
11899 {
11900 case 1:
11901 op0 = TREE_OPERAND (t, 0);
11902 tem = fold_unary_loc (loc, code, type, op0);
11903 return tem ? tem : expr;
11904 case 2:
11905 op0 = TREE_OPERAND (t, 0);
11906 op1 = TREE_OPERAND (t, 1);
11907 tem = fold_binary_loc (loc, code, type, op0, op1);
11908 return tem ? tem : expr;
11909 case 3:
11910 op0 = TREE_OPERAND (t, 0);
11911 op1 = TREE_OPERAND (t, 1);
11912 op2 = TREE_OPERAND (t, 2);
11913 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11914 return tem ? tem : expr;
11915 default:
11916 break;
11917 }
11918 }
11919
11920 switch (code)
11921 {
11922 case ARRAY_REF:
11923 {
11924 tree op0 = TREE_OPERAND (t, 0);
11925 tree op1 = TREE_OPERAND (t, 1);
11926
11927 if (TREE_CODE (op1) == INTEGER_CST
11928 && TREE_CODE (op0) == CONSTRUCTOR
11929 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11930 {
11931 tree val = get_array_ctor_element_at_index (op0,
11932 wi::to_offset (op1));
11933 if (val)
11934 return val;
11935 }
11936
11937 return t;
11938 }
11939
11940 /* Return a VECTOR_CST if possible. */
11941 case CONSTRUCTOR:
11942 {
11943 tree type = TREE_TYPE (t);
11944 if (TREE_CODE (type) != VECTOR_TYPE)
11945 return t;
11946
11947 unsigned i;
11948 tree val;
11949 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11950 if (! CONSTANT_CLASS_P (val))
11951 return t;
11952
11953 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11954 }
11955
11956 case CONST_DECL:
11957 return fold (DECL_INITIAL (t));
11958
11959 default:
11960 return t;
11961 } /* switch (code) */
11962 }
11963
11964 #ifdef ENABLE_FOLD_CHECKING
11965 #undef fold
11966
11967 static void fold_checksum_tree (const_tree, struct md5_ctx *,
11968 hash_table<nofree_ptr_hash<const tree_node> > *);
11969 static void fold_check_failed (const_tree, const_tree);
11970 void print_fold_checksum (const_tree);
11971
11972 /* When --enable-checking=fold, compute a digest of EXPR before
11973 and after the actual fold call to verify that fold did not
11974 accidentally change the original expr. */
11975
11976 tree
11977 fold (tree expr)
11978 {
11979 tree ret;
11980 struct md5_ctx ctx;
11981 unsigned char checksum_before[16], checksum_after[16];
11982 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11983
11984 md5_init_ctx (&ctx);
11985 fold_checksum_tree (expr, &ctx, &ht);
11986 md5_finish_ctx (&ctx, checksum_before);
11987 ht.empty ();
11988
11989 ret = fold_1 (expr);
11990
11991 md5_init_ctx (&ctx);
11992 fold_checksum_tree (expr, &ctx, &ht);
11993 md5_finish_ctx (&ctx, checksum_after);
11994
11995 if (memcmp (checksum_before, checksum_after, 16))
11996 fold_check_failed (expr, ret);
11997
11998 return ret;
11999 }
12000
12001 void
12002 print_fold_checksum (const_tree expr)
12003 {
12004 struct md5_ctx ctx;
12005 unsigned char checksum[16], cnt;
12006 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12007
12008 md5_init_ctx (&ctx);
12009 fold_checksum_tree (expr, &ctx, &ht);
12010 md5_finish_ctx (&ctx, checksum);
12011 for (cnt = 0; cnt < 16; ++cnt)
12012 fprintf (stderr, "%02x", checksum[cnt]);
12013 putc ('\n', stderr);
12014 }
12015
12016 static void
12017 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12018 {
12019 internal_error ("fold check: original tree changed by fold");
12020 }
12021
12022 static void
12023 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12024 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12025 {
12026 const tree_node **slot;
12027 enum tree_code code;
12028 union tree_node buf;
12029 int i, len;
12030
12031 recursive_label:
12032 if (expr == NULL)
12033 return;
12034 slot = ht->find_slot (expr, INSERT);
12035 if (*slot != NULL)
12036 return;
12037 *slot = expr;
12038 code = TREE_CODE (expr);
12039 if (TREE_CODE_CLASS (code) == tcc_declaration
12040 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12041 {
12042 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12043 memcpy ((char *) &buf, expr, tree_size (expr));
12044 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12045 buf.decl_with_vis.symtab_node = NULL;
12046 expr = (tree) &buf;
12047 }
12048 else if (TREE_CODE_CLASS (code) == tcc_type
12049 && (TYPE_POINTER_TO (expr)
12050 || TYPE_REFERENCE_TO (expr)
12051 || TYPE_CACHED_VALUES_P (expr)
12052 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12053 || TYPE_NEXT_VARIANT (expr)
12054 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12055 {
12056 /* Allow these fields to be modified. */
12057 tree tmp;
12058 memcpy ((char *) &buf, expr, tree_size (expr));
12059 expr = tmp = (tree) &buf;
12060 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12061 TYPE_POINTER_TO (tmp) = NULL;
12062 TYPE_REFERENCE_TO (tmp) = NULL;
12063 TYPE_NEXT_VARIANT (tmp) = NULL;
12064 TYPE_ALIAS_SET (tmp) = -1;
12065 if (TYPE_CACHED_VALUES_P (tmp))
12066 {
12067 TYPE_CACHED_VALUES_P (tmp) = 0;
12068 TYPE_CACHED_VALUES (tmp) = NULL;
12069 }
12070 }
12071 md5_process_bytes (expr, tree_size (expr), ctx);
12072 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12073 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12074 if (TREE_CODE_CLASS (code) != tcc_type
12075 && TREE_CODE_CLASS (code) != tcc_declaration
12076 && code != TREE_LIST
12077 && code != SSA_NAME
12078 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12079 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12080 switch (TREE_CODE_CLASS (code))
12081 {
12082 case tcc_constant:
12083 switch (code)
12084 {
12085 case STRING_CST:
12086 md5_process_bytes (TREE_STRING_POINTER (expr),
12087 TREE_STRING_LENGTH (expr), ctx);
12088 break;
12089 case COMPLEX_CST:
12090 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12091 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12092 break;
12093 case VECTOR_CST:
12094 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12095 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12096 break;
12097 default:
12098 break;
12099 }
12100 break;
12101 case tcc_exceptional:
12102 switch (code)
12103 {
12104 case TREE_LIST:
12105 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12106 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12107 expr = TREE_CHAIN (expr);
12108 goto recursive_label;
12109 break;
12110 case TREE_VEC:
12111 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12112 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12113 break;
12114 default:
12115 break;
12116 }
12117 break;
12118 case tcc_expression:
12119 case tcc_reference:
12120 case tcc_comparison:
12121 case tcc_unary:
12122 case tcc_binary:
12123 case tcc_statement:
12124 case tcc_vl_exp:
12125 len = TREE_OPERAND_LENGTH (expr);
12126 for (i = 0; i < len; ++i)
12127 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12128 break;
12129 case tcc_declaration:
12130 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12131 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12132 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12133 {
12134 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12135 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12136 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12137 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12138 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12139 }
12140
12141 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12142 {
12143 if (TREE_CODE (expr) == FUNCTION_DECL)
12144 {
12145 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12146 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12147 }
12148 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12149 }
12150 break;
12151 case tcc_type:
12152 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12153 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12154 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12155 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12156 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12157 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12158 if (INTEGRAL_TYPE_P (expr)
12159 || SCALAR_FLOAT_TYPE_P (expr))
12160 {
12161 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12162 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12163 }
12164 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12165 if (TREE_CODE (expr) == RECORD_TYPE
12166 || TREE_CODE (expr) == UNION_TYPE
12167 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12168 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12169 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12170 break;
12171 default:
12172 break;
12173 }
12174 }
12175
12176 /* Helper function for outputting the checksum of a tree T. When
12177 debugging with gdb, you can "define mynext" to be "next" followed
12178 by "call debug_fold_checksum (op0)", then just trace down till the
12179 outputs differ. */
12180
12181 DEBUG_FUNCTION void
12182 debug_fold_checksum (const_tree t)
12183 {
12184 int i;
12185 unsigned char checksum[16];
12186 struct md5_ctx ctx;
12187 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12188
12189 md5_init_ctx (&ctx);
12190 fold_checksum_tree (t, &ctx, &ht);
12191 md5_finish_ctx (&ctx, checksum);
12192 ht.empty ();
12193
12194 for (i = 0; i < 16; i++)
12195 fprintf (stderr, "%d ", checksum[i]);
12196
12197 fprintf (stderr, "\n");
12198 }
12199
12200 #endif
12201
12202 /* Fold a unary tree expression with code CODE of type TYPE with an
12203 operand OP0. LOC is the location of the resulting expression.
12204 Return a folded expression if successful. Otherwise, return a tree
12205 expression with code CODE of type TYPE with an operand OP0. */
12206
12207 tree
12208 fold_build1_stat_loc (location_t loc,
12209 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12210 {
12211 tree tem;
12212 #ifdef ENABLE_FOLD_CHECKING
12213 unsigned char checksum_before[16], checksum_after[16];
12214 struct md5_ctx ctx;
12215 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12216
12217 md5_init_ctx (&ctx);
12218 fold_checksum_tree (op0, &ctx, &ht);
12219 md5_finish_ctx (&ctx, checksum_before);
12220 ht.empty ();
12221 #endif
12222
12223 tem = fold_unary_loc (loc, code, type, op0);
12224 if (!tem)
12225 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12226
12227 #ifdef ENABLE_FOLD_CHECKING
12228 md5_init_ctx (&ctx);
12229 fold_checksum_tree (op0, &ctx, &ht);
12230 md5_finish_ctx (&ctx, checksum_after);
12231
12232 if (memcmp (checksum_before, checksum_after, 16))
12233 fold_check_failed (op0, tem);
12234 #endif
12235 return tem;
12236 }
12237
12238 /* Fold a binary tree expression with code CODE of type TYPE with
12239 operands OP0 and OP1. LOC is the location of the resulting
12240 expression. Return a folded expression if successful. Otherwise,
12241 return a tree expression with code CODE of type TYPE with operands
12242 OP0 and OP1. */
12243
12244 tree
12245 fold_build2_stat_loc (location_t loc,
12246 enum tree_code code, tree type, tree op0, tree op1
12247 MEM_STAT_DECL)
12248 {
12249 tree tem;
12250 #ifdef ENABLE_FOLD_CHECKING
12251 unsigned char checksum_before_op0[16],
12252 checksum_before_op1[16],
12253 checksum_after_op0[16],
12254 checksum_after_op1[16];
12255 struct md5_ctx ctx;
12256 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12257
12258 md5_init_ctx (&ctx);
12259 fold_checksum_tree (op0, &ctx, &ht);
12260 md5_finish_ctx (&ctx, checksum_before_op0);
12261 ht.empty ();
12262
12263 md5_init_ctx (&ctx);
12264 fold_checksum_tree (op1, &ctx, &ht);
12265 md5_finish_ctx (&ctx, checksum_before_op1);
12266 ht.empty ();
12267 #endif
12268
12269 tem = fold_binary_loc (loc, code, type, op0, op1);
12270 if (!tem)
12271 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12272
12273 #ifdef ENABLE_FOLD_CHECKING
12274 md5_init_ctx (&ctx);
12275 fold_checksum_tree (op0, &ctx, &ht);
12276 md5_finish_ctx (&ctx, checksum_after_op0);
12277 ht.empty ();
12278
12279 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12280 fold_check_failed (op0, tem);
12281
12282 md5_init_ctx (&ctx);
12283 fold_checksum_tree (op1, &ctx, &ht);
12284 md5_finish_ctx (&ctx, checksum_after_op1);
12285
12286 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12287 fold_check_failed (op1, tem);
12288 #endif
12289 return tem;
12290 }
12291
12292 /* Fold a ternary tree expression with code CODE of type TYPE with
12293 operands OP0, OP1, and OP2. Return a folded expression if
12294 successful. Otherwise, return a tree expression with code CODE of
12295 type TYPE with operands OP0, OP1, and OP2. */
12296
12297 tree
12298 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12299 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12300 {
12301 tree tem;
12302 #ifdef ENABLE_FOLD_CHECKING
12303 unsigned char checksum_before_op0[16],
12304 checksum_before_op1[16],
12305 checksum_before_op2[16],
12306 checksum_after_op0[16],
12307 checksum_after_op1[16],
12308 checksum_after_op2[16];
12309 struct md5_ctx ctx;
12310 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12311
12312 md5_init_ctx (&ctx);
12313 fold_checksum_tree (op0, &ctx, &ht);
12314 md5_finish_ctx (&ctx, checksum_before_op0);
12315 ht.empty ();
12316
12317 md5_init_ctx (&ctx);
12318 fold_checksum_tree (op1, &ctx, &ht);
12319 md5_finish_ctx (&ctx, checksum_before_op1);
12320 ht.empty ();
12321
12322 md5_init_ctx (&ctx);
12323 fold_checksum_tree (op2, &ctx, &ht);
12324 md5_finish_ctx (&ctx, checksum_before_op2);
12325 ht.empty ();
12326 #endif
12327
12328 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12329 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12330 if (!tem)
12331 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12332
12333 #ifdef ENABLE_FOLD_CHECKING
12334 md5_init_ctx (&ctx);
12335 fold_checksum_tree (op0, &ctx, &ht);
12336 md5_finish_ctx (&ctx, checksum_after_op0);
12337 ht.empty ();
12338
12339 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12340 fold_check_failed (op0, tem);
12341
12342 md5_init_ctx (&ctx);
12343 fold_checksum_tree (op1, &ctx, &ht);
12344 md5_finish_ctx (&ctx, checksum_after_op1);
12345 ht.empty ();
12346
12347 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12348 fold_check_failed (op1, tem);
12349
12350 md5_init_ctx (&ctx);
12351 fold_checksum_tree (op2, &ctx, &ht);
12352 md5_finish_ctx (&ctx, checksum_after_op2);
12353
12354 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12355 fold_check_failed (op2, tem);
12356 #endif
12357 return tem;
12358 }
12359
12360 /* Fold a CALL_EXPR of type TYPE that calls function FN with the NARGS
12361 arguments in ARGARRAY and a null static chain.
12362 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12363 of type TYPE from the given operands as constructed by build_call_array. */
12364
12365 tree
12366 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12367 int nargs, tree *argarray)
12368 {
12369 tree tem;
12370 #ifdef ENABLE_FOLD_CHECKING
12371 unsigned char checksum_before_fn[16],
12372 checksum_before_arglist[16],
12373 checksum_after_fn[16],
12374 checksum_after_arglist[16];
12375 struct md5_ctx ctx;
12376 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12377 int i;
12378
12379 md5_init_ctx (&ctx);
12380 fold_checksum_tree (fn, &ctx, &ht);
12381 md5_finish_ctx (&ctx, checksum_before_fn);
12382 ht.empty ();
12383
12384 md5_init_ctx (&ctx);
12385 for (i = 0; i < nargs; i++)
12386 fold_checksum_tree (argarray[i], &ctx, &ht);
12387 md5_finish_ctx (&ctx, checksum_before_arglist);
12388 ht.empty ();
12389 #endif
12390
12391 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12392 if (!tem)
12393 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12394
12395 #ifdef ENABLE_FOLD_CHECKING
12396 md5_init_ctx (&ctx);
12397 fold_checksum_tree (fn, &ctx, &ht);
12398 md5_finish_ctx (&ctx, checksum_after_fn);
12399 ht.empty ();
12400
12401 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12402 fold_check_failed (fn, tem);
12403
12404 md5_init_ctx (&ctx);
12405 for (i = 0; i < nargs; i++)
12406 fold_checksum_tree (argarray[i], &ctx, &ht);
12407 md5_finish_ctx (&ctx, checksum_after_arglist);
12408
12409 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12410 fold_check_failed (NULL_TREE, tem);
12411 #endif
12412 return tem;
12413 }
12414
12415 /* Perform constant folding and related simplification of initializer
12416 expression EXPR. These behave identically to "fold_buildN" but ignore
12417 potential run-time traps and exceptions that fold must preserve. */
12418
12419 #define START_FOLD_INIT \
12420 int saved_signaling_nans = flag_signaling_nans;\
12421 int saved_trapping_math = flag_trapping_math;\
12422 int saved_rounding_math = flag_rounding_math;\
12423 int saved_trapv = flag_trapv;\
12424 int saved_folding_initializer = folding_initializer;\
12425 flag_signaling_nans = 0;\
12426 flag_trapping_math = 0;\
12427 flag_rounding_math = 0;\
12428 flag_trapv = 0;\
12429 folding_initializer = 1;
12430
12431 #define END_FOLD_INIT \
12432 flag_signaling_nans = saved_signaling_nans;\
12433 flag_trapping_math = saved_trapping_math;\
12434 flag_rounding_math = saved_rounding_math;\
12435 flag_trapv = saved_trapv;\
12436 folding_initializer = saved_folding_initializer;
12437
12438 tree
12439 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12440 tree type, tree op)
12441 {
12442 tree result;
12443 START_FOLD_INIT;
12444
12445 result = fold_build1_loc (loc, code, type, op);
12446
12447 END_FOLD_INIT;
12448 return result;
12449 }
12450
12451 tree
12452 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12453 tree type, tree op0, tree op1)
12454 {
12455 tree result;
12456 START_FOLD_INIT;
12457
12458 result = fold_build2_loc (loc, code, type, op0, op1);
12459
12460 END_FOLD_INIT;
12461 return result;
12462 }
12463
12464 tree
12465 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12466 int nargs, tree *argarray)
12467 {
12468 tree result;
12469 START_FOLD_INIT;
12470
12471 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12472
12473 END_FOLD_INIT;
12474 return result;
12475 }
12476
12477 #undef START_FOLD_INIT
12478 #undef END_FOLD_INIT
12479
12480 /* Determine if the first argument is a multiple of the second. Return
12481 0 if it is not, or if we cannot easily determine it to be.
12482
12483 An example of the sort of thing we care about (at this point; this routine
12484 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12485 fold cases do now) is discovering that
12486
12487 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12488
12489 is a multiple of
12490
12491 SAVE_EXPR (J * 8)
12492
12493 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12494
12495 This code also handles discovering that
12496
12497 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12498
12499 is a multiple of 8 so we don't have to worry about dealing with a
12500 possible remainder.
12501
12502 Note that we *look* inside a SAVE_EXPR only to determine how it was
12503 calculated; it is not safe for fold to do much of anything else with the
12504 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12505 at run time. For example, the latter example above *cannot* be implemented
12506 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12507 evaluation time of the original SAVE_EXPR is not necessarily the same at
12508 the time the new expression is evaluated. The only optimization of this
12509 sort that would be valid is changing
12510
12511 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12512
12513 divided by 8 to
12514
12515 SAVE_EXPR (I) * SAVE_EXPR (J)
12516
12517 (where the same SAVE_EXPR (J) is used in the original and the
12518 transformed version). */
12519
12520 int
12521 multiple_of_p (tree type, const_tree top, const_tree bottom)
12522 {
12523 gimple *stmt;
12524 tree t1, op1, op2;
12525
12526 if (operand_equal_p (top, bottom, 0))
12527 return 1;
12528
12529 if (TREE_CODE (type) != INTEGER_TYPE)
12530 return 0;
12531
12532 switch (TREE_CODE (top))
12533 {
12534 case BIT_AND_EXPR:
12535 /* A bitwise AND can only clear bits, so when BOTTOM is a power of two
12536 and either operand is a multiple of BOTTOM, TOP is too. */
12537 if (!integer_pow2p (bottom))
12538 return 0;
12539 /* FALLTHRU */
12540
12541 case MULT_EXPR:
12542 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12543 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12544
12545 case MINUS_EXPR:
12546 /* It is impossible to prove precisely whether op0 - op1 is a multiple
12547 of bottom, so be conservative here and check that both op0 and op1
12548 are multiples of bottom. Note we check the second operand first
12549 since it's usually simpler. */
12550 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12551 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12552
12553 case PLUS_EXPR:
12554 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12555 as op0 - 3 if the expression has unsigned type. For example,
12556 (X * 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
12557 op1 = TREE_OPERAND (top, 1);
12558 if (TYPE_UNSIGNED (type)
12559 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12560 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12561 return (multiple_of_p (type, op1, bottom)
12562 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12563
12564 case LSHIFT_EXPR:
12565 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12566 {
12567 op1 = TREE_OPERAND (top, 1);
12568 /* const_binop may not detect overflow correctly,
12569 so check for it explicitly here. */
12570 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12571 && 0 != (t1 = fold_convert (type,
12572 const_binop (LSHIFT_EXPR,
12573 size_one_node,
12574 op1)))
12575 && !TREE_OVERFLOW (t1))
12576 return multiple_of_p (type, t1, bottom);
12577 }
12578 return 0;
12579
12580 case NOP_EXPR:
12581 /* Can't handle conversions from non-integral or wider integral type. */
12582 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12583 || (TYPE_PRECISION (type)
12584 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12585 return 0;
12586
12587 /* fall through */
12588
12589 case SAVE_EXPR:
12590 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12591
12592 case COND_EXPR:
12593 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12594 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12595
12596 case INTEGER_CST:
12597 if (TREE_CODE (bottom) != INTEGER_CST
12598 || integer_zerop (bottom)
12599 || (TYPE_UNSIGNED (type)
12600 && (tree_int_cst_sgn (top) < 0
12601 || tree_int_cst_sgn (bottom) < 0)))
12602 return 0;
12603 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12604 SIGNED);
12605
12606 case SSA_NAME:
12607 if (TREE_CODE (bottom) == INTEGER_CST
12608 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12609 && gimple_code (stmt) == GIMPLE_ASSIGN)
12610 {
12611 enum tree_code code = gimple_assign_rhs_code (stmt);
12612
12613 /* Check for special cases to see if top is defined as a multiple
12614 of bottom:
12615 
12616 top = (X & ~(bottom - 1)) ; bottom is a power of 2
12617
12618 or
12619
12620 Y = X % bottom
12621 top = X - Y. */
12622 if (code == BIT_AND_EXPR
12623 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12624 && TREE_CODE (op2) == INTEGER_CST
12625 && integer_pow2p (bottom)
12626 && wi::multiple_of_p (wi::to_widest (op2),
12627 wi::to_widest (bottom), UNSIGNED))
12628 return 1;
12629
12630 op1 = gimple_assign_rhs1 (stmt);
12631 if (code == MINUS_EXPR
12632 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12633 && TREE_CODE (op2) == SSA_NAME
12634 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12635 && gimple_code (stmt) == GIMPLE_ASSIGN
12636 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12637 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12638 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12639 return 1;
12640 }
12641
12642 /* fall through */
12643
12644 default:
12645 return 0;
12646 }
12647 }
12648
12649 #define tree_expr_nonnegative_warnv_p(X, Y) \
12650 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12651
12652 #define RECURSE(X) \
12653 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12654
12655 /* Return true if CODE or TYPE is known to be non-negative. */
12656
12657 static bool
12658 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12659 {
12660 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12661 && truth_value_p (code))
12662 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12663 have a signed:1 type (where the values are -1 and 0). */
12664 return true;
12665 return false;
12666 }
12667
12668 /* Return true if (CODE OP0) is known to be non-negative. If the return
12669 value is based on the assumption that signed overflow is undefined,
12670 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12671 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12672
12673 bool
12674 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12675 bool *strict_overflow_p, int depth)
12676 {
12677 if (TYPE_UNSIGNED (type))
12678 return true;
12679
12680 switch (code)
12681 {
12682 case ABS_EXPR:
12683 /* We can't return 1 if flag_wrapv is set because
12684 ABS_EXPR<INT_MIN> = INT_MIN. */
12685 if (!ANY_INTEGRAL_TYPE_P (type))
12686 return true;
12687 if (TYPE_OVERFLOW_UNDEFINED (type))
12688 {
12689 *strict_overflow_p = true;
12690 return true;
12691 }
12692 break;
12693
12694 case NON_LVALUE_EXPR:
12695 case FLOAT_EXPR:
12696 case FIX_TRUNC_EXPR:
12697 return RECURSE (op0);
12698
12699 CASE_CONVERT:
12700 {
12701 tree inner_type = TREE_TYPE (op0);
12702 tree outer_type = type;
12703
12704 if (TREE_CODE (outer_type) == REAL_TYPE)
12705 {
12706 if (TREE_CODE (inner_type) == REAL_TYPE)
12707 return RECURSE (op0);
12708 if (INTEGRAL_TYPE_P (inner_type))
12709 {
12710 if (TYPE_UNSIGNED (inner_type))
12711 return true;
12712 return RECURSE (op0);
12713 }
12714 }
12715 else if (INTEGRAL_TYPE_P (outer_type))
12716 {
12717 if (TREE_CODE (inner_type) == REAL_TYPE)
12718 return RECURSE (op0);
12719 if (INTEGRAL_TYPE_P (inner_type))
12720 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12721 && TYPE_UNSIGNED (inner_type);
12722 }
12723 }
12724 break;
12725
12726 default:
12727 return tree_simple_nonnegative_warnv_p (code, type);
12728 }
12729
12730 /* We don't know the sign of the operand, so be conservative and return false. */
12731 return false;
12732 }
12733
12734 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12735 value is based on the assumption that signed overflow is undefined,
12736 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12737 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12738
12739 bool
12740 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12741 tree op1, bool *strict_overflow_p,
12742 int depth)
12743 {
12744 if (TYPE_UNSIGNED (type))
12745 return true;
12746
12747 switch (code)
12748 {
12749 case POINTER_PLUS_EXPR:
12750 case PLUS_EXPR:
12751 if (FLOAT_TYPE_P (type))
12752 return RECURSE (op0) && RECURSE (op1);
12753
12754 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12755 both unsigned and at least 2 bits shorter than the result. */
12756 if (TREE_CODE (type) == INTEGER_TYPE
12757 && TREE_CODE (op0) == NOP_EXPR
12758 && TREE_CODE (op1) == NOP_EXPR)
12759 {
12760 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12761 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12762 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12763 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12764 {
12765 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12766 TYPE_PRECISION (inner2)) + 1;
12767 return prec < TYPE_PRECISION (type);
12768 }
12769 }
12770 break;
12771
12772 case MULT_EXPR:
12773 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12774 {
12775 /* x * x is always non-negative for floating point x,
12776 and for integers when overflow is undefined. */
12777 if (operand_equal_p (op0, op1, 0)
12778 || (RECURSE (op0) && RECURSE (op1)))
12779 {
12780 if (ANY_INTEGRAL_TYPE_P (type)
12781 && TYPE_OVERFLOW_UNDEFINED (type))
12782 *strict_overflow_p = true;
12783 return true;
12784 }
12785 }
12786
12787 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12788 both unsigned and their combined precision is less than the result's. */
12789 if (TREE_CODE (type) == INTEGER_TYPE
12790 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12791 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12792 {
12793 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12794 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12795 : TREE_TYPE (op0);
12796 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12797 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12798 : TREE_TYPE (op1);
12799
12800 bool unsigned0 = TYPE_UNSIGNED (inner0);
12801 bool unsigned1 = TYPE_UNSIGNED (inner1);
12802
12803 if (TREE_CODE (op0) == INTEGER_CST)
12804 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12805
12806 if (TREE_CODE (op1) == INTEGER_CST)
12807 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12808
12809 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12810 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12811 {
12812 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12813 ? tree_int_cst_min_precision (op0, UNSIGNED)
12814 : TYPE_PRECISION (inner0);
12815
12816 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12817 ? tree_int_cst_min_precision (op1, UNSIGNED)
12818 : TYPE_PRECISION (inner1);
12819
12820 return precision0 + precision1 < TYPE_PRECISION (type);
12821 }
12822 }
12823 return false;
12824
12825 case BIT_AND_EXPR:
12826 case MAX_EXPR:
12827 return RECURSE (op0) || RECURSE (op1);
12828
12829 case BIT_IOR_EXPR:
12830 case BIT_XOR_EXPR:
12831 case MIN_EXPR:
12832 case RDIV_EXPR:
12833 case TRUNC_DIV_EXPR:
12834 case CEIL_DIV_EXPR:
12835 case FLOOR_DIV_EXPR:
12836 case ROUND_DIV_EXPR:
12837 return RECURSE (op0) && RECURSE (op1);
12838
12839 case TRUNC_MOD_EXPR:
12840 return RECURSE (op0);
12841
12842 case FLOOR_MOD_EXPR:
12843 return RECURSE (op1);
12844
12845 case CEIL_MOD_EXPR:
12846 case ROUND_MOD_EXPR:
12847 default:
12848 return tree_simple_nonnegative_warnv_p (code, type);
12849 }
12850
12851 /* We don't know the sign of the operands, so be conservative and return false. */
12852 return false;
12853 }
12854
12855 /* Return true if T is known to be non-negative. If the return
12856 value is based on the assumption that signed overflow is undefined,
12857 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12858 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12859
12860 bool
12861 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12862 {
12863 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12864 return true;
12865
12866 switch (TREE_CODE (t))
12867 {
12868 case INTEGER_CST:
12869 return tree_int_cst_sgn (t) >= 0;
12870
12871 case REAL_CST:
12872 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12873
12874 case FIXED_CST:
12875 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12876
12877 case COND_EXPR:
12878 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12879
12880 case SSA_NAME:
12881 /* Limit the depth of recursion to avoid quadratic behavior.
12882 This is expected to catch almost all occurrences in practice.
12883 If this code misses important cases that unbounded recursion
12884 would not, passes that need this information could be revised
12885 to provide it through dataflow propagation. */
12886 return (!name_registered_for_update_p (t)
12887 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12888 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12889 strict_overflow_p, depth));
12890
12891 default:
12892 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12893 }
12894 }
12895
12896 /* Return true if a call to FN with arguments ARG0 and ARG1, of result
12897 type TYPE, is known to be non-negative. If the return value is based on
12898 the assumption that signed overflow is undefined, set *STRICT_OVERFLOW_P
12899 to true; otherwise, don't change it. DEPTH is the nesting depth. */
12900
12901 bool
12902 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12903 bool *strict_overflow_p, int depth)
12904 {
12905 switch (fn)
12906 {
12907 CASE_CFN_ACOS:
12908 CASE_CFN_ACOSH:
12909 CASE_CFN_CABS:
12910 CASE_CFN_COSH:
12911 CASE_CFN_ERFC:
12912 CASE_CFN_EXP:
12913 CASE_CFN_EXP10:
12914 CASE_CFN_EXP2:
12915 CASE_CFN_FABS:
12916 CASE_CFN_FDIM:
12917 CASE_CFN_HYPOT:
12918 CASE_CFN_POW10:
12919 CASE_CFN_FFS:
12920 CASE_CFN_PARITY:
12921 CASE_CFN_POPCOUNT:
12922 CASE_CFN_CLZ:
12923 CASE_CFN_CLRSB:
12924 case CFN_BUILT_IN_BSWAP32:
12925 case CFN_BUILT_IN_BSWAP64:
12926 /* Always true. */
12927 return true;
12928
12929 CASE_CFN_SQRT:
12930 /* sqrt(-0.0) is -0.0. */
12931 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12932 return true;
12933 return RECURSE (arg0);
12934
12935 CASE_CFN_ASINH:
12936 CASE_CFN_ATAN:
12937 CASE_CFN_ATANH:
12938 CASE_CFN_CBRT:
12939 CASE_CFN_CEIL:
12940 CASE_CFN_ERF:
12941 CASE_CFN_EXPM1:
12942 CASE_CFN_FLOOR:
12943 CASE_CFN_FMOD:
12944 CASE_CFN_FREXP:
12945 CASE_CFN_ICEIL:
12946 CASE_CFN_IFLOOR:
12947 CASE_CFN_IRINT:
12948 CASE_CFN_IROUND:
12949 CASE_CFN_LCEIL:
12950 CASE_CFN_LDEXP:
12951 CASE_CFN_LFLOOR:
12952 CASE_CFN_LLCEIL:
12953 CASE_CFN_LLFLOOR:
12954 CASE_CFN_LLRINT:
12955 CASE_CFN_LLROUND:
12956 CASE_CFN_LRINT:
12957 CASE_CFN_LROUND:
12958 CASE_CFN_MODF:
12959 CASE_CFN_NEARBYINT:
12960 CASE_CFN_RINT:
12961 CASE_CFN_ROUND:
12962 CASE_CFN_SCALB:
12963 CASE_CFN_SCALBLN:
12964 CASE_CFN_SCALBN:
12965 CASE_CFN_SIGNBIT:
12966 CASE_CFN_SIGNIFICAND:
12967 CASE_CFN_SINH:
12968 CASE_CFN_TANH:
12969 CASE_CFN_TRUNC:
12970 /* True if the 1st argument is nonnegative. */
12971 return RECURSE (arg0);
12972
12973 CASE_CFN_FMAX:
12974 /* True if the 1st OR 2nd arguments are nonnegative. */
12975 return RECURSE (arg0) || RECURSE (arg1);
12976
12977 CASE_CFN_FMIN:
12978 /* True if the 1st AND 2nd arguments are nonnegative. */
12979 return RECURSE (arg0) && RECURSE (arg1);
12980
12981 CASE_CFN_COPYSIGN:
12982 /* True if the 2nd argument is nonnegative. */
12983 return RECURSE (arg1);
12984
12985 CASE_CFN_POWI:
12986 /* True if the 1st argument is nonnegative or the second
12987 argument is an even integer. */
12988 if (TREE_CODE (arg1) == INTEGER_CST
12989 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
12990 return true;
12991 return RECURSE (arg0);
12992
12993 CASE_CFN_POW:
12994 /* True if the 1st argument is nonnegative or the second
12995 argument is an even integer valued real. */
12996 if (TREE_CODE (arg1) == REAL_CST)
12997 {
12998 REAL_VALUE_TYPE c;
12999 HOST_WIDE_INT n;
13000
13001 c = TREE_REAL_CST (arg1);
13002 n = real_to_integer (&c);
13003 if ((n & 1) == 0)
13004 {
13005 REAL_VALUE_TYPE cint;
13006 real_from_integer (&cint, VOIDmode, n, SIGNED);
13007 if (real_identical (&c, &cint))
13008 return true;
13009 }
13010 }
13011 return RECURSE (arg0);
13012
13013 default:
13014 break;
13015 }
13016 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13017 }
13018
13019 /* Return true if T is known to be non-negative. If the return
13020 value is based on the assumption that signed overflow is undefined,
13021 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13022 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13023
13024 static bool
13025 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13026 {
13027 enum tree_code code = TREE_CODE (t);
13028 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13029 return true;
13030
13031 switch (code)
13032 {
13033 case TARGET_EXPR:
13034 {
13035 tree temp = TARGET_EXPR_SLOT (t);
13036 t = TARGET_EXPR_INITIAL (t);
13037
13038 /* If the initializer is non-void, then it's a normal expression
13039 that will be assigned to the slot. */
13040 if (!VOID_TYPE_P (t))
13041 return RECURSE (t);
13042
13043 /* Otherwise, the initializer sets the slot in some way. One common
13044 way is an assignment statement at the end of the initializer. */
13045 while (1)
13046 {
13047 if (TREE_CODE (t) == BIND_EXPR)
13048 t = expr_last (BIND_EXPR_BODY (t));
13049 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13050 || TREE_CODE (t) == TRY_CATCH_EXPR)
13051 t = expr_last (TREE_OPERAND (t, 0));
13052 else if (TREE_CODE (t) == STATEMENT_LIST)
13053 t = expr_last (t);
13054 else
13055 break;
13056 }
13057 if (TREE_CODE (t) == MODIFY_EXPR
13058 && TREE_OPERAND (t, 0) == temp)
13059 return RECURSE (TREE_OPERAND (t, 1));
13060
13061 return false;
13062 }
13063
13064 case CALL_EXPR:
13065 {
13066 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13067 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13068
13069 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13070 get_call_combined_fn (t),
13071 arg0,
13072 arg1,
13073 strict_overflow_p, depth);
13074 }
13075 case COMPOUND_EXPR:
13076 case MODIFY_EXPR:
13077 return RECURSE (TREE_OPERAND (t, 1));
13078
13079 case BIND_EXPR:
13080 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13081
13082 case SAVE_EXPR:
13083 return RECURSE (TREE_OPERAND (t, 0));
13084
13085 default:
13086 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13087 }
13088 }
13089
13090 #undef RECURSE
13091 #undef tree_expr_nonnegative_warnv_p
13092
13093 /* Return true if T is known to be non-negative. If the return
13094 value is based on the assumption that signed overflow is undefined,
13095 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13096 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13097
13098 bool
13099 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13100 {
13101 enum tree_code code;
13102 if (t == error_mark_node)
13103 return false;
13104
13105 code = TREE_CODE (t);
13106 switch (TREE_CODE_CLASS (code))
13107 {
13108 case tcc_binary:
13109 case tcc_comparison:
13110 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13111 TREE_TYPE (t),
13112 TREE_OPERAND (t, 0),
13113 TREE_OPERAND (t, 1),
13114 strict_overflow_p, depth);
13115
13116 case tcc_unary:
13117 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13118 TREE_TYPE (t),
13119 TREE_OPERAND (t, 0),
13120 strict_overflow_p, depth);
13121
13122 case tcc_constant:
13123 case tcc_declaration:
13124 case tcc_reference:
13125 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13126
13127 default:
13128 break;
13129 }
13130
13131 switch (code)
13132 {
13133 case TRUTH_AND_EXPR:
13134 case TRUTH_OR_EXPR:
13135 case TRUTH_XOR_EXPR:
13136 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13137 TREE_TYPE (t),
13138 TREE_OPERAND (t, 0),
13139 TREE_OPERAND (t, 1),
13140 strict_overflow_p, depth);
13141 case TRUTH_NOT_EXPR:
13142 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13143 TREE_TYPE (t),
13144 TREE_OPERAND (t, 0),
13145 strict_overflow_p, depth);
13146
13147 case COND_EXPR:
13148 case CONSTRUCTOR:
13149 case OBJ_TYPE_REF:
13150 case ASSERT_EXPR:
13151 case ADDR_EXPR:
13152 case WITH_SIZE_EXPR:
13153 case SSA_NAME:
13154 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13155
13156 default:
13157 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13158 }
13159 }
13160
13161 /* Return true if `t' is known to be non-negative. Handle warnings
13162 about undefined signed overflow. */
13163
13164 bool
13165 tree_expr_nonnegative_p (tree t)
13166 {
13167 bool ret, strict_overflow_p;
13168
13169 strict_overflow_p = false;
13170 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13171 if (strict_overflow_p)
13172 fold_overflow_warning (("assuming signed overflow does not occur when "
13173 "determining that expression is always "
13174 "non-negative"),
13175 WARN_STRICT_OVERFLOW_MISC);
13176 return ret;
13177 }
13178
13179
13180 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13181 For floating point we further ensure that T is not denormal.
13182 Similar logic is present in nonzero_address in rtlanal.h.
13183
13184 If the return value is based on the assumption that signed overflow
13185 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13186 change *STRICT_OVERFLOW_P. */
13187
13188 bool
13189 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13190 bool *strict_overflow_p)
13191 {
13192 switch (code)
13193 {
13194 case ABS_EXPR:
13195 return tree_expr_nonzero_warnv_p (op0,
13196 strict_overflow_p);
13197
13198 case NOP_EXPR:
13199 {
13200 tree inner_type = TREE_TYPE (op0);
13201 tree outer_type = type;
13202
13203 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13204 && tree_expr_nonzero_warnv_p (op0,
13205 strict_overflow_p));
13206 }
13207 break;
13208
13209 case NON_LVALUE_EXPR:
13210 return tree_expr_nonzero_warnv_p (op0,
13211 strict_overflow_p);
13212
13213 default:
13214 break;
13215 }
13216
13217 return false;
13218 }
13219
13220 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13221 For floating point we further ensure that T is not denormal.
13222 Similar logic is present in nonzero_address in rtlanal.h.
13223
13224 If the return value is based on the assumption that signed overflow
13225 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13226 change *STRICT_OVERFLOW_P. */
13227
13228 bool
13229 tree_binary_nonzero_warnv_p (enum tree_code code,
13230 tree type,
13231 tree op0,
13232 tree op1, bool *strict_overflow_p)
13233 {
13234 bool sub_strict_overflow_p;
13235 switch (code)
13236 {
13237 case POINTER_PLUS_EXPR:
13238 case PLUS_EXPR:
13239 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13240 {
13241 /* In the presence of negative values it is hard
13242 to say anything definite. */
13243 sub_strict_overflow_p = false;
13244 if (!tree_expr_nonnegative_warnv_p (op0,
13245 &sub_strict_overflow_p)
13246 || !tree_expr_nonnegative_warnv_p (op1,
13247 &sub_strict_overflow_p))
13248 return false;
13249 /* One of the operands must be positive and the other non-negative. */
13250 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13251 overflows, on a twos-complement machine the sum of two
13252 nonnegative numbers can never be zero. */
13253 return (tree_expr_nonzero_warnv_p (op0,
13254 strict_overflow_p)
13255 || tree_expr_nonzero_warnv_p (op1,
13256 strict_overflow_p));
13257 }
13258 break;
13259
13260 case MULT_EXPR:
13261 if (TYPE_OVERFLOW_UNDEFINED (type))
13262 {
13263 if (tree_expr_nonzero_warnv_p (op0,
13264 strict_overflow_p)
13265 && tree_expr_nonzero_warnv_p (op1,
13266 strict_overflow_p))
13267 {
13268 *strict_overflow_p = true;
13269 return true;
13270 }
13271 }
13272 break;
13273
13274 case MIN_EXPR:
13275 sub_strict_overflow_p = false;
13276 if (tree_expr_nonzero_warnv_p (op0,
13277 &sub_strict_overflow_p)
13278 && tree_expr_nonzero_warnv_p (op1,
13279 &sub_strict_overflow_p))
13280 {
13281 if (sub_strict_overflow_p)
13282 *strict_overflow_p = true;
13283 }
13284 break;
13285
13286 case MAX_EXPR:
13287 sub_strict_overflow_p = false;
13288 if (tree_expr_nonzero_warnv_p (op0,
13289 &sub_strict_overflow_p))
13290 {
13291 if (sub_strict_overflow_p)
13292 *strict_overflow_p = true;
13293
13294 /* When both operands are nonzero, then MAX must be too. */
13295 if (tree_expr_nonzero_warnv_p (op1,
13296 strict_overflow_p))
13297 return true;
13298
13299 /* MAX where operand 0 is positive is positive. */
13300 return tree_expr_nonnegative_warnv_p (op0,
13301 strict_overflow_p);
13302 }
13303 /* MAX where operand 1 is positive is positive. */
13304 else if (tree_expr_nonzero_warnv_p (op1,
13305 &sub_strict_overflow_p)
13306 && tree_expr_nonnegative_warnv_p (op1,
13307 &sub_strict_overflow_p))
13308 {
13309 if (sub_strict_overflow_p)
13310 *strict_overflow_p = true;
13311 return true;
13312 }
13313 break;
13314
13315 case BIT_IOR_EXPR:
13316 return (tree_expr_nonzero_warnv_p (op1,
13317 strict_overflow_p)
13318 || tree_expr_nonzero_warnv_p (op0,
13319 strict_overflow_p));
13320
13321 default:
13322 break;
13323 }
13324
13325 return false;
13326 }
13327
13328 /* Return true when T is an address and is known to be nonzero.
13329 For floating point we further ensure that T is not denormal.
13330 Similar logic is present in nonzero_address in rtlanal.h.
13331
13332 If the return value is based on the assumption that signed overflow
13333 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13334 change *STRICT_OVERFLOW_P. */
13335
13336 bool
13337 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13338 {
13339 bool sub_strict_overflow_p;
13340 switch (TREE_CODE (t))
13341 {
13342 case INTEGER_CST:
13343 return !integer_zerop (t);
13344
13345 case ADDR_EXPR:
13346 {
13347 tree base = TREE_OPERAND (t, 0);
13348
13349 if (!DECL_P (base))
13350 base = get_base_address (base);
13351
13352 if (base && TREE_CODE (base) == TARGET_EXPR)
13353 base = TARGET_EXPR_SLOT (base);
13354
13355 if (!base)
13356 return false;
13357
13358 /* For objects in the symbol table, check whether we know they are
13359 non-zero. Don't do anything for variables and functions before the
13360 symtab is built; it is quite possible that they will be declared weak later. */
13361 int nonzero_addr = maybe_nonzero_address (base);
13362 if (nonzero_addr >= 0)
13363 return nonzero_addr;
13364
13365 /* Constants are never weak. */
13366 if (CONSTANT_CLASS_P (base))
13367 return true;
13368
13369 return false;
13370 }
13371
13372 case COND_EXPR:
13373 sub_strict_overflow_p = false;
13374 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13375 &sub_strict_overflow_p)
13376 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13377 &sub_strict_overflow_p))
13378 {
13379 if (sub_strict_overflow_p)
13380 *strict_overflow_p = true;
13381 return true;
13382 }
13383 break;
13384
13385 case SSA_NAME:
13386 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13387 break;
13388 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13389
13390 default:
13391 break;
13392 }
13393 return false;
13394 }
13395
13396 #define integer_valued_real_p(X) \
13397 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13398
13399 #define RECURSE(X) \
13400 ((integer_valued_real_p) (X, depth + 1))
13401
13402 /* Return true if the floating point result of (CODE OP0) has an
13403 integer value. We also allow +Inf, -Inf and NaN to be considered
13404 integer values. Return false for signaling NaN.
13405
13406 DEPTH is the current nesting depth of the query. */
13407
13408 bool
13409 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13410 {
13411 switch (code)
13412 {
13413 case FLOAT_EXPR:
13414 return true;
13415
13416 case ABS_EXPR:
13417 return RECURSE (op0);
13418
13419 CASE_CONVERT:
13420 {
13421 tree type = TREE_TYPE (op0);
13422 if (TREE_CODE (type) == INTEGER_TYPE)
13423 return true;
13424 if (TREE_CODE (type) == REAL_TYPE)
13425 return RECURSE (op0);
13426 break;
13427 }
13428
13429 default:
13430 break;
13431 }
13432 return false;
13433 }
13434
13435 /* Return true if the floating point result of (CODE OP0 OP1) has an
13436 integer value. We also allow +Inf, -Inf and NaN to be considered
13437 integer values. Return false for signaling NaN.
13438
13439 DEPTH is the current nesting depth of the query. */
13440
13441 bool
13442 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13443 {
13444 switch (code)
13445 {
13446 case PLUS_EXPR:
13447 case MINUS_EXPR:
13448 case MULT_EXPR:
13449 case MIN_EXPR:
13450 case MAX_EXPR:
13451 return RECURSE (op0) && RECURSE (op1);
13452
13453 default:
13454 break;
13455 }
13456 return false;
13457 }
13458
13459 /* Return true if the floating point result of calling FN with arguments
13460 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13461 considered integer values. Return false for signaling NaN. If FN
13462 takes fewer than 2 arguments, the remaining ARGn are null.
13463
13464 DEPTH is the current nesting depth of the query. */
13465
13466 bool
13467 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13468 {
13469 switch (fn)
13470 {
13471 CASE_CFN_CEIL:
13472 CASE_CFN_FLOOR:
13473 CASE_CFN_NEARBYINT:
13474 CASE_CFN_RINT:
13475 CASE_CFN_ROUND:
13476 CASE_CFN_TRUNC:
13477 return true;
13478
13479 CASE_CFN_FMIN:
13480 CASE_CFN_FMAX:
13481 return RECURSE (arg0) && RECURSE (arg1);
13482
13483 default:
13484 break;
13485 }
13486 return false;
13487 }
13488
13489 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13490 has an integer value. We also allow +Inf, -Inf and NaN to be
13491 considered integer values. Return false for signaling NaN.
13492
13493 DEPTH is the current nesting depth of the query. */
13494
13495 bool
13496 integer_valued_real_single_p (tree t, int depth)
13497 {
13498 switch (TREE_CODE (t))
13499 {
13500 case REAL_CST:
13501 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13502
13503 case COND_EXPR:
13504 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13505
13506 case SSA_NAME:
13507 /* Limit the depth of recursion to avoid quadratic behavior.
13508 This is expected to catch almost all occurrences in practice.
13509 If this code misses important cases that unbounded recursion
13510 would not, passes that need this information could be revised
13511 to provide it through dataflow propagation. */
13512 return (!name_registered_for_update_p (t)
13513 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13514 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13515 depth));
13516
13517 default:
13518 break;
13519 }
13520 return false;
13521 }
13522
13523 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13524 has an integer value. We also allow +Inf, -Inf and NaN to be
13525 considered integer values. Return false for signaling NaN.
13526
13527 DEPTH is the current nesting depth of the query. */
13528
13529 static bool
13530 integer_valued_real_invalid_p (tree t, int depth)
13531 {
13532 switch (TREE_CODE (t))
13533 {
13534 case COMPOUND_EXPR:
13535 case MODIFY_EXPR:
13536 case BIND_EXPR:
13537 return RECURSE (TREE_OPERAND (t, 1));
13538
13539 case SAVE_EXPR:
13540 return RECURSE (TREE_OPERAND (t, 0));
13541
13542 default:
13543 break;
13544 }
13545 return false;
13546 }
13547
13548 #undef RECURSE
13549 #undef integer_valued_real_p
13550
13551 /* Return true if the floating point expression T has an integer value.
13552 We also allow +Inf, -Inf and NaN to be considered integer values.
13553 Return false for signaling NaN.
13554
13555 DEPTH is the current nesting depth of the query. */
13556
13557 bool
13558 integer_valued_real_p (tree t, int depth)
13559 {
13560 if (t == error_mark_node)
13561 return false;
13562
13563 tree_code code = TREE_CODE (t);
13564 switch (TREE_CODE_CLASS (code))
13565 {
13566 case tcc_binary:
13567 case tcc_comparison:
13568 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13569 TREE_OPERAND (t, 1), depth);
13570
13571 case tcc_unary:
13572 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13573
13574 case tcc_constant:
13575 case tcc_declaration:
13576 case tcc_reference:
13577 return integer_valued_real_single_p (t, depth);
13578
13579 default:
13580 break;
13581 }
13582
13583 switch (code)
13584 {
13585 case COND_EXPR:
13586 case SSA_NAME:
13587 return integer_valued_real_single_p (t, depth);
13588
13589 case CALL_EXPR:
13590 {
13591 tree arg0 = (call_expr_nargs (t) > 0
13592 ? CALL_EXPR_ARG (t, 0)
13593 : NULL_TREE);
13594 tree arg1 = (call_expr_nargs (t) > 1
13595 ? CALL_EXPR_ARG (t, 1)
13596 : NULL_TREE);
13597 return integer_valued_real_call_p (get_call_combined_fn (t),
13598 arg0, arg1, depth);
13599 }
13600
13601 default:
13602 return integer_valued_real_invalid_p (t, depth);
13603 }
13604 }
13605
13606 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13607 attempt to fold the expression to a constant without modifying TYPE,
13608 OP0 or OP1.
13609
13610 If the expression could be simplified to a constant, then return
13611 the constant. If the expression would not be simplified to a
13612 constant, then return NULL_TREE. */
13613
13614 tree
13615 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13616 {
13617 tree tem = fold_binary (code, type, op0, op1);
13618 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13619 }
13620
13621 /* Given the components of a unary expression CODE, TYPE and OP0,
13622 attempt to fold the expression to a constant without modifying
13623 TYPE or OP0.
13624
13625 If the expression could be simplified to a constant, then return
13626 the constant. If the expression would not be simplified to a
13627 constant, then return NULL_TREE. */
13628
13629 tree
13630 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13631 {
13632 tree tem = fold_unary (code, type, op0);
13633 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13634 }
13635
13636 /* If EXP represents referencing an element in a constant string
13637 (either via pointer arithmetic or array indexing), return the
13638 tree representing the value accessed, otherwise return NULL. */
13639
13640 tree
13641 fold_read_from_constant_string (tree exp)
13642 {
13643 if ((TREE_CODE (exp) == INDIRECT_REF
13644 || TREE_CODE (exp) == ARRAY_REF)
13645 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13646 {
13647 tree exp1 = TREE_OPERAND (exp, 0);
13648 tree index;
13649 tree string;
13650 location_t loc = EXPR_LOCATION (exp);
13651
13652 if (TREE_CODE (exp) == INDIRECT_REF)
13653 string = string_constant (exp1, &index);
13654 else
13655 {
13656 tree low_bound = array_ref_low_bound (exp);
13657 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13658
13659 /* Optimize the special-case of a zero lower bound.
13660
13661 We convert the low_bound to sizetype to avoid some problems
13662 with constant folding. (E.g. suppose the lower bound is 1,
13663 and its mode is QI. Without the conversion, (ARRAY
13664 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13665 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13666 if (! integer_zerop (low_bound))
13667 index = size_diffop_loc (loc, index,
13668 fold_convert_loc (loc, sizetype, low_bound));
13669
13670 string = exp1;
13671 }
13672
13673 if (string
13674 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13675 && TREE_CODE (string) == STRING_CST
13676 && TREE_CODE (index) == INTEGER_CST
13677 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13678 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13679 == MODE_INT)
13680 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13681 return build_int_cst_type (TREE_TYPE (exp),
13682 (TREE_STRING_POINTER (string)
13683 [TREE_INT_CST_LOW (index)]));
13684 }
13685 return NULL;
13686 }
13687
13688 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13689 an integer constant, real, or fixed-point constant.
13690
13691 TYPE is the type of the result. */
13692
13693 static tree
13694 fold_negate_const (tree arg0, tree type)
13695 {
13696 tree t = NULL_TREE;
13697
13698 switch (TREE_CODE (arg0))
13699 {
13700 case INTEGER_CST:
13701 {
13702 bool overflow;
13703 wide_int val = wi::neg (arg0, &overflow);
13704 t = force_fit_type (type, val, 1,
13705 (overflow && ! TYPE_UNSIGNED (type))
13706 || TREE_OVERFLOW (arg0));
13707 break;
13708 }
13709
13710 case REAL_CST:
13711 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13712 break;
13713
13714 case FIXED_CST:
13715 {
13716 FIXED_VALUE_TYPE f;
13717 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13718 &(TREE_FIXED_CST (arg0)), NULL,
13719 TYPE_SATURATING (type));
13720 t = build_fixed (type, f);
13721 /* Propagate overflow flags. */
13722 if (overflow_p | TREE_OVERFLOW (arg0))
13723 TREE_OVERFLOW (t) = 1;
13724 break;
13725 }
13726
13727 default:
13728 gcc_unreachable ();
13729 }
13730
13731 return t;
13732 }
13733
13734 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13735 an integer constant or real constant.
13736
13737 TYPE is the type of the result. */
13738
13739 tree
13740 fold_abs_const (tree arg0, tree type)
13741 {
13742 tree t = NULL_TREE;
13743
13744 switch (TREE_CODE (arg0))
13745 {
13746 case INTEGER_CST:
13747 {
13748 /* If the value is unsigned or non-negative, then the absolute value
13749 is the same as the ordinary value. */
13750 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13751 t = arg0;
13752
13753 /* If the value is negative, then the absolute value is
13754 its negation. */
13755 else
13756 {
13757 bool overflow;
13758 wide_int val = wi::neg (arg0, &overflow);
13759 t = force_fit_type (type, val, -1,
13760 overflow | TREE_OVERFLOW (arg0));
13761 }
13762 }
13763 break;
13764
13765 case REAL_CST:
13766 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13767 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13768 else
13769 t = arg0;
13770 break;
13771
13772 default:
13773 gcc_unreachable ();
13774 }
13775
13776 return t;
13777 }
13778
13779 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13780 constant. TYPE is the type of the result. */
13781
13782 static tree
13783 fold_not_const (const_tree arg0, tree type)
13784 {
13785 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13786
13787 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13788 }
13789
13790 /* Given CODE, a relational operator, the target type, TYPE and two
13791 constant operands OP0 and OP1, return the result of the
13792 relational operation. If the result is not a compile time
13793 constant, then return NULL_TREE. */
13794
13795 static tree
13796 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13797 {
13798 int result, invert;
13799
13800 /* From here on, the only cases we handle are when the result is
13801 known to be a constant. */
13802
13803 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13804 {
13805 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13806 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13807
13808 /* Handle the cases where either operand is a NaN. */
13809 if (real_isnan (c0) || real_isnan (c1))
13810 {
13811 switch (code)
13812 {
13813 case EQ_EXPR:
13814 case ORDERED_EXPR:
13815 result = 0;
13816 break;
13817
13818 case NE_EXPR:
13819 case UNORDERED_EXPR:
13820 case UNLT_EXPR:
13821 case UNLE_EXPR:
13822 case UNGT_EXPR:
13823 case UNGE_EXPR:
13824 case UNEQ_EXPR:
13825 result = 1;
13826 break;
13827
13828 case LT_EXPR:
13829 case LE_EXPR:
13830 case GT_EXPR:
13831 case GE_EXPR:
13832 case LTGT_EXPR:
13833 if (flag_trapping_math)
13834 return NULL_TREE;
13835 result = 0;
13836 break;
13837
13838 default:
13839 gcc_unreachable ();
13840 }
13841
13842 return constant_boolean_node (result, type);
13843 }
13844
13845 return constant_boolean_node (real_compare (code, c0, c1), type);
13846 }
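
/* Illustrative note on the NaN handling above (added commentary):
   NaN == 1.0 folds to false and NaN != 1.0 folds to true, but
   NaN < 1.0 is left unfolded under -ftrapping-math because the
   ordered comparison may raise an invalid-operand exception at
   run time; with -fno-trapping-math it folds to false.  */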
13847
13848 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13849 {
13850 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13851 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13852 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13853 }
13854
13855 /* Handle equality/inequality of complex constants. */
13856 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13857 {
13858 tree rcond = fold_relational_const (code, type,
13859 TREE_REALPART (op0),
13860 TREE_REALPART (op1));
13861 tree icond = fold_relational_const (code, type,
13862 TREE_IMAGPART (op0),
13863 TREE_IMAGPART (op1));
13864 if (code == EQ_EXPR)
13865 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13866 else if (code == NE_EXPR)
13867 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13868 else
13869 return NULL_TREE;
13870 }
13871
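/* Illustrative example (added commentary): (1 + 2i) == (1 + 3i)
   compares real and imaginary parts separately and combines the
   results with TRUTH_ANDIF_EXPR, giving true && false == false.  */
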
13872 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13873 {
13874 if (!VECTOR_TYPE_P (type))
13875 {
13876 /* We have a vector comparison with a scalar boolean result. */
13877 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13878 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13879 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13880 {
13881 tree elem0 = VECTOR_CST_ELT (op0, i);
13882 tree elem1 = VECTOR_CST_ELT (op1, i);
13883 tree tmp = fold_relational_const (code, type, elem0, elem1);
13884 if (tmp == NULL_TREE)
13885 return NULL_TREE;
13886 if (integer_zerop (tmp))
13887 return constant_boolean_node (false, type);
13888 }
13889 return constant_boolean_node (true, type);
13890 }
13891 unsigned count = VECTOR_CST_NELTS (op0);
13892 tree *elts = XALLOCAVEC (tree, count);
13893 gcc_assert (VECTOR_CST_NELTS (op1) == count
13894 && TYPE_VECTOR_SUBPARTS (type) == count);
13895
13896 for (unsigned i = 0; i < count; i++)
13897 {
13898 tree elem_type = TREE_TYPE (type);
13899 tree elem0 = VECTOR_CST_ELT (op0, i);
13900 tree elem1 = VECTOR_CST_ELT (op1, i);
13901
13902 tree tem = fold_relational_const (code, elem_type,
13903 elem0, elem1);
13904
13905 if (tem == NULL_TREE)
13906 return NULL_TREE;
13907
13908 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13909 }
13910
13911 return build_vector (type, elts);
13912 }
13913
13914 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13915
13916 To compute GT, swap the arguments and do LT.
13917 To compute GE, do LT and invert the result.
13918 To compute LE, swap the arguments, do LT and invert the result.
13919 To compute NE, do EQ and invert the result.
13920
13921 Therefore, the code below must handle only EQ and LT. */
13922
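/* Illustrative example (added commentary): 5 > 3 is rewritten as
   3 < 5, which tree_int_cst_lt evaluates directly; 5 >= 3 becomes
   the inversion of 5 < 3, i.e. !false == true.  */
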
13923 if (code == LE_EXPR || code == GT_EXPR)
13924 {
13925 std::swap (op0, op1);
13926 code = swap_tree_comparison (code);
13927 }
13928
13929 /* Note that it is safe to invert for real values here because we
13930 have already handled the one case where it matters. */
13931
13932 invert = 0;
13933 if (code == NE_EXPR || code == GE_EXPR)
13934 {
13935 invert = 1;
13936 code = invert_tree_comparison (code, false);
13937 }
13938
13939 /* Compute a result for LT or EQ if the arguments permit;
13940 otherwise return NULL_TREE. */
13941 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13942 {
13943 if (code == EQ_EXPR)
13944 result = tree_int_cst_equal (op0, op1);
13945 else
13946 result = tree_int_cst_lt (op0, op1);
13947 }
13948 else
13949 return NULL_TREE;
13950
13951 if (invert)
13952 result ^= 1;
13953 return constant_boolean_node (result, type);
13954 }
13955
13956 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13957 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13958 itself. */
13959
13960 tree
13961 fold_build_cleanup_point_expr (tree type, tree expr)
13962 {
13963 /* If the expression does not have side effects then we don't have to wrap
13964 it with a cleanup point expression. */
13965 if (!TREE_SIDE_EFFECTS (expr))
13966 return expr;
13967
13968 /* If the expression is a RETURN_EXPR, check whether the expression inside
13969 the return, or the right-hand side of the MODIFY_EXPR inside the return,
13970 is free of side effects. If either is, we don't need to wrap the whole
13971 expression in a cleanup point expression. Note we don't check the
13972 left-hand side of the MODIFY_EXPR because it should always be the return decl. */
13973 if (TREE_CODE (expr) == RETURN_EXPR)
13974 {
13975 tree op = TREE_OPERAND (expr, 0);
13976 if (!op || !TREE_SIDE_EFFECTS (op))
13977 return expr;
13978 op = TREE_OPERAND (op, 1);
13979 if (!TREE_SIDE_EFFECTS (op))
13980 return expr;
13981 }
13982
13983 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
13984 }
13985
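/* Illustrative example (added commentary): for a statement such as
   "return make_temp ();" (a hypothetical call returning a class with
   a destructor), the call has side effects, so the RETURN_EXPR is
   wrapped in a CLEANUP_POINT_EXPR so temporaries get cleaned up; a
   plain "return x;" comes back unchanged.  */
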
13986 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13987 of an indirection through OP0, or NULL_TREE if no simplification is
13988 possible. */
13989
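/* Illustrative examples of the simplifications below (added
   commentary): *(int *)&iarr folds to iarr[0] when iarr is an array
   of int, *(float *)&fc folds to __real__ fc when fc is _Complex
   float, and *(int *)&v folds to BIT_FIELD_REF <v, 32, 0> when v is
   a vector of 32-bit ints.  */
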
13990 tree
13991 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13992 {
13993 tree sub = op0;
13994 tree subtype;
13995
13996 STRIP_NOPS (sub);
13997 subtype = TREE_TYPE (sub);
13998 if (!POINTER_TYPE_P (subtype)
13999 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14000 return NULL_TREE;
14001
14002 if (TREE_CODE (sub) == ADDR_EXPR)
14003 {
14004 tree op = TREE_OPERAND (sub, 0);
14005 tree optype = TREE_TYPE (op);
14006 /* *&CONST_DECL -> the value of the CONST_DECL. */
14007 if (TREE_CODE (op) == CONST_DECL)
14008 return DECL_INITIAL (op);
14009 /* *&p => p; make sure to handle *&"str"[cst] here. */
14010 if (type == optype)
14011 {
14012 tree fop = fold_read_from_constant_string (op);
14013 if (fop)
14014 return fop;
14015 else
14016 return op;
14017 }
14018 /* *(foo *)&fooarray => fooarray[0] */
14019 else if (TREE_CODE (optype) == ARRAY_TYPE
14020 && type == TREE_TYPE (optype)
14021 && (!in_gimple_form
14022 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14023 {
14024 tree type_domain = TYPE_DOMAIN (optype);
14025 tree min_val = size_zero_node;
14026 if (type_domain && TYPE_MIN_VALUE (type_domain))
14027 min_val = TYPE_MIN_VALUE (type_domain);
14028 if (in_gimple_form
14029 && TREE_CODE (min_val) != INTEGER_CST)
14030 return NULL_TREE;
14031 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14032 NULL_TREE, NULL_TREE);
14033 }
14034 /* *(foo *)&complexfoo => __real__ complexfoo */
14035 else if (TREE_CODE (optype) == COMPLEX_TYPE
14036 && type == TREE_TYPE (optype))
14037 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14038 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14039 else if (TREE_CODE (optype) == VECTOR_TYPE
14040 && type == TREE_TYPE (optype))
14041 {
14042 tree part_width = TYPE_SIZE (type);
14043 tree index = bitsize_int (0);
14044 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14045 }
14046 }
14047
14048 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14049 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14050 {
14051 tree op00 = TREE_OPERAND (sub, 0);
14052 tree op01 = TREE_OPERAND (sub, 1);
14053
14054 STRIP_NOPS (op00);
14055 if (TREE_CODE (op00) == ADDR_EXPR)
14056 {
14057 tree op00type;
14058 op00 = TREE_OPERAND (op00, 0);
14059 op00type = TREE_TYPE (op00);
14060
14061 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14062 if (TREE_CODE (op00type) == VECTOR_TYPE
14063 && type == TREE_TYPE (op00type))
14064 {
14065 tree part_width = TYPE_SIZE (type);
14066 unsigned HOST_WIDE_INT max_offset
14067 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14068 * TYPE_VECTOR_SUBPARTS (op00type));
14069 if (tree_int_cst_sign_bit (op01) == 0
14070 && compare_tree_int (op01, max_offset) == -1)
14071 {
14072 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14073 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14074 tree index = bitsize_int (indexi);
14075 return fold_build3_loc (loc,
14076 BIT_FIELD_REF, type, op00,
14077 part_width, index);
14078 }
14079 }
14080 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14081 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14082 && type == TREE_TYPE (op00type))
14083 {
14084 tree size = TYPE_SIZE_UNIT (type);
14085 if (tree_int_cst_equal (size, op01))
14086 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14087 }
14088 /* ((foo *)&fooarray)[1] => fooarray[1] */
14089 else if (TREE_CODE (op00type) == ARRAY_TYPE
14090 && type == TREE_TYPE (op00type))
14091 {
14092 tree type_domain = TYPE_DOMAIN (op00type);
14093 tree min_val = size_zero_node;
14094 if (type_domain && TYPE_MIN_VALUE (type_domain))
14095 min_val = TYPE_MIN_VALUE (type_domain);
14096 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14097 TYPE_SIZE_UNIT (type));
14098 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14099 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14100 NULL_TREE, NULL_TREE);
14101 }
14102 }
14103 }
14104
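/* Illustrative arithmetic for the vector case above (added
   commentary): for a vector of four 32-bit ints, part_width is 32
   bits, max_offset is 4 * 4 == 16 bytes, and a byte offset of 4
   becomes bit index 32, yielding BIT_FIELD_REF <v, 32, 32>, i.e.
   element 1.  */
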
14105 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14106 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14107 && type == TREE_TYPE (TREE_TYPE (subtype))
14108 && (!in_gimple_form
14109 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14110 {
14111 tree type_domain;
14112 tree min_val = size_zero_node;
14113 sub = build_fold_indirect_ref_loc (loc, sub);
14114 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14115 if (type_domain && TYPE_MIN_VALUE (type_domain))
14116 min_val = TYPE_MIN_VALUE (type_domain);
14117 if (in_gimple_form
14118 && TREE_CODE (min_val) != INTEGER_CST)
14119 return NULL_TREE;
14120 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14121 NULL_TREE);
14122 }
14123
14124 return NULL_TREE;
14125 }
14126
14127 /* Builds an expression for an indirection through T, simplifying some
14128 cases. */
14129
14130 tree
14131 build_fold_indirect_ref_loc (location_t loc, tree t)
14132 {
14133 tree type = TREE_TYPE (TREE_TYPE (t));
14134 tree sub = fold_indirect_ref_1 (loc, type, t);
14135
14136 if (sub)
14137 return sub;
14138
14139 return build1_loc (loc, INDIRECT_REF, type, t);
14140 }
14141
14142 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14143
14144 tree
14145 fold_indirect_ref_loc (location_t loc, tree t)
14146 {
14147 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14148
14149 if (sub)
14150 return sub;
14151 else
14152 return t;
14153 }
14154
14155 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14156 whose result is ignored. The type of the returned tree need not be
14157 the same as that of the original expression. */
14158
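/* Illustrative example (added commentary): for the ignored expression
   (x = 1, y + 2), the side-effect-free second operand is dropped and
   x = 1 is returned; an expression with no side effects at all folds
   to integer_zero_node.  */
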
14159 tree
14160 fold_ignored_result (tree t)
14161 {
14162 if (!TREE_SIDE_EFFECTS (t))
14163 return integer_zero_node;
14164
14165 for (;;)
14166 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14167 {
14168 case tcc_unary:
14169 t = TREE_OPERAND (t, 0);
14170 break;
14171
14172 case tcc_binary:
14173 case tcc_comparison:
14174 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14175 t = TREE_OPERAND (t, 0);
14176 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14177 t = TREE_OPERAND (t, 1);
14178 else
14179 return t;
14180 break;
14181
14182 case tcc_expression:
14183 switch (TREE_CODE (t))
14184 {
14185 case COMPOUND_EXPR:
14186 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14187 return t;
14188 t = TREE_OPERAND (t, 0);
14189 break;
14190
14191 case COND_EXPR:
14192 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14193 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14194 return t;
14195 t = TREE_OPERAND (t, 0);
14196 break;
14197
14198 default:
14199 return t;
14200 }
14201 break;
14202
14203 default:
14204 return t;
14205 }
14206 }
14207
14208 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14209
14210 tree
14211 round_up_loc (location_t loc, tree value, unsigned int divisor)
14212 {
14213 tree div = NULL_TREE;
14214
14215 if (divisor == 1)
14216 return value;
14217
14218 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14219 have to do anything. Only do this when VALUE is not a constant,
14220 because in that case this check is more expensive than just
14221 doing the rounding. */
14222 if (TREE_CODE (value) != INTEGER_CST)
14223 {
14224 div = build_int_cst (TREE_TYPE (value), divisor);
14225
14226 if (multiple_of_p (TREE_TYPE (value), value, div))
14227 return value;
14228 }
14229
14230 /* If divisor is a power of two, simplify this to bit manipulation. */
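/* Illustrative arithmetic (added commentary): round_up (13, 8)
   computes (13 + 7) & -8 == 20 & ~7 == 16.  */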
14231 if (pow2_or_zerop (divisor))
14232 {
14233 if (TREE_CODE (value) == INTEGER_CST)
14234 {
14235 wide_int val = value;
14236 bool overflow_p;
14237
14238 if ((val & (divisor - 1)) == 0)
14239 return value;
14240
14241 overflow_p = TREE_OVERFLOW (value);
14242 val += divisor - 1;
14243 val &= (int) -divisor;
14244 if (val == 0)
14245 overflow_p = true;
14246
14247 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14248 }
14249 else
14250 {
14251 tree t;
14252
14253 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14254 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14255 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14256 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14257 }
14258 }
14259 else
14260 {
14261 if (!div)
14262 div = build_int_cst (TREE_TYPE (value), divisor);
14263 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14264 value = size_binop_loc (loc, MULT_EXPR, value, div);
14265 }
14266
14267 return value;
14268 }
14269
14270 /* Likewise, but round down. */
14271
14272 tree
14273 round_down_loc (location_t loc, tree value, int divisor)
14274 {
14275 tree div = NULL_TREE;
14276
14277 gcc_assert (divisor > 0);
14278 if (divisor == 1)
14279 return value;
14280
14281 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14282 have to do anything. Only do this when VALUE is not a constant,
14283 because in that case this check is more expensive than just
14284 doing the rounding. */
14285 if (TREE_CODE (value) != INTEGER_CST)
14286 {
14287 div = build_int_cst (TREE_TYPE (value), divisor);
14288
14289 if (multiple_of_p (TREE_TYPE (value), value, div))
14290 return value;
14291 }
14292
14293 /* If divisor is a power of two, simplify this to bit manipulation. */
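/* Illustrative arithmetic (added commentary): round_down (13, 8)
   computes 13 & -8 == 8.  */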
14294 if (pow2_or_zerop (divisor))
14295 {
14296 tree t;
14297
14298 t = build_int_cst (TREE_TYPE (value), -divisor);
14299 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14300 }
14301 else
14302 {
14303 if (!div)
14304 div = build_int_cst (TREE_TYPE (value), divisor);
14305 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14306 value = size_binop_loc (loc, MULT_EXPR, value, div);
14307 }
14308
14309 return value;
14310 }
14311
14312 /* Returns a pointer to the base of the object addressed by EXP and
14313 extracts information about the offset of the access, storing it
14314 to *PBITPOS and *POFFSET. */
14315
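/* Illustrative example (added commentary): for &s.f where field f is
   at byte offset 4, the core is &s and *PBITPOS is 32; for p + 4 the
   core is p, *PBITPOS is 32 and *POFFSET is NULL_TREE.  */
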
14316 static tree
14317 split_address_to_core_and_offset (tree exp,
14318 HOST_WIDE_INT *pbitpos, tree *poffset)
14319 {
14320 tree core;
14321 machine_mode mode;
14322 int unsignedp, reversep, volatilep;
14323 HOST_WIDE_INT bitsize;
14324 location_t loc = EXPR_LOCATION (exp);
14325
14326 if (TREE_CODE (exp) == ADDR_EXPR)
14327 {
14328 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14329 poffset, &mode, &unsignedp, &reversep,
14330 &volatilep);
14331 core = build_fold_addr_expr_loc (loc, core);
14332 }
14333 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14334 {
14335 core = TREE_OPERAND (exp, 0);
14336 STRIP_NOPS (core);
14337 *pbitpos = 0;
14338 *poffset = TREE_OPERAND (exp, 1);
14339 if (TREE_CODE (*poffset) == INTEGER_CST)
14340 {
14341 offset_int tem = wi::sext (wi::to_offset (*poffset),
14342 TYPE_PRECISION (TREE_TYPE (*poffset)));
14343 tem <<= LOG2_BITS_PER_UNIT;
14344 if (wi::fits_shwi_p (tem))
14345 {
14346 *pbitpos = tem.to_shwi ();
14347 *poffset = NULL_TREE;
14348 }
14349 }
14350 }
14351 else
14352 {
14353 core = exp;
14354 *pbitpos = 0;
14355 *poffset = NULL_TREE;
14356 }
14357
14358 return core;
14359 }
14360
14361 /* Returns true if addresses of E1 and E2 differ by a constant, false
14362 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14363
14364 bool
14365 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14366 {
14367 tree core1, core2;
14368 HOST_WIDE_INT bitpos1, bitpos2;
14369 tree toffset1, toffset2, tdiff, type;
14370
14371 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14372 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14373
14374 if (bitpos1 % BITS_PER_UNIT != 0
14375 || bitpos2 % BITS_PER_UNIT != 0
14376 || !operand_equal_p (core1, core2, 0))
14377 return false;
14378
14379 if (toffset1 && toffset2)
14380 {
14381 type = TREE_TYPE (toffset1);
14382 if (type != TREE_TYPE (toffset2))
14383 toffset2 = fold_convert (type, toffset2);
14384
14385 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14386 if (!cst_and_fits_in_hwi (tdiff))
14387 return false;
14388
14389 *diff = int_cst_value (tdiff);
14390 }
14391 else if (toffset1 || toffset2)
14392 {
14393 /* If only one of the offsets is non-constant, the difference cannot
14394 be a constant. */
14395 return false;
14396 }
14397 else
14398 *diff = 0;
14399
14400 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14401 return true;
14402 }
14403
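/* Illustrative example (added commentary): for E1 == &a[3] and
   E2 == &a[1] with 4-byte elements, both cores are &a, the bit
   positions are 96 and 32, and *DIFF is set to (96 - 32) / 8 == 8.  */
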
14404 /* Return OFF converted to a pointer offset type suitable as offset for
14405 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14406 tree
14407 convert_to_ptrofftype_loc (location_t loc, tree off)
14408 {
14409 return fold_convert_loc (loc, sizetype, off);
14410 }
14411
14412 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14413 tree
14414 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14415 {
14416 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14417 ptr, convert_to_ptrofftype_loc (loc, off));
14418 }
14419
14420 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14421 tree
14422 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14423 {
14424 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14425 ptr, size_int (off));
14426 }
14427
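/* Illustrative example (added commentary): fold_build_pointer_plus_hwi
   (loc, ptr, 4) builds ptr p+ 4 with the offset already in sizetype,
   the offset type POINTER_PLUS_EXPR expects.  */
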
14428 /* Return a char pointer for a C string if it is a string constant
14429 or the sum of a string constant and an integer constant. We only
14430 support string constants properly terminated by a '\0' character.
14431 If STRLEN is a valid pointer, the length of the returned string
14432 (including the terminating character) is stored to *STRLEN. */
14433
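/* Illustrative example (added commentary): for the tree form of
   "hello" + 1, c_getstr returns a pointer to "ello" and stores 5
   (four characters plus the terminating '\0') to *STRLEN.  */
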
14434 const char *
14435 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14436 {
14437 tree offset_node;
14438
14439 if (strlen)
14440 *strlen = 0;
14441
14442 src = string_constant (src, &offset_node);
14443 if (src == 0)
14444 return NULL;
14445
14446 unsigned HOST_WIDE_INT offset = 0;
14447 if (offset_node != NULL_TREE)
14448 {
14449 if (!tree_fits_uhwi_p (offset_node))
14450 return NULL;
14451 else
14452 offset = tree_to_uhwi (offset_node);
14453 }
14454
14455 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14456 const char *string = TREE_STRING_POINTER (src);
14457
14458 /* Support only properly null-terminated strings. */
14459 if (string_length == 0
14460 || string[string_length - 1] != '\0'
14461 || offset >= string_length)
14462 return NULL;
14463
14464 if (strlen)
14465 *strlen = string_length - offset;
14466 return string + offset;
14467 }
14468
14469 #if CHECKING_P
14470
14471 namespace selftest {
14472
14473 /* Helper functions for writing tests of folding trees. */
14474
14475 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14476
14477 static void
14478 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14479 tree constant)
14480 {
14481 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14482 }
14483
14484 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14485 wrapping WRAPPED_EXPR. */
14486
14487 static void
14488 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14489 tree wrapped_expr)
14490 {
14491 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14492 ASSERT_NE (wrapped_expr, result);
14493 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14494 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14495 }
14496
14497 /* Verify that various arithmetic binary operations are folded
14498 correctly. */
14499
14500 static void
14501 test_arithmetic_folding ()
14502 {
14503 tree type = integer_type_node;
14504 tree x = create_tmp_var_raw (type, "x");
14505 tree zero = build_zero_cst (type);
14506 tree one = build_int_cst (type, 1);
14507
14508 /* Addition. */
14509 /* 1 <-- (0 + 1) */
14510 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14511 one);
14512 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14513 one);
14514
14515 /* (nonlvalue)x <-- (x + 0) */
14516 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14517 x);
14518
14519 /* Subtraction. */
14520 /* 0 <-- (x - x) */
14521 assert_binop_folds_to_const (x, MINUS_EXPR, x,
14522 zero);
14523 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14524 x);
14525
14526 /* Multiplication. */
14527 /* 0 <-- (x * 0) */
14528 assert_binop_folds_to_const (x, MULT_EXPR, zero,
14529 zero);
14530
14531 /* (nonlvalue)x <-- (x * 1) */
14532 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14533 x);
14534 }
14535
14536 /* Verify that various binary operations on vectors are folded
14537 correctly. */
14538
14539 static void
14540 test_vector_folding ()
14541 {
14542 tree inner_type = integer_type_node;
14543 tree type = build_vector_type (inner_type, 4);
14544 tree zero = build_zero_cst (type);
14545 tree one = build_one_cst (type);
14546
14547 /* Verify equality tests that return a scalar boolean result. */
14548 tree res_type = boolean_type_node;
14549 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14550 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14551 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14552 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14553 }
14554
14555 /* Run all of the selftests within this file. */
14556
14557 void
14558 fold_const_c_tests ()
14559 {
14560 test_arithmetic_folding ();
14561 test_vector_folding ();
14562 }
14563
14564 } // namespace selftest
14565
14566 #endif /* CHECKING_P */