1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision etc. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_kind and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "params.h"
75 #include "tree-into-ssa.h"
76 #include "md5.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
79 #include "tree-vrp.h"
80 #include "tree-ssanames.h"
81 #include "selftest.h"
82 #include "stringpool.h"
83 #include "attribs.h"
84
85 /* Nonzero if we are folding constants inside an initializer; zero
86 otherwise. */
87 int folding_initializer = 0;
88
89 /* The following constants represent a bit based encoding of GCC's
90 comparison operators. This encoding simplifies transformations
91 on relational comparison operators, such as AND and OR. */
92 enum comparison_code {
93 COMPCODE_FALSE = 0,
94 COMPCODE_LT = 1,
95 COMPCODE_EQ = 2,
96 COMPCODE_LE = 3,
97 COMPCODE_GT = 4,
98 COMPCODE_LTGT = 5,
99 COMPCODE_GE = 6,
100 COMPCODE_ORD = 7,
101 COMPCODE_UNORD = 8,
102 COMPCODE_UNLT = 9,
103 COMPCODE_UNEQ = 10,
104 COMPCODE_UNLE = 11,
105 COMPCODE_UNGT = 12,
106 COMPCODE_NE = 13,
107 COMPCODE_UNGE = 14,
108 COMPCODE_TRUE = 15
109 };
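
/* The encoding assigns one bit each to LT (1), EQ (2), GT (4) and
   UNORD (8), so the compound codes above are simply bitwise ORs of
   their components, e.g.

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)
     COMPCODE_NE   == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)
     COMPCODE_UNLE == (COMPCODE_UNORD | COMPCODE_LT | COMPCODE_EQ)

   Since the four outcomes are mutually exclusive and exhaustive,
   ANDing two codes yields the code that holds when both hold, and
   ORing yields the one that holds when either holds.  */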
110
111 static bool negate_expr_p (tree);
112 static tree negate_expr (tree);
113 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
114 static enum comparison_code comparison_to_compcode (enum tree_code);
115 static enum tree_code compcode_to_comparison (enum comparison_code);
116 static int twoval_comparison_p (tree, tree *, tree *, int *);
117 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
118 static tree optimize_bit_field_compare (location_t, enum tree_code,
119 tree, tree, tree);
120 static int simple_operand_p (const_tree);
121 static bool simple_operand_p_2 (tree);
122 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
123 static tree range_predecessor (tree);
124 static tree range_successor (tree);
125 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
126 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
127 static tree unextend (tree, int, int, tree);
128 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
129 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
130 static tree fold_binary_op_with_conditional_arg (location_t,
131 enum tree_code, tree,
132 tree, tree,
133 tree, tree, int);
134 static tree fold_negate_const (tree, tree);
135 static tree fold_not_const (const_tree, tree);
136 static tree fold_relational_const (enum tree_code, tree, tree, tree);
137 static tree fold_convert_const (enum tree_code, tree, tree);
138 static tree fold_view_convert_expr (tree, tree);
139 static tree fold_negate_expr (location_t, tree);
140
141
142 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
143 Otherwise, return LOC. */
144
145 static location_t
146 expr_location_or (tree t, location_t loc)
147 {
148 location_t tloc = EXPR_LOCATION (t);
149 return tloc == UNKNOWN_LOCATION ? loc : tloc;
150 }
151
152 /* Similar to protected_set_expr_location, but never modify X in place;
153 if the location can and needs to be set, unshare X first. */
154
155 static inline tree
156 protected_set_expr_location_unshare (tree x, location_t loc)
157 {
158 if (CAN_HAVE_LOCATION_P (x)
159 && EXPR_LOCATION (x) != loc
160 && !(TREE_CODE (x) == SAVE_EXPR
161 || TREE_CODE (x) == TARGET_EXPR
162 || TREE_CODE (x) == BIND_EXPR))
163 {
164 x = copy_node (x);
165 SET_EXPR_LOCATION (x, loc);
166 }
167 return x;
168 }
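
/* For instance, handing a shared PLUS_EXPR to the function above
   yields a fresh copy carrying LOC, while a SAVE_EXPR comes back
   unchanged: copying a SAVE_EXPR would create a second, distinct
   save point and break its evaluate-once sharing semantics.  */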
169 \f
170 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
171 division and returns the quotient. Otherwise returns
172 NULL_TREE. */
173
174 tree
175 div_if_zero_remainder (const_tree arg1, const_tree arg2)
176 {
177 widest_int quo;
178
179 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
180 SIGNED, &quo))
181 return wide_int_to_tree (TREE_TYPE (arg1), quo);
182
183 return NULL_TREE;
184 }
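
/* For example (a sketch, with 12, 13 and 4 standing for INTEGER_CSTs
   of those values):

     div_if_zero_remainder (12, 4)  ==> INTEGER_CST 3
     div_if_zero_remainder (13, 4)  ==> NULL_TREE  (remainder is 1)  */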
185 \f
186 /* This is nonzero if we should defer warnings about undefined
187 overflow. This facility exists because these warnings are a
188 special case. The code to estimate loop iterations does not want
189 to issue any warnings, since it works with expressions which do not
190 occur in user code. Various bits of cleanup code call fold(), but
191 only use the result if it has certain characteristics (e.g., is a
192 constant); that code only wants to issue a warning if the result is
193 used. */
194
195 static int fold_deferring_overflow_warnings;
196
197 /* If a warning about undefined overflow is deferred, this is the
198 warning. Note that this may cause us to turn two warnings into
199 one, but that is fine since it is sufficient to only give one
200 warning per expression. */
201
202 static const char* fold_deferred_overflow_warning;
203
204 /* If a warning about undefined overflow is deferred, this is the
205 level at which the warning should be emitted. */
206
207 static enum warn_strict_overflow_code fold_deferred_overflow_code;
208
209 /* Start deferring overflow warnings. We could use a stack here to
210 permit nested calls, but at present it is not necessary. */
211
212 void
213 fold_defer_overflow_warnings (void)
214 {
215 ++fold_deferring_overflow_warnings;
216 }
217
218 /* Stop deferring overflow warnings. If there is a pending warning,
219 and ISSUE is true, then issue the warning if appropriate. STMT is
220 the statement with which the warning should be associated (used for
221 location information); STMT may be NULL. CODE is the level of the
222 warning--a warn_strict_overflow_code value. This function will use
223 the smaller of CODE and the deferred code when deciding whether to
224 issue the warning. CODE may be zero to mean to always use the
225 deferred code. */
226
227 void
228 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
229 {
230 const char *warnmsg;
231 location_t locus;
232
233 gcc_assert (fold_deferring_overflow_warnings > 0);
234 --fold_deferring_overflow_warnings;
235 if (fold_deferring_overflow_warnings > 0)
236 {
237 if (fold_deferred_overflow_warning != NULL
238 && code != 0
239 && code < (int) fold_deferred_overflow_code)
240 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
241 return;
242 }
243
244 warnmsg = fold_deferred_overflow_warning;
245 fold_deferred_overflow_warning = NULL;
246
247 if (!issue || warnmsg == NULL)
248 return;
249
250 if (gimple_no_warning_p (stmt))
251 return;
252
253 /* Use the smallest code level when deciding to issue the
254 warning. */
255 if (code == 0 || code > (int) fold_deferred_overflow_code)
256 code = fold_deferred_overflow_code;
257
258 if (!issue_strict_overflow_warning (code))
259 return;
260
261 if (stmt == NULL)
262 locus = input_location;
263 else
264 locus = gimple_location (stmt);
265 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
266 }
267
268 /* Stop deferring overflow warnings, ignoring any deferred
269 warnings. */
270
271 void
272 fold_undefer_and_ignore_overflow_warnings (void)
273 {
274 fold_undefer_overflow_warnings (false, NULL, 0);
275 }
276
277 /* Whether we are deferring overflow warnings. */
278
279 bool
280 fold_deferring_overflow_warnings_p (void)
281 {
282 return fold_deferring_overflow_warnings > 0;
283 }
284
285 /* This is called when we fold something based on the fact that signed
286 overflow is undefined. */
287
288 void
289 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
290 {
291 if (fold_deferring_overflow_warnings > 0)
292 {
293 if (fold_deferred_overflow_warning == NULL
294 || wc < fold_deferred_overflow_code)
295 {
296 fold_deferred_overflow_warning = gmsgid;
297 fold_deferred_overflow_code = wc;
298 }
299 }
300 else if (issue_strict_overflow_warning (wc))
301 warning (OPT_Wstrict_overflow, gmsgid);
302 }
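
/* A typical use of the machinery above, following the function
   comments: callers that may discard fold()'s result bracket the
   call so that no overflow warning escapes unless the result is
   actually used.

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     ...decide whether RES will be kept...
     fold_undefer_overflow_warnings (res_is_used, stmt, 0);

   Here RES_IS_USED is whatever condition decides that (an
   illustrative name), and the final 0 asks for the deferred
   warning's own level to be used.  */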
303 \f
304 /* Return true if the built-in mathematical function specified by FN
305 is odd, i.e. -f(x) == f(-x). */
306
307 bool
308 negate_mathfn_p (combined_fn fn)
309 {
310 switch (fn)
311 {
312 CASE_CFN_ASIN:
313 CASE_CFN_ASINH:
314 CASE_CFN_ATAN:
315 CASE_CFN_ATANH:
316 CASE_CFN_CASIN:
317 CASE_CFN_CASINH:
318 CASE_CFN_CATAN:
319 CASE_CFN_CATANH:
320 CASE_CFN_CBRT:
321 CASE_CFN_CPROJ:
322 CASE_CFN_CSIN:
323 CASE_CFN_CSINH:
324 CASE_CFN_CTAN:
325 CASE_CFN_CTANH:
326 CASE_CFN_ERF:
327 CASE_CFN_LLROUND:
328 CASE_CFN_LROUND:
329 CASE_CFN_ROUND:
330 CASE_CFN_SIN:
331 CASE_CFN_SINH:
332 CASE_CFN_TAN:
333 CASE_CFN_TANH:
334 CASE_CFN_TRUNC:
335 return true;
336
337 CASE_CFN_LLRINT:
338 CASE_CFN_LRINT:
339 CASE_CFN_NEARBYINT:
340 CASE_CFN_RINT:
341 return !flag_rounding_math;
342
343 default:
344 break;
345 }
346 return false;
347 }
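
/* For instance, CFN_SIN is listed because -sin(x) == sin(-x) holds
   for all x; an even function such as cos does not qualify, since
   -cos(x) != cos(-x) in general.  */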
348
349 /* Check whether we may negate an integer constant T without causing
350 overflow. */
351
352 bool
353 may_negate_without_overflow_p (const_tree t)
354 {
355 tree type;
356
357 gcc_assert (TREE_CODE (t) == INTEGER_CST);
358
359 type = TREE_TYPE (t);
360 if (TYPE_UNSIGNED (type))
361 return false;
362
363 return !wi::only_sign_bit_p (wi::to_wide (t));
364 }
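
/* E.g. for 32-bit int the single value that fails is INT_MIN (only
   the sign bit set), because -INT_MIN is not representable;
   wi::only_sign_bit_p detects exactly that pattern.  */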
365
366 /* Determine whether an expression T can be cheaply negated using
367 the function negate_expr without introducing undefined overflow. */
368
369 static bool
370 negate_expr_p (tree t)
371 {
372 tree type;
373
374 if (t == 0)
375 return false;
376
377 type = TREE_TYPE (t);
378
379 STRIP_SIGN_NOPS (t);
380 switch (TREE_CODE (t))
381 {
382 case INTEGER_CST:
383 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
384 return true;
385
386 /* Check that -CST will not overflow type. */
387 return may_negate_without_overflow_p (t);
388 case BIT_NOT_EXPR:
389 return (INTEGRAL_TYPE_P (type)
390 && TYPE_OVERFLOW_WRAPS (type));
391
392 case FIXED_CST:
393 return true;
394
395 case NEGATE_EXPR:
396 return !TYPE_OVERFLOW_SANITIZED (type);
397
398 case REAL_CST:
399 /* We want to canonicalize to positive real constants. Pretend
400 that only negative ones can be easily negated. */
401 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
402
403 case COMPLEX_CST:
404 return negate_expr_p (TREE_REALPART (t))
405 && negate_expr_p (TREE_IMAGPART (t));
406
407 case VECTOR_CST:
408 {
409 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
410 return true;
411
412 int count = VECTOR_CST_NELTS (t), i;
413
414 for (i = 0; i < count; i++)
415 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
416 return false;
417
418 return true;
419 }
420
421 case COMPLEX_EXPR:
422 return negate_expr_p (TREE_OPERAND (t, 0))
423 && negate_expr_p (TREE_OPERAND (t, 1));
424
425 case CONJ_EXPR:
426 return negate_expr_p (TREE_OPERAND (t, 0));
427
428 case PLUS_EXPR:
429 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
430 || HONOR_SIGNED_ZEROS (element_mode (type))
431 || (ANY_INTEGRAL_TYPE_P (type)
432 && ! TYPE_OVERFLOW_WRAPS (type)))
433 return false;
434 /* -(A + B) -> (-B) - A. */
435 if (negate_expr_p (TREE_OPERAND (t, 1)))
436 return true;
437 /* -(A + B) -> (-A) - B. */
438 return negate_expr_p (TREE_OPERAND (t, 0));
439
440 case MINUS_EXPR:
441 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
442 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
443 && !HONOR_SIGNED_ZEROS (element_mode (type))
444 && (! ANY_INTEGRAL_TYPE_P (type)
445 || TYPE_OVERFLOW_WRAPS (type));
446
447 case MULT_EXPR:
448 if (TYPE_UNSIGNED (type))
449 break;
450 /* (INT_MIN/n) * n does not overflow, yet negating it by negating one
451 operand does overflow if n is a (negative) power of two. */
452 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
453 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
454 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
455 && (wi::popcount
456 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
457 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
458 && (wi::popcount
459 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
460 break;
461
462 /* Fall through. */
463
464 case RDIV_EXPR:
465 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
466 return negate_expr_p (TREE_OPERAND (t, 1))
467 || negate_expr_p (TREE_OPERAND (t, 0));
468 break;
469
470 case TRUNC_DIV_EXPR:
471 case ROUND_DIV_EXPR:
472 case EXACT_DIV_EXPR:
473 if (TYPE_UNSIGNED (type))
474 break;
475 if (negate_expr_p (TREE_OPERAND (t, 0)))
476 return true;
477 /* In general we can't negate B in A / B, because if A is INT_MIN and
478 B is 1, we may turn this into INT_MIN / -1 which is undefined
479 and actually traps on some architectures. */
480 if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
481 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
482 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
483 && ! integer_onep (TREE_OPERAND (t, 1))))
484 return negate_expr_p (TREE_OPERAND (t, 1));
485 break;
486
487 case NOP_EXPR:
488 /* Negate -((double)float) as (double)(-float). */
489 if (TREE_CODE (type) == REAL_TYPE)
490 {
491 tree tem = strip_float_extensions (t);
492 if (tem != t)
493 return negate_expr_p (tem);
494 }
495 break;
496
497 case CALL_EXPR:
498 /* Negate -f(x) as f(-x). */
499 if (negate_mathfn_p (get_call_combined_fn (t)))
500 return negate_expr_p (CALL_EXPR_ARG (t, 0));
501 break;
502
503 case RSHIFT_EXPR:
504 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
505 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
506 {
507 tree op1 = TREE_OPERAND (t, 1);
508 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
509 return true;
510 }
511 break;
512
513 default:
514 break;
515 }
516 return false;
517 }
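
/* For example, with int A and B under -fwrapv (TYPE_OVERFLOW_WRAPS),
   negate_expr_p (A - B) returns true, licensing the
   -(A - B) -> B - A rewrite that fold_negate_expr_1 performs below;
   without -fwrapv the MINUS_EXPR case above refuses it for
   overflow-sensitive integer types.  */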
518
519 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
520 simplification is possible.
521 If negate_expr_p would return true for T, NULL_TREE will never be
522 returned. */
523
524 static tree
525 fold_negate_expr_1 (location_t loc, tree t)
526 {
527 tree type = TREE_TYPE (t);
528 tree tem;
529
530 switch (TREE_CODE (t))
531 {
532 /* Convert - (~A) to A + 1. */
533 case BIT_NOT_EXPR:
534 if (INTEGRAL_TYPE_P (type))
535 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
536 build_one_cst (type));
537 break;
538
539 case INTEGER_CST:
540 tem = fold_negate_const (t, type);
541 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
542 || (ANY_INTEGRAL_TYPE_P (type)
543 && !TYPE_OVERFLOW_TRAPS (type)
544 && TYPE_OVERFLOW_WRAPS (type))
545 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
546 return tem;
547 break;
548
549 case REAL_CST:
550 tem = fold_negate_const (t, type);
551 return tem;
552
553 case FIXED_CST:
554 tem = fold_negate_const (t, type);
555 return tem;
556
557 case COMPLEX_CST:
558 {
559 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
560 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
561 if (rpart && ipart)
562 return build_complex (type, rpart, ipart);
563 }
564 break;
565
566 case VECTOR_CST:
567 {
568 int count = VECTOR_CST_NELTS (t), i;
569
570 auto_vec<tree, 32> elts (count);
571 for (i = 0; i < count; i++)
572 {
573 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
574 if (elt == NULL_TREE)
575 return NULL_TREE;
576 elts.quick_push (elt);
577 }
578
579 return build_vector (type, elts);
580 }
581
582 case COMPLEX_EXPR:
583 if (negate_expr_p (t))
584 return fold_build2_loc (loc, COMPLEX_EXPR, type,
585 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
586 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
587 break;
588
589 case CONJ_EXPR:
590 if (negate_expr_p (t))
591 return fold_build1_loc (loc, CONJ_EXPR, type,
592 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
593 break;
594
595 case NEGATE_EXPR:
596 if (!TYPE_OVERFLOW_SANITIZED (type))
597 return TREE_OPERAND (t, 0);
598 break;
599
600 case PLUS_EXPR:
601 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
602 && !HONOR_SIGNED_ZEROS (element_mode (type)))
603 {
604 /* -(A + B) -> (-B) - A. */
605 if (negate_expr_p (TREE_OPERAND (t, 1)))
606 {
607 tem = negate_expr (TREE_OPERAND (t, 1));
608 return fold_build2_loc (loc, MINUS_EXPR, type,
609 tem, TREE_OPERAND (t, 0));
610 }
611
612 /* -(A + B) -> (-A) - B. */
613 if (negate_expr_p (TREE_OPERAND (t, 0)))
614 {
615 tem = negate_expr (TREE_OPERAND (t, 0));
616 return fold_build2_loc (loc, MINUS_EXPR, type,
617 tem, TREE_OPERAND (t, 1));
618 }
619 }
620 break;
621
622 case MINUS_EXPR:
623 /* - (A - B) -> B - A */
624 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
625 && !HONOR_SIGNED_ZEROS (element_mode (type)))
626 return fold_build2_loc (loc, MINUS_EXPR, type,
627 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
628 break;
629
630 case MULT_EXPR:
631 if (TYPE_UNSIGNED (type))
632 break;
633
634 /* Fall through. */
635
636 case RDIV_EXPR:
637 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
638 {
639 tem = TREE_OPERAND (t, 1);
640 if (negate_expr_p (tem))
641 return fold_build2_loc (loc, TREE_CODE (t), type,
642 TREE_OPERAND (t, 0), negate_expr (tem));
643 tem = TREE_OPERAND (t, 0);
644 if (negate_expr_p (tem))
645 return fold_build2_loc (loc, TREE_CODE (t), type,
646 negate_expr (tem), TREE_OPERAND (t, 1));
647 }
648 break;
649
650 case TRUNC_DIV_EXPR:
651 case ROUND_DIV_EXPR:
652 case EXACT_DIV_EXPR:
653 if (TYPE_UNSIGNED (type))
654 break;
655 if (negate_expr_p (TREE_OPERAND (t, 0)))
656 return fold_build2_loc (loc, TREE_CODE (t), type,
657 negate_expr (TREE_OPERAND (t, 0)),
658 TREE_OPERAND (t, 1));
659 /* In general we can't negate B in A / B, because if A is INT_MIN and
660 B is 1, we may turn this into INT_MIN / -1 which is undefined
661 and actually traps on some architectures. */
662 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
663 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
664 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
665 && ! integer_onep (TREE_OPERAND (t, 1))))
666 && negate_expr_p (TREE_OPERAND (t, 1)))
667 return fold_build2_loc (loc, TREE_CODE (t), type,
668 TREE_OPERAND (t, 0),
669 negate_expr (TREE_OPERAND (t, 1)));
670 break;
671
672 case NOP_EXPR:
673 /* Convert -((double)float) into (double)(-float). */
674 if (TREE_CODE (type) == REAL_TYPE)
675 {
676 tem = strip_float_extensions (t);
677 if (tem != t && negate_expr_p (tem))
678 return fold_convert_loc (loc, type, negate_expr (tem));
679 }
680 break;
681
682 case CALL_EXPR:
683 /* Negate -f(x) as f(-x). */
684 if (negate_mathfn_p (get_call_combined_fn (t))
685 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
686 {
687 tree fndecl, arg;
688
689 fndecl = get_callee_fndecl (t);
690 arg = negate_expr (CALL_EXPR_ARG (t, 0));
691 return build_call_expr_loc (loc, fndecl, 1, arg);
692 }
693 break;
694
695 case RSHIFT_EXPR:
696 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
697 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
698 {
699 tree op1 = TREE_OPERAND (t, 1);
700 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
701 {
702 tree ntype = TYPE_UNSIGNED (type)
703 ? signed_type_for (type)
704 : unsigned_type_for (type);
705 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
706 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
707 return fold_convert_loc (loc, type, temp);
708 }
709 }
710 break;
711
712 default:
713 break;
714 }
715
716 return NULL_TREE;
717 }
718
719 /* A wrapper for fold_negate_expr_1. */
720
721 static tree
722 fold_negate_expr (location_t loc, tree t)
723 {
724 tree type = TREE_TYPE (t);
725 STRIP_SIGN_NOPS (t);
726 tree tem = fold_negate_expr_1 (loc, t);
727 if (tem == NULL_TREE)
728 return NULL_TREE;
729 return fold_convert_loc (loc, type, tem);
730 }
731
732 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
733 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
734 return NULL_TREE. */
735
736 static tree
737 negate_expr (tree t)
738 {
739 tree type, tem;
740 location_t loc;
741
742 if (t == NULL_TREE)
743 return NULL_TREE;
744
745 loc = EXPR_LOCATION (t);
746 type = TREE_TYPE (t);
747 STRIP_SIGN_NOPS (t);
748
749 tem = fold_negate_expr (loc, t);
750 if (!tem)
751 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
752 return fold_convert_loc (loc, type, tem);
753 }
754 \f
755 /* Split a tree IN into constant, literal and variable parts that could be
756 combined with CODE to make IN. "constant" means an expression with
757 TREE_CONSTANT but that isn't an actual constant. CODE must be a
758 commutative arithmetic operation. Store the constant part into *CONP,
759 the literal in *LITP and return the variable part. If a part isn't
760 present, set it to null. If the tree does not decompose in this way,
761 return the entire tree as the variable part and the other parts as null.
762
763 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
764 case, we negate an operand that was subtracted. Except if it is a
765 literal for which we use *MINUS_LITP instead.
766
767 If NEGATE_P is true, we are negating all of IN, again except a literal
768 for which we use *MINUS_LITP instead. If a variable part is of pointer
769 type, it is negated after converting to TYPE. This prevents us from
770 generating an illegal MINUS pointer expression. LOC is the location of
771 the converted variable part.
772
773 If IN is itself a literal or constant, return it as appropriate.
774
775 Note that we do not guarantee that any of the three values will be the
776 same type as IN, but they will have the same signedness and mode. */
777
778 static tree
779 split_tree (tree in, tree type, enum tree_code code,
780 tree *minus_varp, tree *conp, tree *minus_conp,
781 tree *litp, tree *minus_litp, int negate_p)
782 {
783 tree var = 0;
784 *minus_varp = 0;
785 *conp = 0;
786 *minus_conp = 0;
787 *litp = 0;
788 *minus_litp = 0;
789
790 /* Strip any conversions that don't change the machine mode or signedness. */
791 STRIP_SIGN_NOPS (in);
792
793 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
794 || TREE_CODE (in) == FIXED_CST)
795 *litp = in;
796 else if (TREE_CODE (in) == code
797 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
798 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
799 /* We can associate addition and subtraction together (even
800 though the C standard doesn't say so) for integers because
801 the value is not affected. For reals, the value might be
802 affected, so we can't. */
803 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
804 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
805 || (code == MINUS_EXPR
806 && (TREE_CODE (in) == PLUS_EXPR
807 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
808 {
809 tree op0 = TREE_OPERAND (in, 0);
810 tree op1 = TREE_OPERAND (in, 1);
811 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
812 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
813
814 /* First see if either of the operands is a literal, then a constant. */
815 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
816 || TREE_CODE (op0) == FIXED_CST)
817 *litp = op0, op0 = 0;
818 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
819 || TREE_CODE (op1) == FIXED_CST)
820 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
821
822 if (op0 != 0 && TREE_CONSTANT (op0))
823 *conp = op0, op0 = 0;
824 else if (op1 != 0 && TREE_CONSTANT (op1))
825 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
826
827 /* If we haven't dealt with either operand, this is not a case we can
828 decompose. Otherwise, VAR is either of the ones remaining, if any. */
829 if (op0 != 0 && op1 != 0)
830 var = in;
831 else if (op0 != 0)
832 var = op0;
833 else
834 var = op1, neg_var_p = neg1_p;
835
836 /* Now do any needed negations. */
837 if (neg_litp_p)
838 *minus_litp = *litp, *litp = 0;
839 if (neg_conp_p && *conp)
840 *minus_conp = *conp, *conp = 0;
841 if (neg_var_p && var)
842 *minus_varp = var, var = 0;
843 }
844 else if (TREE_CONSTANT (in))
845 *conp = in;
846 else if (TREE_CODE (in) == BIT_NOT_EXPR
847 && code == PLUS_EXPR)
848 {
849 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
850 when IN is constant. */
851 *litp = build_minus_one_cst (type);
852 *minus_varp = TREE_OPERAND (in, 0);
853 }
854 else
855 var = in;
856
857 if (negate_p)
858 {
859 if (*litp)
860 *minus_litp = *litp, *litp = 0;
861 else if (*minus_litp)
862 *litp = *minus_litp, *minus_litp = 0;
863 if (*conp)
864 *minus_conp = *conp, *conp = 0;
865 else if (*minus_conp)
866 *conp = *minus_conp, *minus_conp = 0;
867 if (var)
868 *minus_varp = var, var = 0;
869 else if (*minus_varp)
870 var = *minus_varp, *minus_varp = 0;
871 }
872
873 if (*litp
874 && TREE_OVERFLOW_P (*litp))
875 *litp = drop_tree_overflow (*litp);
876 if (*minus_litp
877 && TREE_OVERFLOW_P (*minus_litp))
878 *minus_litp = drop_tree_overflow (*minus_litp);
879
880 return var;
881 }
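
/* A worked example of the decomposition above, with CODE == PLUS_EXPR
   and NEGATE_P false:

     IN = x + 5   ==>  return x,  *LITP = 5
     IN = x - 5   ==>  return x,  *MINUS_LITP = 5
     IN = ~x      ==>  *LITP = -1, *MINUS_VARP = x  (undoing ~x == -1 - x)

   With NEGATE_P true, each part found is instead stored in (or moved
   to) its negated counterpart before returning.  */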
882
883 /* Re-associate trees split by the above function. T1 and T2 are
884 either expressions to associate or null. Return the new
885 expression, if any. LOC is the location of the new expression. If
886 we build an operation, do it in TYPE and with CODE. */
887
888 static tree
889 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
890 {
891 if (t1 == 0)
892 {
893 gcc_assert (t2 == 0 || code != MINUS_EXPR);
894 return t2;
895 }
896 else if (t2 == 0)
897 return t1;
898
899 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
900 try to fold this since we will have infinite recursion. But do
901 deal with any NEGATE_EXPRs. */
902 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
903 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
904 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
905 {
906 if (code == PLUS_EXPR)
907 {
908 if (TREE_CODE (t1) == NEGATE_EXPR)
909 return build2_loc (loc, MINUS_EXPR, type,
910 fold_convert_loc (loc, type, t2),
911 fold_convert_loc (loc, type,
912 TREE_OPERAND (t1, 0)));
913 else if (TREE_CODE (t2) == NEGATE_EXPR)
914 return build2_loc (loc, MINUS_EXPR, type,
915 fold_convert_loc (loc, type, t1),
916 fold_convert_loc (loc, type,
917 TREE_OPERAND (t2, 0)));
918 else if (integer_zerop (t2))
919 return fold_convert_loc (loc, type, t1);
920 }
921 else if (code == MINUS_EXPR)
922 {
923 if (integer_zerop (t2))
924 return fold_convert_loc (loc, type, t1);
925 }
926
927 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
928 fold_convert_loc (loc, type, t2));
929 }
930
931 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
932 fold_convert_loc (loc, type, t2));
933 }
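
/* Continuing the example after split_tree: the parts are stitched
   back together pairwise, e.g.

     associate_trees (loc, x, build_int_cst (type, 5), PLUS_EXPR, type)

   rebuilds x + 5, while taking the early build2_loc exit above to
   avoid re-folding (and hence infinitely recursing on) trees whose
   top-level code is already CODE, PLUS_EXPR or MINUS_EXPR.  */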
934 \f
935 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
936 for use in int_const_binop, size_binop and size_diffop. */
937
938 static bool
939 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
940 {
941 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
942 return false;
943 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
944 return false;
945
946 switch (code)
947 {
948 case LSHIFT_EXPR:
949 case RSHIFT_EXPR:
950 case LROTATE_EXPR:
951 case RROTATE_EXPR:
952 return true;
953
954 default:
955 break;
956 }
957
958 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
959 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
960 && TYPE_MODE (type1) == TYPE_MODE (type2);
961 }
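
/* The shift and rotate codes are exempted above because their count
   operand legitimately has a different type from the shifted value:
   a sizetype value shifted by a plain int count still satisfies
   int_binop_types_match_p.  */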
962
963
964 /* Combine two integer constants PARG1 and PARG2 under operation CODE
965 to produce a new constant. Return NULL_TREE if we don't know how
966 to evaluate CODE at compile-time. */
967
968 static tree
969 int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2,
970 int overflowable)
971 {
972 wide_int res;
973 tree t;
974 tree type = TREE_TYPE (parg1);
975 signop sign = TYPE_SIGN (type);
976 bool overflow = false;
977
978 wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
979 wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));
980
981 switch (code)
982 {
983 case BIT_IOR_EXPR:
984 res = wi::bit_or (arg1, arg2);
985 break;
986
987 case BIT_XOR_EXPR:
988 res = wi::bit_xor (arg1, arg2);
989 break;
990
991 case BIT_AND_EXPR:
992 res = wi::bit_and (arg1, arg2);
993 break;
994
995 case RSHIFT_EXPR:
996 case LSHIFT_EXPR:
997 if (wi::neg_p (arg2))
998 {
999 arg2 = -arg2;
1000 if (code == RSHIFT_EXPR)
1001 code = LSHIFT_EXPR;
1002 else
1003 code = RSHIFT_EXPR;
1004 }
1005
1006 if (code == RSHIFT_EXPR)
1007 /* It's unclear from the C standard whether shifts can overflow.
1008 The following code ignores overflow; perhaps a C standard
1009 interpretation ruling is needed. */
1010 res = wi::rshift (arg1, arg2, sign);
1011 else
1012 res = wi::lshift (arg1, arg2);
1013 break;
1014
1015 case RROTATE_EXPR:
1016 case LROTATE_EXPR:
1017 if (wi::neg_p (arg2))
1018 {
1019 arg2 = -arg2;
1020 if (code == RROTATE_EXPR)
1021 code = LROTATE_EXPR;
1022 else
1023 code = RROTATE_EXPR;
1024 }
1025
1026 if (code == RROTATE_EXPR)
1027 res = wi::rrotate (arg1, arg2);
1028 else
1029 res = wi::lrotate (arg1, arg2);
1030 break;
1031
1032 case PLUS_EXPR:
1033 res = wi::add (arg1, arg2, sign, &overflow);
1034 break;
1035
1036 case MINUS_EXPR:
1037 res = wi::sub (arg1, arg2, sign, &overflow);
1038 break;
1039
1040 case MULT_EXPR:
1041 res = wi::mul (arg1, arg2, sign, &overflow);
1042 break;
1043
1044 case MULT_HIGHPART_EXPR:
1045 res = wi::mul_high (arg1, arg2, sign);
1046 break;
1047
1048 case TRUNC_DIV_EXPR:
1049 case EXACT_DIV_EXPR:
1050 if (arg2 == 0)
1051 return NULL_TREE;
1052 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1053 break;
1054
1055 case FLOOR_DIV_EXPR:
1056 if (arg2 == 0)
1057 return NULL_TREE;
1058 res = wi::div_floor (arg1, arg2, sign, &overflow);
1059 break;
1060
1061 case CEIL_DIV_EXPR:
1062 if (arg2 == 0)
1063 return NULL_TREE;
1064 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1065 break;
1066
1067 case ROUND_DIV_EXPR:
1068 if (arg2 == 0)
1069 return NULL_TREE;
1070 res = wi::div_round (arg1, arg2, sign, &overflow);
1071 break;
1072
1073 case TRUNC_MOD_EXPR:
1074 if (arg2 == 0)
1075 return NULL_TREE;
1076 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1077 break;
1078
1079 case FLOOR_MOD_EXPR:
1080 if (arg2 == 0)
1081 return NULL_TREE;
1082 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1083 break;
1084
1085 case CEIL_MOD_EXPR:
1086 if (arg2 == 0)
1087 return NULL_TREE;
1088 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1089 break;
1090
1091 case ROUND_MOD_EXPR:
1092 if (arg2 == 0)
1093 return NULL_TREE;
1094 res = wi::mod_round (arg1, arg2, sign, &overflow);
1095 break;
1096
1097 case MIN_EXPR:
1098 res = wi::min (arg1, arg2, sign);
1099 break;
1100
1101 case MAX_EXPR:
1102 res = wi::max (arg1, arg2, sign);
1103 break;
1104
1105 default:
1106 return NULL_TREE;
1107 }
1108
1109 t = force_fit_type (type, res, overflowable,
1110 (((sign == SIGNED || overflowable == -1)
1111 && overflow)
1112 | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));
1113
1114 return t;
1115 }
1116
1117 tree
1118 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1119 {
1120 return int_const_binop_1 (code, arg1, arg2, 1);
1121 }
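
/* For example, adding the int constants INT_MAX and 1 wraps to
   INT_MIN and flags the result, since for a signed type the overflow
   noted above is forwarded to force_fit_type:

     tree t = int_const_binop (PLUS_EXPR,
                               TYPE_MAX_VALUE (integer_type_node),
                               integer_one_node);

   TREE_OVERFLOW (t) is set on the returned constant.  */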
1122
1123 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1124 constant. We assume ARG1 and ARG2 have the same data type, or at least
1125 are the same kind of constant and the same machine mode. Return zero if
1126 combining the constants is not allowed in the current operating mode. */
1127
1128 static tree
1129 const_binop (enum tree_code code, tree arg1, tree arg2)
1130 {
1131 /* Sanity check for the recursive cases. */
1132 if (!arg1 || !arg2)
1133 return NULL_TREE;
1134
1135 STRIP_NOPS (arg1);
1136 STRIP_NOPS (arg2);
1137
1138 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1139 {
1140 if (code == POINTER_PLUS_EXPR)
1141 return int_const_binop (PLUS_EXPR,
1142 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1143
1144 return int_const_binop (code, arg1, arg2);
1145 }
1146
1147 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1148 {
1149 machine_mode mode;
1150 REAL_VALUE_TYPE d1;
1151 REAL_VALUE_TYPE d2;
1152 REAL_VALUE_TYPE value;
1153 REAL_VALUE_TYPE result;
1154 bool inexact;
1155 tree t, type;
1156
1157 /* The following codes are handled by real_arithmetic. */
1158 switch (code)
1159 {
1160 case PLUS_EXPR:
1161 case MINUS_EXPR:
1162 case MULT_EXPR:
1163 case RDIV_EXPR:
1164 case MIN_EXPR:
1165 case MAX_EXPR:
1166 break;
1167
1168 default:
1169 return NULL_TREE;
1170 }
1171
1172 d1 = TREE_REAL_CST (arg1);
1173 d2 = TREE_REAL_CST (arg2);
1174
1175 type = TREE_TYPE (arg1);
1176 mode = TYPE_MODE (type);
1177
1178 /* Don't perform operation if we honor signaling NaNs and
1179 either operand is a signaling NaN. */
1180 if (HONOR_SNANS (mode)
1181 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1182 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1183 return NULL_TREE;
1184
1185 /* Don't perform operation if it would raise a division
1186 by zero exception. */
1187 if (code == RDIV_EXPR
1188 && real_equal (&d2, &dconst0)
1189 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1190 return NULL_TREE;
1191
1192 /* If either operand is a NaN, just return it. Otherwise, set up
1193 for floating-point trap; we return an overflow. */
1194 if (REAL_VALUE_ISNAN (d1))
1195 {
1196 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1197 is off. */
1198 d1.signalling = 0;
1199 t = build_real (type, d1);
1200 return t;
1201 }
1202 else if (REAL_VALUE_ISNAN (d2))
1203 {
1204 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1205 is off. */
1206 d2.signalling = 0;
1207 t = build_real (type, d2);
1208 return t;
1209 }
1210
1211 inexact = real_arithmetic (&value, code, &d1, &d2);
1212 real_convert (&result, mode, &value);
1213
1214 /* Don't constant fold this floating point operation if
1215 the result has overflowed and flag_trapping_math is set. */
1216 if (flag_trapping_math
1217 && MODE_HAS_INFINITIES (mode)
1218 && REAL_VALUE_ISINF (result)
1219 && !REAL_VALUE_ISINF (d1)
1220 && !REAL_VALUE_ISINF (d2))
1221 return NULL_TREE;
1222
1223 /* Don't constant fold this floating point operation if the
1224 result may depend upon the run-time rounding mode and
1225 flag_rounding_math is set, or if GCC's software emulation
1226 is unable to accurately represent the result. */
1227 if ((flag_rounding_math
1228 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1229 && (inexact || !real_identical (&result, &value)))
1230 return NULL_TREE;
1231
1232 t = build_real (type, result);
1233
1234 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1235 return t;
1236 }
1237
1238 if (TREE_CODE (arg1) == FIXED_CST)
1239 {
1240 FIXED_VALUE_TYPE f1;
1241 FIXED_VALUE_TYPE f2;
1242 FIXED_VALUE_TYPE result;
1243 tree t, type;
1244 int sat_p;
1245 bool overflow_p;
1246
1247 /* The following codes are handled by fixed_arithmetic. */
1248 switch (code)
1249 {
1250 case PLUS_EXPR:
1251 case MINUS_EXPR:
1252 case MULT_EXPR:
1253 case TRUNC_DIV_EXPR:
1254 if (TREE_CODE (arg2) != FIXED_CST)
1255 return NULL_TREE;
1256 f2 = TREE_FIXED_CST (arg2);
1257 break;
1258
1259 case LSHIFT_EXPR:
1260 case RSHIFT_EXPR:
1261 {
1262 if (TREE_CODE (arg2) != INTEGER_CST)
1263 return NULL_TREE;
1264 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1265 f2.data.high = w2.elt (1);
1266 f2.data.low = w2.ulow ();
1267 f2.mode = SImode;
1268 }
1269 break;
1270
1271 default:
1272 return NULL_TREE;
1273 }
1274
1275 f1 = TREE_FIXED_CST (arg1);
1276 type = TREE_TYPE (arg1);
1277 sat_p = TYPE_SATURATING (type);
1278 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1279 t = build_fixed (type, result);
1280 /* Propagate overflow flags. */
1281 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1282 TREE_OVERFLOW (t) = 1;
1283 return t;
1284 }
1285
1286 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1287 {
1288 tree type = TREE_TYPE (arg1);
1289 tree r1 = TREE_REALPART (arg1);
1290 tree i1 = TREE_IMAGPART (arg1);
1291 tree r2 = TREE_REALPART (arg2);
1292 tree i2 = TREE_IMAGPART (arg2);
1293 tree real, imag;
1294
1295 switch (code)
1296 {
1297 case PLUS_EXPR:
1298 case MINUS_EXPR:
1299 real = const_binop (code, r1, r2);
1300 imag = const_binop (code, i1, i2);
1301 break;
1302
1303 case MULT_EXPR:
1304 if (COMPLEX_FLOAT_TYPE_P (type))
1305 return do_mpc_arg2 (arg1, arg2, type,
1306 /* do_nonfinite= */ folding_initializer,
1307 mpc_mul);
1308
1309 real = const_binop (MINUS_EXPR,
1310 const_binop (MULT_EXPR, r1, r2),
1311 const_binop (MULT_EXPR, i1, i2));
1312 imag = const_binop (PLUS_EXPR,
1313 const_binop (MULT_EXPR, r1, i2),
1314 const_binop (MULT_EXPR, i1, r2));
1315 break;
1316
1317 case RDIV_EXPR:
1318 if (COMPLEX_FLOAT_TYPE_P (type))
1319 return do_mpc_arg2 (arg1, arg2, type,
1320 /* do_nonfinite= */ folding_initializer,
1321 mpc_div);
1322 /* Fallthru. */
1323 case TRUNC_DIV_EXPR:
1324 case CEIL_DIV_EXPR:
1325 case FLOOR_DIV_EXPR:
1326 case ROUND_DIV_EXPR:
1327 if (flag_complex_method == 0)
1328 {
1329 /* Keep this algorithm in sync with
1330 tree-complex.c:expand_complex_div_straight().
1331
1332 Expand complex division to scalars, straightforward algorithm.
1333 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1334 t = br*br + bi*bi
1335 */
1336 tree magsquared
1337 = const_binop (PLUS_EXPR,
1338 const_binop (MULT_EXPR, r2, r2),
1339 const_binop (MULT_EXPR, i2, i2));
1340 tree t1
1341 = const_binop (PLUS_EXPR,
1342 const_binop (MULT_EXPR, r1, r2),
1343 const_binop (MULT_EXPR, i1, i2));
1344 tree t2
1345 = const_binop (MINUS_EXPR,
1346 const_binop (MULT_EXPR, i1, r2),
1347 const_binop (MULT_EXPR, r1, i2));
1348
1349 real = const_binop (code, t1, magsquared);
1350 imag = const_binop (code, t2, magsquared);
1351 }
1352 else
1353 {
1354 /* Keep this algorithm in sync with
1355 tree-complex.c:expand_complex_div_wide().
1356
1357 Expand complex division to scalars, modified algorithm to minimize
1358 overflow with wide input ranges. */
1359 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1360 fold_abs_const (r2, TREE_TYPE (type)),
1361 fold_abs_const (i2, TREE_TYPE (type)));
1362
1363 if (integer_nonzerop (compare))
1364 {
1365 /* In the TRUE branch, we compute
1366 ratio = br/bi;
1367 div = (br * ratio) + bi;
1368 tr = (ar * ratio) + ai;
1369 ti = (ai * ratio) - ar;
1370 tr = tr / div;
1371 ti = ti / div; */
1372 tree ratio = const_binop (code, r2, i2);
1373 tree div = const_binop (PLUS_EXPR, i2,
1374 const_binop (MULT_EXPR, r2, ratio));
1375 real = const_binop (MULT_EXPR, r1, ratio);
1376 real = const_binop (PLUS_EXPR, real, i1);
1377 real = const_binop (code, real, div);
1378
1379 imag = const_binop (MULT_EXPR, i1, ratio);
1380 imag = const_binop (MINUS_EXPR, imag, r1);
1381 imag = const_binop (code, imag, div);
1382 }
1383 else
1384 {
1385 /* In the FALSE branch, we compute
1386 ratio = bi/br;
1387 div = (bi * ratio) + br;
1388 tr = (ai * ratio) + ar;
1389 ti = ai - (ar * ratio);
1390 tr = tr / div;
1391 ti = ti / div; */
1392 tree ratio = const_binop (code, i2, r2);
1393 tree div = const_binop (PLUS_EXPR, r2,
1394 const_binop (MULT_EXPR, i2, ratio));
1395
1396 real = const_binop (MULT_EXPR, i1, ratio);
1397 real = const_binop (PLUS_EXPR, real, r1);
1398 real = const_binop (code, real, div);
1399
1400 imag = const_binop (MULT_EXPR, r1, ratio);
1401 imag = const_binop (MINUS_EXPR, i1, imag);
1402 imag = const_binop (code, imag, div);
1403 }
1404 }
1405 break;
1406
1407 default:
1408 return NULL_TREE;
1409 }
1410
1411 if (real && imag)
1412 return build_complex (type, real, imag);
1413 }
1414
1415 if (TREE_CODE (arg1) == VECTOR_CST
1416 && TREE_CODE (arg2) == VECTOR_CST)
1417 {
1418 tree type = TREE_TYPE (arg1);
1419 int count = VECTOR_CST_NELTS (arg1), i;
1420
1421 auto_vec<tree, 32> elts (count);
1422 for (i = 0; i < count; i++)
1423 {
1424 tree elem1 = VECTOR_CST_ELT (arg1, i);
1425 tree elem2 = VECTOR_CST_ELT (arg2, i);
1426
1427 tree elt = const_binop (code, elem1, elem2);
1428
1429 /* It is possible that const_binop cannot handle the given
1430 code and returns NULL_TREE. */
1431 if (elt == NULL_TREE)
1432 return NULL_TREE;
1433 elts.quick_push (elt);
1434 }
1435
1436 return build_vector (type, elts);
1437 }
1438
1439 /* Shifts allow a scalar offset for a vector. */
1440 if (TREE_CODE (arg1) == VECTOR_CST
1441 && TREE_CODE (arg2) == INTEGER_CST)
1442 {
1443 tree type = TREE_TYPE (arg1);
1444 int count = VECTOR_CST_NELTS (arg1), i;
1445
1446 auto_vec<tree, 32> elts (count);
1447 for (i = 0; i < count; i++)
1448 {
1449 tree elem1 = VECTOR_CST_ELT (arg1, i);
1450
1451 tree elt = const_binop (code, elem1, arg2);
1452
1453 /* It is possible that const_binop cannot handle the given
1454 code and returns NULL_TREE. */
1455 if (elt == NULL_TREE)
1456 return NULL_TREE;
1457 elts.quick_push (elt);
1458 }
1459
1460 return build_vector (type, elts);
1461 }
1462 return NULL_TREE;
1463 }
1464
1465 /* Overload that adds a TYPE parameter to be able to dispatch
1466 to fold_relational_const. */
1467
1468 tree
1469 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1470 {
1471 if (TREE_CODE_CLASS (code) == tcc_comparison)
1472 return fold_relational_const (code, type, arg1, arg2);
1473
1474 /* ??? Until we make the const_binop worker take the type of the
1475 result as argument, put those cases that need it here. */
1476 switch (code)
1477 {
1478 case COMPLEX_EXPR:
1479 if ((TREE_CODE (arg1) == REAL_CST
1480 && TREE_CODE (arg2) == REAL_CST)
1481 || (TREE_CODE (arg1) == INTEGER_CST
1482 && TREE_CODE (arg2) == INTEGER_CST))
1483 return build_complex (type, arg1, arg2);
1484 return NULL_TREE;
1485
1486 case POINTER_DIFF_EXPR:
1487 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1488 {
1489 offset_int res = wi::sub (wi::to_offset (arg1),
1490 wi::to_offset (arg2));
1491 return force_fit_type (type, res, 1,
1492 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1493 }
1494 return NULL_TREE;
1495
1496 case VEC_PACK_TRUNC_EXPR:
1497 case VEC_PACK_FIX_TRUNC_EXPR:
1498 {
1499 unsigned int out_nelts, in_nelts, i;
1500
1501 if (TREE_CODE (arg1) != VECTOR_CST
1502 || TREE_CODE (arg2) != VECTOR_CST)
1503 return NULL_TREE;
1504
1505 in_nelts = VECTOR_CST_NELTS (arg1);
1506 out_nelts = in_nelts * 2;
1507 gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
1508 && out_nelts == TYPE_VECTOR_SUBPARTS (type));
1509
1510 auto_vec<tree, 32> elts (out_nelts);
1511 for (i = 0; i < out_nelts; i++)
1512 {
1513 tree elt = (i < in_nelts
1514 ? VECTOR_CST_ELT (arg1, i)
1515 : VECTOR_CST_ELT (arg2, i - in_nelts));
1516 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1517 ? NOP_EXPR : FIX_TRUNC_EXPR,
1518 TREE_TYPE (type), elt);
1519 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1520 return NULL_TREE;
1521 elts.quick_push (elt);
1522 }
1523
1524 return build_vector (type, elts);
1525 }
1526
1527 case VEC_WIDEN_MULT_LO_EXPR:
1528 case VEC_WIDEN_MULT_HI_EXPR:
1529 case VEC_WIDEN_MULT_EVEN_EXPR:
1530 case VEC_WIDEN_MULT_ODD_EXPR:
1531 {
1532 unsigned int out_nelts, in_nelts, out, ofs, scale;
1533
1534 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1535 return NULL_TREE;
1536
1537 in_nelts = VECTOR_CST_NELTS (arg1);
1538 out_nelts = in_nelts / 2;
1539 gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
1540 && out_nelts == TYPE_VECTOR_SUBPARTS (type));
1541
1542 if (code == VEC_WIDEN_MULT_LO_EXPR)
1543 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1544 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1545 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1546 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1547 scale = 1, ofs = 0;
1548 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1549 scale = 1, ofs = 1;
1550
1551 auto_vec<tree, 32> elts (out_nelts);
1552 for (out = 0; out < out_nelts; out++)
1553 {
1554 unsigned int in = (out << scale) + ofs;
1555 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1556 VECTOR_CST_ELT (arg1, in));
1557 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1558 VECTOR_CST_ELT (arg2, in));
1559
1560 if (t1 == NULL_TREE || t2 == NULL_TREE)
1561 return NULL_TREE;
1562 tree elt = const_binop (MULT_EXPR, t1, t2);
1563 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1564 return NULL_TREE;
1565 elts.quick_push (elt);
1566 }
1567
1568 return build_vector (type, elts);
1569 }
1570
1571 default:;
1572 }
1573
1574 if (TREE_CODE_CLASS (code) != tcc_binary)
1575 return NULL_TREE;
1576
1577 /* Make sure TYPE and ARG1 have the same saturating flag. */
1578 gcc_checking_assert (TYPE_SATURATING (type)
1579 == TYPE_SATURATING (TREE_TYPE (arg1)));
1580
1581 return const_binop (code, arg1, arg2);
1582 }
1583
1584 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1585 Return zero if computing the constants is not possible. */
1586
1587 tree
1588 const_unop (enum tree_code code, tree type, tree arg0)
1589 {
1590 /* Don't perform the operation, other than NEGATE and ABS, if
1591 flag_signaling_nans is on and the operand is a signaling NaN. */
1592 if (TREE_CODE (arg0) == REAL_CST
1593 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1594 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1595 && code != NEGATE_EXPR
1596 && code != ABS_EXPR)
1597 return NULL_TREE;
1598
1599 switch (code)
1600 {
1601 CASE_CONVERT:
1602 case FLOAT_EXPR:
1603 case FIX_TRUNC_EXPR:
1604 case FIXED_CONVERT_EXPR:
1605 return fold_convert_const (code, type, arg0);
1606
1607 case ADDR_SPACE_CONVERT_EXPR:
1608 /* If the source address is 0, and the source address space
1609 cannot have a valid object at 0, fold to dest type null. */
1610 if (integer_zerop (arg0)
1611 && !(targetm.addr_space.zero_address_valid
1612 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1613 return fold_convert_const (code, type, arg0);
1614 break;
1615
1616 case VIEW_CONVERT_EXPR:
1617 return fold_view_convert_expr (type, arg0);
1618
1619 case NEGATE_EXPR:
1620 {
1621 /* Can't call fold_negate_const directly here as that doesn't
1622 handle all cases and we might not be able to negate some
1623 constants. */
1624 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1625 if (tem && CONSTANT_CLASS_P (tem))
1626 return tem;
1627 break;
1628 }
1629
1630 case ABS_EXPR:
1631 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1632 return fold_abs_const (arg0, type);
1633 break;
1634
1635 case CONJ_EXPR:
1636 if (TREE_CODE (arg0) == COMPLEX_CST)
1637 {
1638 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1639 TREE_TYPE (type));
1640 return build_complex (type, TREE_REALPART (arg0), ipart);
1641 }
1642 break;
1643
1644 case BIT_NOT_EXPR:
1645 if (TREE_CODE (arg0) == INTEGER_CST)
1646 return fold_not_const (arg0, type);
1647 /* Perform BIT_NOT_EXPR on each element individually. */
1648 else if (TREE_CODE (arg0) == VECTOR_CST)
1649 {
1650 tree elem;
1651 unsigned count = VECTOR_CST_NELTS (arg0), i;
1652
1653 auto_vec<tree, 32> elements (count);
1654 for (i = 0; i < count; i++)
1655 {
1656 elem = VECTOR_CST_ELT (arg0, i);
1657 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1658 if (elem == NULL_TREE)
1659 break;
1660 elements.quick_push (elem);
1661 }
1662 if (i == count)
1663 return build_vector (type, elements);
1664 }
1665 break;
1666
1667 case TRUTH_NOT_EXPR:
1668 if (TREE_CODE (arg0) == INTEGER_CST)
1669 return constant_boolean_node (integer_zerop (arg0), type);
1670 break;
1671
1672 case REALPART_EXPR:
1673 if (TREE_CODE (arg0) == COMPLEX_CST)
1674 return fold_convert (type, TREE_REALPART (arg0));
1675 break;
1676
1677 case IMAGPART_EXPR:
1678 if (TREE_CODE (arg0) == COMPLEX_CST)
1679 return fold_convert (type, TREE_IMAGPART (arg0));
1680 break;
1681
1682 case VEC_UNPACK_LO_EXPR:
1683 case VEC_UNPACK_HI_EXPR:
1684 case VEC_UNPACK_FLOAT_LO_EXPR:
1685 case VEC_UNPACK_FLOAT_HI_EXPR:
1686 {
1687 unsigned int out_nelts, in_nelts, i;
1688 enum tree_code subcode;
1689
1690 if (TREE_CODE (arg0) != VECTOR_CST)
1691 return NULL_TREE;
1692
1693 in_nelts = VECTOR_CST_NELTS (arg0);
1694 out_nelts = in_nelts / 2;
1695 gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type));
1696
1697 unsigned int offset = 0;
1698 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1699 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1700 offset = out_nelts;
1701
1702 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1703 subcode = NOP_EXPR;
1704 else
1705 subcode = FLOAT_EXPR;
1706
1707 auto_vec<tree, 32> elts (out_nelts);
1708 for (i = 0; i < out_nelts; i++)
1709 {
1710 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1711 VECTOR_CST_ELT (arg0, i + offset));
1712 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1713 return NULL_TREE;
1714 elts.quick_push (elt);
1715 }
1716
1717 return build_vector (type, elts);
1718 }
1719
1720 default:
1721 break;
1722 }
1723
1724 return NULL_TREE;
1725 }
1726
1727 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1728 indicates which particular sizetype to create. */
1729
1730 tree
1731 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1732 {
1733 return build_int_cst (sizetype_tab[(int) kind], number);
1734 }
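
/* tree.h wraps this in the size_int, ssize_int, bitsize_int and
   sbitsize_int macros, one per size_type_kind, so e.g. size_int (8)
   builds the sizetype constant 8 used with size_binop below.  */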
1735 \f
1736 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1737 is a tree code. The type of the result is taken from the operands.
1738 Both must be equivalent integer types, ala int_binop_types_match_p.
1739 If the operands are constant, so is the result. */
1740
1741 tree
1742 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1743 {
1744 tree type = TREE_TYPE (arg0);
1745
1746 if (arg0 == error_mark_node || arg1 == error_mark_node)
1747 return error_mark_node;
1748
1749 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1750 TREE_TYPE (arg1)));
1751
1752 /* Handle the special case of two integer constants faster. */
1753 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1754 {
1755 /* And some specific cases even faster than that. */
1756 if (code == PLUS_EXPR)
1757 {
1758 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1759 return arg1;
1760 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1761 return arg0;
1762 }
1763 else if (code == MINUS_EXPR)
1764 {
1765 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1766 return arg0;
1767 }
1768 else if (code == MULT_EXPR)
1769 {
1770 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1771 return arg1;
1772 }
1773
1774 /* Handle general case of two integer constants. For sizetype
1775 constant calculations we always want to know about overflow,
1776 even in the unsigned case. */
1777 return int_const_binop_1 (code, arg0, arg1, -1);
1778 }
1779
1780 return fold_build2_loc (loc, code, type, arg0, arg1);
1781 }
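
/* Unlike plain int_const_binop, the overflowable argument of -1 used
   above makes overflow sticky even for unsigned sizetype math: e.g.
   multiplying two size_int constants whose product exceeds sizetype's
   precision returns a constant with TREE_OVERFLOW set rather than a
   silently wrapped value.  */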
1782
1783 /* Given two values, either both of sizetype or both of bitsizetype,
1784 compute the difference between the two values. Return the value
1785 in a signed type corresponding to the type of the operands. */
1786
1787 tree
1788 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1789 {
1790 tree type = TREE_TYPE (arg0);
1791 tree ctype;
1792
1793 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1794 TREE_TYPE (arg1)));
1795
1796 /* If the type is already signed, just do the simple thing. */
1797 if (!TYPE_UNSIGNED (type))
1798 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1799
1800 if (type == sizetype)
1801 ctype = ssizetype;
1802 else if (type == bitsizetype)
1803 ctype = sbitsizetype;
1804 else
1805 ctype = signed_type_for (type);
1806
1807 /* If either operand is not a constant, do the conversions to the signed
1808 type and subtract. The hardware will do the right thing with any
1809 overflow in the subtraction. */
1810 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1811 return size_binop_loc (loc, MINUS_EXPR,
1812 fold_convert_loc (loc, ctype, arg0),
1813 fold_convert_loc (loc, ctype, arg1));
1814
1815 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1816 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1817 overflow) and negate (which can't either). Special-case a result
1818 of zero while we're here. */
1819 if (tree_int_cst_equal (arg0, arg1))
1820 return build_int_cst (ctype, 0);
1821 else if (tree_int_cst_lt (arg1, arg0))
1822 return fold_convert_loc (loc, ctype,
1823 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1824 else
1825 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1826 fold_convert_loc (loc, ctype,
1827 size_binop_loc (loc,
1828 MINUS_EXPR,
1829 arg1, arg0)));
1830 }
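
/* For instance, with sizetype constants 2 and 6 this computes
   6 - 2 = 4 in sizetype first, converts to ssizetype, and negates
   (as 0 - 4), returning ssizetype -4 without ever forming an
   out-of-range unsigned intermediate.  */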
1831 \f
1832 /* A subroutine of fold_convert_const handling conversions of an
1833 INTEGER_CST to another integer type. */
1834
1835 static tree
1836 fold_convert_const_int_from_int (tree type, const_tree arg1)
1837 {
1838 /* Given an integer constant, make new constant with new type,
1839 appropriately sign-extended or truncated. Use widest_int
1840 so that any extension is done according to ARG1's type. */
1841 return force_fit_type (type, wi::to_widest (arg1),
1842 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1843 TREE_OVERFLOW (arg1));
1844 }
1845
1846 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1847 to an integer type. */
1848
1849 static tree
1850 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1851 {
1852 bool overflow = false;
1853 tree t;
1854
1855 /* The following code implements the floating point to integer
1856 conversion rules required by the Java Language Specification,
1857 that IEEE NaNs are mapped to zero and values that overflow
1858 the target precision saturate, i.e. values greater than
1859 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1860 are mapped to INT_MIN. These semantics are allowed by the
1861 C and C++ standards that simply state that the behavior of
1862 FP-to-integer conversion is unspecified upon overflow. */
1863
1864 wide_int val;
1865 REAL_VALUE_TYPE r;
1866 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1867
1868 switch (code)
1869 {
1870 case FIX_TRUNC_EXPR:
1871 real_trunc (&r, VOIDmode, &x);
1872 break;
1873
1874 default:
1875 gcc_unreachable ();
1876 }
1877
1878 /* If R is NaN, return zero and show we have an overflow. */
1879 if (REAL_VALUE_ISNAN (r))
1880 {
1881 overflow = true;
1882 val = wi::zero (TYPE_PRECISION (type));
1883 }
1884
1885 /* See if R is less than the lower bound or greater than the
1886 upper bound. */
1887
1888 if (! overflow)
1889 {
1890 tree lt = TYPE_MIN_VALUE (type);
1891 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1892 if (real_less (&r, &l))
1893 {
1894 overflow = true;
1895 val = wi::to_wide (lt);
1896 }
1897 }
1898
1899 if (! overflow)
1900 {
1901 tree ut = TYPE_MAX_VALUE (type);
1902 if (ut)
1903 {
1904 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1905 if (real_less (&u, &r))
1906 {
1907 overflow = true;
1908 val = wi::to_wide (ut);
1909 }
1910 }
1911 }
1912
1913 if (! overflow)
1914 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1915
1916 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1917 return t;
1918 }
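
/* An illustrative sketch of the saturating semantics above; it is
   deliberately not built (hypothetical usage, in-file only, since
   fold_convert_const is static).  Assuming a target where
   integer_type_node is 32 bits:  */
#if 0
  REAL_VALUE_TYPE big;
  real_from_string (&big, "1.0e10");	/* Exceeds INT_MAX.  */
  tree cst = build_real (double_type_node, big);
  tree t = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, cst);
  /* T is INT_MAX (2147483647) with TREE_OVERFLOW set; a NaN operand
     would fold to 0 instead, also with TREE_OVERFLOW set.  */
#endif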
1919
1920 /* A subroutine of fold_convert_const handling conversions of a
1921 FIXED_CST to an integer type. */
1922
1923 static tree
1924 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1925 {
1926 tree t;
1927 double_int temp, temp_trunc;
1928 scalar_mode mode;
1929
1930 /* Right shift FIXED_CST to temp by fbit. */
1931 temp = TREE_FIXED_CST (arg1).data;
1932 mode = TREE_FIXED_CST (arg1).mode;
1933 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1934 {
1935 temp = temp.rshift (GET_MODE_FBIT (mode),
1936 HOST_BITS_PER_DOUBLE_INT,
1937 SIGNED_FIXED_POINT_MODE_P (mode));
1938
1939 /* Left shift temp to temp_trunc by fbit. */
1940 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1941 HOST_BITS_PER_DOUBLE_INT,
1942 SIGNED_FIXED_POINT_MODE_P (mode));
1943 }
1944 else
1945 {
1946 temp = double_int_zero;
1947 temp_trunc = double_int_zero;
1948 }
1949
1950   /* If FIXED_CST is negative, we need to round the value toward 0:
1951      if the discarded fractional bits are nonzero, add 1 to temp.  */
1952 if (SIGNED_FIXED_POINT_MODE_P (mode)
1953 && temp_trunc.is_negative ()
1954 && TREE_FIXED_CST (arg1).data != temp_trunc)
1955 temp += double_int_one;
1956
1957 /* Given a fixed-point constant, make new constant with new type,
1958 appropriately sign-extended or truncated. */
1959 t = force_fit_type (type, temp, -1,
1960 (temp.is_negative ()
1961 && (TYPE_UNSIGNED (type)
1962 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1963 | TREE_OVERFLOW (arg1));
1964
1965 return t;
1966 }
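
/* Worked example of the rounding above, for a signed fixed-point
   constant -2.5: the arithmetic right shift by fbit yields -3
   (shifting rounds toward negative infinity), the discarded
   fractional bits are nonzero, so 1 is added to give -2, i.e.
   truncation toward zero.  */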
1967
1968 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1969 to another floating point type. */
1970
1971 static tree
1972 fold_convert_const_real_from_real (tree type, const_tree arg1)
1973 {
1974 REAL_VALUE_TYPE value;
1975 tree t;
1976
1977 /* Don't perform the operation if flag_signaling_nans is on
1978 and the operand is a signaling NaN. */
1979 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1980 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1981 return NULL_TREE;
1982
1983 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1984 t = build_real (type, value);
1985
1986 /* If converting an infinity or NAN to a representation that doesn't
1987 have one, set the overflow bit so that we can produce some kind of
1988 error message at the appropriate point if necessary. It's not the
1989 most user-friendly message, but it's better than nothing. */
1990 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1991 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1992 TREE_OVERFLOW (t) = 1;
1993 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1994 && !MODE_HAS_NANS (TYPE_MODE (type)))
1995 TREE_OVERFLOW (t) = 1;
1996 /* Regular overflow, conversion produced an infinity in a mode that
1997 can't represent them. */
1998 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1999 && REAL_VALUE_ISINF (value)
2000 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2001 TREE_OVERFLOW (t) = 1;
2002 else
2003 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2004 return t;
2005 }
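
/* Example of the overflow marking above: converting an IEEE Inf or
   NaN constant to a format that has neither (e.g. a VAX float mode)
   sets TREE_OVERFLOW on the result.  Converting a large but finite
   double to IEEE float merely yields +Inf with no overflow flag,
   since that mode does have infinities.  */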
2006
2007 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2008 to a floating point type. */
2009
2010 static tree
2011 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2012 {
2013 REAL_VALUE_TYPE value;
2014 tree t;
2015
2016 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2017 &TREE_FIXED_CST (arg1));
2018 t = build_real (type, value);
2019
2020 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2021 return t;
2022 }
2023
2024 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2025 to another fixed-point type. */
2026
2027 static tree
2028 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2029 {
2030 FIXED_VALUE_TYPE value;
2031 tree t;
2032 bool overflow_p;
2033
2034 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2035 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2036 t = build_fixed (type, value);
2037
2038 /* Propagate overflow flags. */
2039 if (overflow_p | TREE_OVERFLOW (arg1))
2040 TREE_OVERFLOW (t) = 1;
2041 return t;
2042 }
2043
2044 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2045 to a fixed-point type. */
2046
2047 static tree
2048 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2049 {
2050 FIXED_VALUE_TYPE value;
2051 tree t;
2052 bool overflow_p;
2053 double_int di;
2054
2055 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2056
2057 di.low = TREE_INT_CST_ELT (arg1, 0);
2058 if (TREE_INT_CST_NUNITS (arg1) == 1)
2059 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2060 else
2061 di.high = TREE_INT_CST_ELT (arg1, 1);
2062
2063 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2064 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2065 TYPE_SATURATING (type));
2066 t = build_fixed (type, value);
2067
2068 /* Propagate overflow flags. */
2069 if (overflow_p | TREE_OVERFLOW (arg1))
2070 TREE_OVERFLOW (t) = 1;
2071 return t;
2072 }
2073
2074 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2075 to a fixed-point type. */
2076
2077 static tree
2078 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2079 {
2080 FIXED_VALUE_TYPE value;
2081 tree t;
2082 bool overflow_p;
2083
2084 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2085 &TREE_REAL_CST (arg1),
2086 TYPE_SATURATING (type));
2087 t = build_fixed (type, value);
2088
2089 /* Propagate overflow flags. */
2090 if (overflow_p | TREE_OVERFLOW (arg1))
2091 TREE_OVERFLOW (t) = 1;
2092 return t;
2093 }
2094
2095 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2096 type TYPE. If no simplification can be done return NULL_TREE. */
2097
2098 static tree
2099 fold_convert_const (enum tree_code code, tree type, tree arg1)
2100 {
2101 if (TREE_TYPE (arg1) == type)
2102 return arg1;
2103
2104 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2105 || TREE_CODE (type) == OFFSET_TYPE)
2106 {
2107 if (TREE_CODE (arg1) == INTEGER_CST)
2108 return fold_convert_const_int_from_int (type, arg1);
2109 else if (TREE_CODE (arg1) == REAL_CST)
2110 return fold_convert_const_int_from_real (code, type, arg1);
2111 else if (TREE_CODE (arg1) == FIXED_CST)
2112 return fold_convert_const_int_from_fixed (type, arg1);
2113 }
2114 else if (TREE_CODE (type) == REAL_TYPE)
2115 {
2116 if (TREE_CODE (arg1) == INTEGER_CST)
2117 return build_real_from_int_cst (type, arg1);
2118 else if (TREE_CODE (arg1) == REAL_CST)
2119 return fold_convert_const_real_from_real (type, arg1);
2120 else if (TREE_CODE (arg1) == FIXED_CST)
2121 return fold_convert_const_real_from_fixed (type, arg1);
2122 }
2123 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2124 {
2125 if (TREE_CODE (arg1) == FIXED_CST)
2126 return fold_convert_const_fixed_from_fixed (type, arg1);
2127 else if (TREE_CODE (arg1) == INTEGER_CST)
2128 return fold_convert_const_fixed_from_int (type, arg1);
2129 else if (TREE_CODE (arg1) == REAL_CST)
2130 return fold_convert_const_fixed_from_real (type, arg1);
2131 }
2132 else if (TREE_CODE (type) == VECTOR_TYPE)
2133 {
2134 if (TREE_CODE (arg1) == VECTOR_CST
2135 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2136 {
2137 int len = VECTOR_CST_NELTS (arg1);
2138 tree elttype = TREE_TYPE (type);
2139 auto_vec<tree, 32> v (len);
2140 for (int i = 0; i < len; ++i)
2141 {
2142 tree elt = VECTOR_CST_ELT (arg1, i);
2143 tree cvt = fold_convert_const (code, elttype, elt);
2144 if (cvt == NULL_TREE)
2145 return NULL_TREE;
2146 v.quick_push (cvt);
2147 }
2148 return build_vector (type, v);
2149 }
2150 }
2151 return NULL_TREE;
2152 }
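
/* A minimal usage sketch of the dispatch above (not built;
   hypothetical values):  */
#if 0
  tree narrow = build_int_cst (integer_type_node, -1);
  /* Routes to fold_convert_const_int_from_int: sign-extends per the
     source type, yielding the long long constant -1.  A non-constant
     ARG1 would yield NULL_TREE instead.  */
  tree wide = fold_convert_const (NOP_EXPR, long_long_integer_type_node,
				  narrow);
#endif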
2153
2154 /* Construct a vector of type TYPE whose elements are all zero.  */
2155
2156 static tree
2157 build_zero_vector (tree type)
2158 {
2159 tree t;
2160
2161 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2162 return build_vector_from_val (type, t);
2163 }
2164
2165 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2166
2167 bool
2168 fold_convertible_p (const_tree type, const_tree arg)
2169 {
2170 tree orig = TREE_TYPE (arg);
2171
2172 if (type == orig)
2173 return true;
2174
2175 if (TREE_CODE (arg) == ERROR_MARK
2176 || TREE_CODE (type) == ERROR_MARK
2177 || TREE_CODE (orig) == ERROR_MARK)
2178 return false;
2179
2180 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2181 return true;
2182
2183 switch (TREE_CODE (type))
2184 {
2185 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2186 case POINTER_TYPE: case REFERENCE_TYPE:
2187 case OFFSET_TYPE:
2188 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2189 || TREE_CODE (orig) == OFFSET_TYPE);
2190
2191 case REAL_TYPE:
2192 case FIXED_POINT_TYPE:
2193 case VECTOR_TYPE:
2194 case VOID_TYPE:
2195 return TREE_CODE (type) == TREE_CODE (orig);
2196
2197 default:
2198 return false;
2199 }
2200 }
2201
2202 /* Convert expression ARG to type TYPE. Used by the middle-end for
2203 simple conversions in preference to calling the front-end's convert. */
2204
2205 tree
2206 fold_convert_loc (location_t loc, tree type, tree arg)
2207 {
2208 tree orig = TREE_TYPE (arg);
2209 tree tem;
2210
2211 if (type == orig)
2212 return arg;
2213
2214 if (TREE_CODE (arg) == ERROR_MARK
2215 || TREE_CODE (type) == ERROR_MARK
2216 || TREE_CODE (orig) == ERROR_MARK)
2217 return error_mark_node;
2218
2219 switch (TREE_CODE (type))
2220 {
2221 case POINTER_TYPE:
2222 case REFERENCE_TYPE:
2223 /* Handle conversions between pointers to different address spaces. */
2224 if (POINTER_TYPE_P (orig)
2225 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2226 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2227 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2228 /* fall through */
2229
2230 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2231 case OFFSET_TYPE:
2232 if (TREE_CODE (arg) == INTEGER_CST)
2233 {
2234 tem = fold_convert_const (NOP_EXPR, type, arg);
2235 if (tem != NULL_TREE)
2236 return tem;
2237 }
2238 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2239 || TREE_CODE (orig) == OFFSET_TYPE)
2240 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2241 if (TREE_CODE (orig) == COMPLEX_TYPE)
2242 return fold_convert_loc (loc, type,
2243 fold_build1_loc (loc, REALPART_EXPR,
2244 TREE_TYPE (orig), arg));
2245 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2246 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2247 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2248
2249 case REAL_TYPE:
2250 if (TREE_CODE (arg) == INTEGER_CST)
2251 {
2252 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2253 if (tem != NULL_TREE)
2254 return tem;
2255 }
2256 else if (TREE_CODE (arg) == REAL_CST)
2257 {
2258 tem = fold_convert_const (NOP_EXPR, type, arg);
2259 if (tem != NULL_TREE)
2260 return tem;
2261 }
2262 else if (TREE_CODE (arg) == FIXED_CST)
2263 {
2264 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2265 if (tem != NULL_TREE)
2266 return tem;
2267 }
2268
2269 switch (TREE_CODE (orig))
2270 {
2271 case INTEGER_TYPE:
2272 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2273 case POINTER_TYPE: case REFERENCE_TYPE:
2274 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2275
2276 case REAL_TYPE:
2277 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2278
2279 case FIXED_POINT_TYPE:
2280 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2281
2282 case COMPLEX_TYPE:
2283 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2284 return fold_convert_loc (loc, type, tem);
2285
2286 default:
2287 gcc_unreachable ();
2288 }
2289
2290 case FIXED_POINT_TYPE:
2291 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2292 || TREE_CODE (arg) == REAL_CST)
2293 {
2294 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2295 if (tem != NULL_TREE)
2296 goto fold_convert_exit;
2297 }
2298
2299 switch (TREE_CODE (orig))
2300 {
2301 case FIXED_POINT_TYPE:
2302 case INTEGER_TYPE:
2303 case ENUMERAL_TYPE:
2304 case BOOLEAN_TYPE:
2305 case REAL_TYPE:
2306 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2307
2308 case COMPLEX_TYPE:
2309 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2310 return fold_convert_loc (loc, type, tem);
2311
2312 default:
2313 gcc_unreachable ();
2314 }
2315
2316 case COMPLEX_TYPE:
2317 switch (TREE_CODE (orig))
2318 {
2319 case INTEGER_TYPE:
2320 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2321 case POINTER_TYPE: case REFERENCE_TYPE:
2322 case REAL_TYPE:
2323 case FIXED_POINT_TYPE:
2324 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2325 fold_convert_loc (loc, TREE_TYPE (type), arg),
2326 fold_convert_loc (loc, TREE_TYPE (type),
2327 integer_zero_node));
2328 case COMPLEX_TYPE:
2329 {
2330 tree rpart, ipart;
2331
2332 if (TREE_CODE (arg) == COMPLEX_EXPR)
2333 {
2334 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2335 TREE_OPERAND (arg, 0));
2336 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2337 TREE_OPERAND (arg, 1));
2338 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2339 }
2340
2341 arg = save_expr (arg);
2342 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2343 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2344 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2345 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2346 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2347 }
2348
2349 default:
2350 gcc_unreachable ();
2351 }
2352
2353 case VECTOR_TYPE:
2354 if (integer_zerop (arg))
2355 return build_zero_vector (type);
2356 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2357 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2358 || TREE_CODE (orig) == VECTOR_TYPE);
2359 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2360
2361 case VOID_TYPE:
2362 tem = fold_ignored_result (arg);
2363 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2364
2365 default:
2366 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2367 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2368 gcc_unreachable ();
2369 }
2370 fold_convert_exit:
2371 protected_set_expr_location_unshare (tem, loc);
2372 return tem;
2373 }
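
/* Sketch of the COMPLEX_TYPE -> COMPLEX_TYPE arm above: converting Z
   from complex double to complex float builds
     COMPLEX_EXPR <(float) REALPART_EXPR <Z>, (float) IMAGPART_EXPR <Z>>
   with Z wrapped in a SAVE_EXPR (unless it is already a COMPLEX_EXPR)
   so that it is evaluated only once.  */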
2374 \f
2375 /* Return false if expr can be assumed not to be an lvalue, true
2376 otherwise. */
2377
2378 static bool
2379 maybe_lvalue_p (const_tree x)
2380 {
2381 /* We only need to wrap lvalue tree codes. */
2382 switch (TREE_CODE (x))
2383 {
2384 case VAR_DECL:
2385 case PARM_DECL:
2386 case RESULT_DECL:
2387 case LABEL_DECL:
2388 case FUNCTION_DECL:
2389 case SSA_NAME:
2390
2391 case COMPONENT_REF:
2392 case MEM_REF:
2393 case INDIRECT_REF:
2394 case ARRAY_REF:
2395 case ARRAY_RANGE_REF:
2396 case BIT_FIELD_REF:
2397 case OBJ_TYPE_REF:
2398
2399 case REALPART_EXPR:
2400 case IMAGPART_EXPR:
2401 case PREINCREMENT_EXPR:
2402 case PREDECREMENT_EXPR:
2403 case SAVE_EXPR:
2404 case TRY_CATCH_EXPR:
2405 case WITH_CLEANUP_EXPR:
2406 case COMPOUND_EXPR:
2407 case MODIFY_EXPR:
2408 case TARGET_EXPR:
2409 case COND_EXPR:
2410 case BIND_EXPR:
2411 break;
2412
2413 default:
2414 /* Assume the worst for front-end tree codes. */
2415 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2416 break;
2417 return false;
2418 }
2419
2420 return true;
2421 }
2422
2423 /* Return an expr equal to X but certainly not valid as an lvalue. */
2424
2425 tree
2426 non_lvalue_loc (location_t loc, tree x)
2427 {
2428 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2429 us. */
2430 if (in_gimple_form)
2431 return x;
2432
2433 if (! maybe_lvalue_p (x))
2434 return x;
2435 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2436 }
2437
2438 /* Return X with its location set to LOC.  Historically, when pedantic,
2439    this also wrapped X so it was not valid as a pedantic lvalue.  */
2440
2441 static tree
2442 pedantic_non_lvalue_loc (location_t loc, tree x)
2443 {
2444 return protected_set_expr_location_unshare (x, loc);
2445 }
2446 \f
2447 /* Given a tree comparison code, return the code that is the logical inverse.
2448 It is generally not safe to do this for floating-point comparisons, except
2449 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2450 ERROR_MARK in this case. */
2451
2452 enum tree_code
2453 invert_tree_comparison (enum tree_code code, bool honor_nans)
2454 {
2455 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2456 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2457 return ERROR_MARK;
2458
2459 switch (code)
2460 {
2461 case EQ_EXPR:
2462 return NE_EXPR;
2463 case NE_EXPR:
2464 return EQ_EXPR;
2465 case GT_EXPR:
2466 return honor_nans ? UNLE_EXPR : LE_EXPR;
2467 case GE_EXPR:
2468 return honor_nans ? UNLT_EXPR : LT_EXPR;
2469 case LT_EXPR:
2470 return honor_nans ? UNGE_EXPR : GE_EXPR;
2471 case LE_EXPR:
2472 return honor_nans ? UNGT_EXPR : GT_EXPR;
2473 case LTGT_EXPR:
2474 return UNEQ_EXPR;
2475 case UNEQ_EXPR:
2476 return LTGT_EXPR;
2477 case UNGT_EXPR:
2478 return LE_EXPR;
2479 case UNGE_EXPR:
2480 return LT_EXPR;
2481 case UNLT_EXPR:
2482 return GE_EXPR;
2483 case UNLE_EXPR:
2484 return GT_EXPR;
2485 case ORDERED_EXPR:
2486 return UNORDERED_EXPR;
2487 case UNORDERED_EXPR:
2488 return ORDERED_EXPR;
2489 default:
2490 gcc_unreachable ();
2491 }
2492 }
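
/* Example: with NaNs honored, the inverse of a < b is a UNGE b
   (unordered or greater or equal), because a < b is false when either
   operand is a NaN; plain a >= b would be wrong since it is also
   false then.  Under -ftrapping-math we return ERROR_MARK instead,
   because the unordered form would not raise the invalid exception
   that the ordered comparison does on a NaN.  */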
2493
2494 /* Similar, but return the comparison that results if the operands are
2495 swapped. This is safe for floating-point. */
2496
2497 enum tree_code
2498 swap_tree_comparison (enum tree_code code)
2499 {
2500 switch (code)
2501 {
2502 case EQ_EXPR:
2503 case NE_EXPR:
2504 case ORDERED_EXPR:
2505 case UNORDERED_EXPR:
2506 case LTGT_EXPR:
2507 case UNEQ_EXPR:
2508 return code;
2509 case GT_EXPR:
2510 return LT_EXPR;
2511 case GE_EXPR:
2512 return LE_EXPR;
2513 case LT_EXPR:
2514 return GT_EXPR;
2515 case LE_EXPR:
2516 return GE_EXPR;
2517 case UNGT_EXPR:
2518 return UNLT_EXPR;
2519 case UNGE_EXPR:
2520 return UNLE_EXPR;
2521 case UNLT_EXPR:
2522 return UNGT_EXPR;
2523 case UNLE_EXPR:
2524 return UNGE_EXPR;
2525 default:
2526 gcc_unreachable ();
2527 }
2528 }
2529
2530
2531 /* Convert a comparison tree code from an enum tree_code representation
2532 into a compcode bit-based encoding. This function is the inverse of
2533 compcode_to_comparison. */
2534
2535 static enum comparison_code
2536 comparison_to_compcode (enum tree_code code)
2537 {
2538 switch (code)
2539 {
2540 case LT_EXPR:
2541 return COMPCODE_LT;
2542 case EQ_EXPR:
2543 return COMPCODE_EQ;
2544 case LE_EXPR:
2545 return COMPCODE_LE;
2546 case GT_EXPR:
2547 return COMPCODE_GT;
2548 case NE_EXPR:
2549 return COMPCODE_NE;
2550 case GE_EXPR:
2551 return COMPCODE_GE;
2552 case ORDERED_EXPR:
2553 return COMPCODE_ORD;
2554 case UNORDERED_EXPR:
2555 return COMPCODE_UNORD;
2556 case UNLT_EXPR:
2557 return COMPCODE_UNLT;
2558 case UNEQ_EXPR:
2559 return COMPCODE_UNEQ;
2560 case UNLE_EXPR:
2561 return COMPCODE_UNLE;
2562 case UNGT_EXPR:
2563 return COMPCODE_UNGT;
2564 case LTGT_EXPR:
2565 return COMPCODE_LTGT;
2566 case UNGE_EXPR:
2567 return COMPCODE_UNGE;
2568 default:
2569 gcc_unreachable ();
2570 }
2571 }
2572
2573 /* Convert a compcode bit-based encoding of a comparison operator back
2574 to GCC's enum tree_code representation. This function is the
2575 inverse of comparison_to_compcode. */
2576
2577 static enum tree_code
2578 compcode_to_comparison (enum comparison_code code)
2579 {
2580 switch (code)
2581 {
2582 case COMPCODE_LT:
2583 return LT_EXPR;
2584 case COMPCODE_EQ:
2585 return EQ_EXPR;
2586 case COMPCODE_LE:
2587 return LE_EXPR;
2588 case COMPCODE_GT:
2589 return GT_EXPR;
2590 case COMPCODE_NE:
2591 return NE_EXPR;
2592 case COMPCODE_GE:
2593 return GE_EXPR;
2594 case COMPCODE_ORD:
2595 return ORDERED_EXPR;
2596 case COMPCODE_UNORD:
2597 return UNORDERED_EXPR;
2598 case COMPCODE_UNLT:
2599 return UNLT_EXPR;
2600 case COMPCODE_UNEQ:
2601 return UNEQ_EXPR;
2602 case COMPCODE_UNLE:
2603 return UNLE_EXPR;
2604 case COMPCODE_UNGT:
2605 return UNGT_EXPR;
2606 case COMPCODE_LTGT:
2607 return LTGT_EXPR;
2608 case COMPCODE_UNGE:
2609 return UNGE_EXPR;
2610 default:
2611 gcc_unreachable ();
2612 }
2613 }
2614
2615 /* Return a tree for the comparison which is the combination of
2616 doing the AND or OR (depending on CODE) of the two operations LCODE
2617 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2618 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2619 if this makes the transformation invalid. */
2620
2621 tree
2622 combine_comparisons (location_t loc,
2623 enum tree_code code, enum tree_code lcode,
2624 enum tree_code rcode, tree truth_type,
2625 tree ll_arg, tree lr_arg)
2626 {
2627 bool honor_nans = HONOR_NANS (ll_arg);
2628 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2629 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2630 int compcode;
2631
2632 switch (code)
2633 {
2634 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2635 compcode = lcompcode & rcompcode;
2636 break;
2637
2638 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2639 compcode = lcompcode | rcompcode;
2640 break;
2641
2642 default:
2643 return NULL_TREE;
2644 }
2645
2646 if (!honor_nans)
2647 {
2648 /* Eliminate unordered comparisons, as well as LTGT and ORD
2649 which are not used unless the mode has NaNs. */
2650 compcode &= ~COMPCODE_UNORD;
2651 if (compcode == COMPCODE_LTGT)
2652 compcode = COMPCODE_NE;
2653 else if (compcode == COMPCODE_ORD)
2654 compcode = COMPCODE_TRUE;
2655 }
2656 else if (flag_trapping_math)
2657 {
2658 /* Check that the original operation and the optimized ones will trap
2659 under the same condition. */
2660 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2661 && (lcompcode != COMPCODE_EQ)
2662 && (lcompcode != COMPCODE_ORD);
2663 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2664 && (rcompcode != COMPCODE_EQ)
2665 && (rcompcode != COMPCODE_ORD);
2666 bool trap = (compcode & COMPCODE_UNORD) == 0
2667 && (compcode != COMPCODE_EQ)
2668 && (compcode != COMPCODE_ORD);
2669
2670 /* In a short-circuited boolean expression the LHS might be
2671 such that the RHS, if evaluated, will never trap. For
2672 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2673 if neither x nor y is NaN. (This is a mixed blessing: for
2674 example, the expression above will never trap, hence
2675 optimizing it to x < y would be invalid). */
2676 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2677 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2678 rtrap = false;
2679
2680 /* If the comparison was short-circuited, and only the RHS
2681 trapped, we may now generate a spurious trap. */
2682 if (rtrap && !ltrap
2683 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2684 return NULL_TREE;
2685
2686 /* If we changed the conditions that cause a trap, we lose. */
2687 if ((ltrap || rtrap) != trap)
2688 return NULL_TREE;
2689 }
2690
2691 if (compcode == COMPCODE_TRUE)
2692 return constant_boolean_node (true, truth_type);
2693 else if (compcode == COMPCODE_FALSE)
2694 return constant_boolean_node (false, truth_type);
2695 else
2696 {
2697 enum tree_code tcode;
2698
2699 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2700 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2701 }
2702 }
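
/* Worked example of the compcode arithmetic: each code is the OR of
   the outcomes it accepts (LT = 1, EQ = 2, GT = 4, UNORD = 8), so for
   (a <= b) && (a >= b) on the same operands we get
     COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ
   and, NaN and trapping caveats permitting, the result folds to
   a == b.  */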
2703 \f
2704 /* Return nonzero if two operands (typically of the same tree node)
2705 are necessarily equal. FLAGS modifies behavior as follows:
2706
2707 If OEP_ONLY_CONST is set, only return nonzero for constants.
2708 This function tests whether the operands are indistinguishable;
2709 it does not test whether they are equal using C's == operation.
2710 The distinction is important for IEEE floating point, because
2711 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2712 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2713
2714 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2715 even though it may hold multiple values during a function.
2716 This is because a GCC tree node guarantees that nothing else is
2717 executed between the evaluation of its "operands" (which may often
2718 be evaluated in arbitrary order). Hence if the operands themselves
2719 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2720 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2721 unset means assuming isochronic (or instantaneous) tree equivalence.
2722 Unless comparing arbitrary expression trees, such as from different
2723 statements, this flag can usually be left unset.
2724
2725 If OEP_PURE_SAME is set, then pure functions with identical arguments
2726 are considered the same. It is used when the caller has other ways
2727 to ensure that global memory is unchanged in between.
2728
2729 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2730 not values of expressions.
2731
2732 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2733 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2734
2735 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2736    any operand with side effects.  This is unnecessarily conservative in the
2737 case we know that arg0 and arg1 are in disjoint code paths (such as in
2738 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2739 addresses with TREE_CONSTANT flag set so we know that &var == &var
2740 even if var is volatile. */
2741
2742 int
2743 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2744 {
2745 /* When checking, verify at the outermost operand_equal_p call that
2746      if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2747 hash value. */
2748 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2749 {
2750 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2751 {
2752 if (arg0 != arg1)
2753 {
2754 inchash::hash hstate0 (0), hstate1 (0);
2755 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2756 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2757 hashval_t h0 = hstate0.end ();
2758 hashval_t h1 = hstate1.end ();
2759 gcc_assert (h0 == h1);
2760 }
2761 return 1;
2762 }
2763 else
2764 return 0;
2765 }
2766
2767 /* If either is ERROR_MARK, they aren't equal. */
2768 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2769 || TREE_TYPE (arg0) == error_mark_node
2770 || TREE_TYPE (arg1) == error_mark_node)
2771 return 0;
2772
2773 /* Similar, if either does not have a type (like a released SSA name),
2774 they aren't equal. */
2775 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2776 return 0;
2777
2778   /* We cannot consider pointers to different address spaces equal.  */
2779 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2780 && POINTER_TYPE_P (TREE_TYPE (arg1))
2781 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2782 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2783 return 0;
2784
2785 /* Check equality of integer constants before bailing out due to
2786 precision differences. */
2787 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2788 {
2789       /* The address of an INTEGER_CST is not defined; check that we did not
2790 	 forget to drop the OEP_ADDRESS_OF flag.  */
2791 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2792 return tree_int_cst_equal (arg0, arg1);
2793 }
2794
2795 if (!(flags & OEP_ADDRESS_OF))
2796 {
2797       /* If the two types don't have the same signedness, then we can't consider
2798 them equal. We must check this before the STRIP_NOPS calls
2799 because they may change the signedness of the arguments. As pointers
2800 strictly don't have a signedness, require either two pointers or
2801 two non-pointers as well. */
2802 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2803 || POINTER_TYPE_P (TREE_TYPE (arg0))
2804 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2805 return 0;
2806
2807       /* If the two types don't have the same precision, then it is not safe
2808 	 to strip NOPs.  */
2809 if (element_precision (TREE_TYPE (arg0))
2810 != element_precision (TREE_TYPE (arg1)))
2811 return 0;
2812
2813 STRIP_NOPS (arg0);
2814 STRIP_NOPS (arg1);
2815 }
2816 #if 0
2817   /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR.  Enable the
2818 sanity check once the issue is solved. */
2819 else
2820 /* Addresses of conversions and SSA_NAMEs (and many other things)
2821 are not defined. Check that we did not forget to drop the
2822 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2823 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2824 && TREE_CODE (arg0) != SSA_NAME);
2825 #endif
2826
2827 /* In case both args are comparisons but with different comparison
2828 code, try to swap the comparison operands of one arg to produce
2829 a match and compare that variant. */
2830 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2831 && COMPARISON_CLASS_P (arg0)
2832 && COMPARISON_CLASS_P (arg1))
2833 {
2834 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2835
2836 if (TREE_CODE (arg0) == swap_code)
2837 return operand_equal_p (TREE_OPERAND (arg0, 0),
2838 TREE_OPERAND (arg1, 1), flags)
2839 && operand_equal_p (TREE_OPERAND (arg0, 1),
2840 TREE_OPERAND (arg1, 0), flags);
2841 }
2842
2843 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2844 {
2845 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2846 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2847 ;
2848 else if (flags & OEP_ADDRESS_OF)
2849 {
2850 /* If we are interested in comparing addresses ignore
2851 MEM_REF wrappings of the base that can appear just for
2852 TBAA reasons. */
2853 if (TREE_CODE (arg0) == MEM_REF
2854 && DECL_P (arg1)
2855 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2856 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2857 && integer_zerop (TREE_OPERAND (arg0, 1)))
2858 return 1;
2859 else if (TREE_CODE (arg1) == MEM_REF
2860 && DECL_P (arg0)
2861 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2862 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2863 && integer_zerop (TREE_OPERAND (arg1, 1)))
2864 return 1;
2865 return 0;
2866 }
2867 else
2868 return 0;
2869 }
2870
2871   /* When not checking addresses, this is needed for conversions and for
2872 COMPONENT_REF. Might as well play it safe and always test this. */
2873 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2874 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2875 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2876 && !(flags & OEP_ADDRESS_OF)))
2877 return 0;
2878
2879 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2880 We don't care about side effects in that case because the SAVE_EXPR
2881 takes care of that for us. In all other cases, two expressions are
2882 equal if they have no side effects. If we have two identical
2883 expressions with side effects that should be treated the same due
2884 to the only side effects being identical SAVE_EXPR's, that will
2885 be detected in the recursive calls below.
2886 If we are taking an invariant address of two identical objects
2887 they are necessarily equal as well. */
2888 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2889 && (TREE_CODE (arg0) == SAVE_EXPR
2890 || (flags & OEP_MATCH_SIDE_EFFECTS)
2891 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2892 return 1;
2893
2894 /* Next handle constant cases, those for which we can return 1 even
2895 if ONLY_CONST is set. */
2896 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2897 switch (TREE_CODE (arg0))
2898 {
2899 case INTEGER_CST:
2900 return tree_int_cst_equal (arg0, arg1);
2901
2902 case FIXED_CST:
2903 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2904 TREE_FIXED_CST (arg1));
2905
2906 case REAL_CST:
2907 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2908 return 1;
2909
2910
2911 if (!HONOR_SIGNED_ZEROS (arg0))
2912 {
2913 /* If we do not distinguish between signed and unsigned zero,
2914 consider them equal. */
2915 if (real_zerop (arg0) && real_zerop (arg1))
2916 return 1;
2917 }
2918 return 0;
2919
2920 case VECTOR_CST:
2921 {
2922 unsigned i;
2923
2924 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2925 return 0;
2926
2927 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2928 {
2929 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2930 VECTOR_CST_ELT (arg1, i), flags))
2931 return 0;
2932 }
2933 return 1;
2934 }
2935
2936 case COMPLEX_CST:
2937 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2938 flags)
2939 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2940 flags));
2941
2942 case STRING_CST:
2943 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2944 && ! memcmp (TREE_STRING_POINTER (arg0),
2945 TREE_STRING_POINTER (arg1),
2946 TREE_STRING_LENGTH (arg0)));
2947
2948 case ADDR_EXPR:
2949 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2950 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2951 flags | OEP_ADDRESS_OF
2952 | OEP_MATCH_SIDE_EFFECTS);
2953 case CONSTRUCTOR:
2954 /* In GIMPLE empty constructors are allowed in initializers of
2955 aggregates. */
2956 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2957 default:
2958 break;
2959 }
2960
2961 if (flags & OEP_ONLY_CONST)
2962 return 0;
2963
2964 /* Define macros to test an operand from arg0 and arg1 for equality and a
2965 variant that allows null and views null as being different from any
2966    non-null value.  In the latter case, if either is null, then both
2967 must be; otherwise, do the normal comparison. */
2968 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2969 TREE_OPERAND (arg1, N), flags)
2970
2971 #define OP_SAME_WITH_NULL(N) \
2972 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2973 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2974
2975 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2976 {
2977 case tcc_unary:
2978 /* Two conversions are equal only if signedness and modes match. */
2979 switch (TREE_CODE (arg0))
2980 {
2981 CASE_CONVERT:
2982 case FIX_TRUNC_EXPR:
2983 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2984 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2985 return 0;
2986 break;
2987 default:
2988 break;
2989 }
2990
2991 return OP_SAME (0);
2992
2993
2994 case tcc_comparison:
2995 case tcc_binary:
2996 if (OP_SAME (0) && OP_SAME (1))
2997 return 1;
2998
2999 /* For commutative ops, allow the other order. */
3000 return (commutative_tree_code (TREE_CODE (arg0))
3001 && operand_equal_p (TREE_OPERAND (arg0, 0),
3002 TREE_OPERAND (arg1, 1), flags)
3003 && operand_equal_p (TREE_OPERAND (arg0, 1),
3004 TREE_OPERAND (arg1, 0), flags));
3005
3006 case tcc_reference:
3007 /* If either of the pointer (or reference) expressions we are
3008 dereferencing contain a side effect, these cannot be equal,
3009 but their addresses can be. */
3010 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3011 && (TREE_SIDE_EFFECTS (arg0)
3012 || TREE_SIDE_EFFECTS (arg1)))
3013 return 0;
3014
3015 switch (TREE_CODE (arg0))
3016 {
3017 case INDIRECT_REF:
3018 if (!(flags & OEP_ADDRESS_OF)
3019 && (TYPE_ALIGN (TREE_TYPE (arg0))
3020 != TYPE_ALIGN (TREE_TYPE (arg1))))
3021 return 0;
3022 flags &= ~OEP_ADDRESS_OF;
3023 return OP_SAME (0);
3024
3025 case IMAGPART_EXPR:
3026 /* Require the same offset. */
3027 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3028 TYPE_SIZE (TREE_TYPE (arg1)),
3029 flags & ~OEP_ADDRESS_OF))
3030 return 0;
3031
3032 /* Fallthru. */
3033 case REALPART_EXPR:
3034 case VIEW_CONVERT_EXPR:
3035 return OP_SAME (0);
3036
3037 case TARGET_MEM_REF:
3038 case MEM_REF:
3039 if (!(flags & OEP_ADDRESS_OF))
3040 {
3041 	    /* Require equal access sizes.  */
3042 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3043 && (!TYPE_SIZE (TREE_TYPE (arg0))
3044 || !TYPE_SIZE (TREE_TYPE (arg1))
3045 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3046 TYPE_SIZE (TREE_TYPE (arg1)),
3047 flags)))
3048 return 0;
3049 /* Verify that access happens in similar types. */
3050 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3051 return 0;
3052 /* Verify that accesses are TBAA compatible. */
3053 if (!alias_ptr_types_compatible_p
3054 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3055 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3056 || (MR_DEPENDENCE_CLIQUE (arg0)
3057 != MR_DEPENDENCE_CLIQUE (arg1))
3058 || (MR_DEPENDENCE_BASE (arg0)
3059 != MR_DEPENDENCE_BASE (arg1)))
3060 return 0;
3061 /* Verify that alignment is compatible. */
3062 if (TYPE_ALIGN (TREE_TYPE (arg0))
3063 != TYPE_ALIGN (TREE_TYPE (arg1)))
3064 return 0;
3065 }
3066 flags &= ~OEP_ADDRESS_OF;
3067 return (OP_SAME (0) && OP_SAME (1)
3068 		  /* TARGET_MEM_REFs require equal extra operands.  */
3069 && (TREE_CODE (arg0) != TARGET_MEM_REF
3070 || (OP_SAME_WITH_NULL (2)
3071 && OP_SAME_WITH_NULL (3)
3072 && OP_SAME_WITH_NULL (4))));
3073
3074 case ARRAY_REF:
3075 case ARRAY_RANGE_REF:
3076 if (!OP_SAME (0))
3077 return 0;
3078 flags &= ~OEP_ADDRESS_OF;
3079 	/* First compare the array index by value, as constant indices may
3080 	   have different types but the same value here.  */
3081 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3082 TREE_OPERAND (arg1, 1))
3083 || OP_SAME (1))
3084 && OP_SAME_WITH_NULL (2)
3085 && OP_SAME_WITH_NULL (3)
3086 /* Compare low bound and element size as with OEP_ADDRESS_OF
3087 we have to account for the offset of the ref. */
3088 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3089 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3090 || (operand_equal_p (array_ref_low_bound
3091 (CONST_CAST_TREE (arg0)),
3092 array_ref_low_bound
3093 (CONST_CAST_TREE (arg1)), flags)
3094 && operand_equal_p (array_ref_element_size
3095 (CONST_CAST_TREE (arg0)),
3096 array_ref_element_size
3097 (CONST_CAST_TREE (arg1)),
3098 flags))));
3099
3100 case COMPONENT_REF:
3101 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3102 may be NULL when we're called to compare MEM_EXPRs. */
3103 if (!OP_SAME_WITH_NULL (0)
3104 || !OP_SAME (1))
3105 return 0;
3106 flags &= ~OEP_ADDRESS_OF;
3107 return OP_SAME_WITH_NULL (2);
3108
3109 case BIT_FIELD_REF:
3110 if (!OP_SAME (0))
3111 return 0;
3112 flags &= ~OEP_ADDRESS_OF;
3113 return OP_SAME (1) && OP_SAME (2);
3114
3115 default:
3116 return 0;
3117 }
3118
3119 case tcc_expression:
3120 switch (TREE_CODE (arg0))
3121 {
3122 case ADDR_EXPR:
3123 /* Be sure we pass right ADDRESS_OF flag. */
3124 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3125 return operand_equal_p (TREE_OPERAND (arg0, 0),
3126 TREE_OPERAND (arg1, 0),
3127 flags | OEP_ADDRESS_OF);
3128
3129 case TRUTH_NOT_EXPR:
3130 return OP_SAME (0);
3131
3132 case TRUTH_ANDIF_EXPR:
3133 case TRUTH_ORIF_EXPR:
3134 return OP_SAME (0) && OP_SAME (1);
3135
3136 case FMA_EXPR:
3137 case WIDEN_MULT_PLUS_EXPR:
3138 case WIDEN_MULT_MINUS_EXPR:
3139 if (!OP_SAME (2))
3140 return 0;
3141 	  /* The multiplication operands are commutative.  */
3142 /* FALLTHRU */
3143
3144 case TRUTH_AND_EXPR:
3145 case TRUTH_OR_EXPR:
3146 case TRUTH_XOR_EXPR:
3147 if (OP_SAME (0) && OP_SAME (1))
3148 return 1;
3149
3150 /* Otherwise take into account this is a commutative operation. */
3151 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3152 TREE_OPERAND (arg1, 1), flags)
3153 && operand_equal_p (TREE_OPERAND (arg0, 1),
3154 TREE_OPERAND (arg1, 0), flags));
3155
3156 case COND_EXPR:
3157 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3158 return 0;
3159 flags &= ~OEP_ADDRESS_OF;
3160 return OP_SAME (0);
3161
3162 case BIT_INSERT_EXPR:
3163 	  /* BIT_INSERT_EXPR has an implicit operand: the type precision
3164 	     of op1.  We need to check that they are the same.  */
3165 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3166 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3167 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3168 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3169 return false;
3170 /* FALLTHRU */
3171
3172 case VEC_COND_EXPR:
3173 case DOT_PROD_EXPR:
3174 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3175
3176 case MODIFY_EXPR:
3177 case INIT_EXPR:
3178 case COMPOUND_EXPR:
3179 case PREDECREMENT_EXPR:
3180 case PREINCREMENT_EXPR:
3181 case POSTDECREMENT_EXPR:
3182 case POSTINCREMENT_EXPR:
3183 if (flags & OEP_LEXICOGRAPHIC)
3184 return OP_SAME (0) && OP_SAME (1);
3185 return 0;
3186
3187 case CLEANUP_POINT_EXPR:
3188 case EXPR_STMT:
3189 if (flags & OEP_LEXICOGRAPHIC)
3190 return OP_SAME (0);
3191 return 0;
3192
3193 default:
3194 return 0;
3195 }
3196
3197 case tcc_vl_exp:
3198 switch (TREE_CODE (arg0))
3199 {
3200 case CALL_EXPR:
3201 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3202 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3203 	    /* If one CALL_EXPR is an internal function call and the other
3204 	       is a normal function call, then they are not equal.  */
3205 return 0;
3206 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3207 {
3208 /* If the CALL_EXPRs call different internal functions, then they
3209 are not equal. */
3210 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3211 return 0;
3212 }
3213 else
3214 {
3215 /* If the CALL_EXPRs call different functions, then they are not
3216 equal. */
3217 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3218 flags))
3219 return 0;
3220 }
3221
3222 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3223 {
3224 unsigned int cef = call_expr_flags (arg0);
3225 if (flags & OEP_PURE_SAME)
3226 cef &= ECF_CONST | ECF_PURE;
3227 else
3228 cef &= ECF_CONST;
3229 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3230 return 0;
3231 }
3232
3233 /* Now see if all the arguments are the same. */
3234 {
3235 const_call_expr_arg_iterator iter0, iter1;
3236 const_tree a0, a1;
3237 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3238 a1 = first_const_call_expr_arg (arg1, &iter1);
3239 a0 && a1;
3240 a0 = next_const_call_expr_arg (&iter0),
3241 a1 = next_const_call_expr_arg (&iter1))
3242 if (! operand_equal_p (a0, a1, flags))
3243 return 0;
3244
3245 /* If we get here and both argument lists are exhausted
3246 then the CALL_EXPRs are equal. */
3247 return ! (a0 || a1);
3248 }
3249 default:
3250 return 0;
3251 }
3252
3253 case tcc_declaration:
3254 /* Consider __builtin_sqrt equal to sqrt. */
3255 return (TREE_CODE (arg0) == FUNCTION_DECL
3256 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3257 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3258 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3259
3260 case tcc_exceptional:
3261 if (TREE_CODE (arg0) == CONSTRUCTOR)
3262 {
3263 /* In GIMPLE constructors are used only to build vectors from
3264 elements. Individual elements in the constructor must be
3265 indexed in increasing order and form an initial sequence.
3266
3267 	     We make no effort to compare constructors in GENERIC.
3268 	     (See sem_variable::equals in ipa-icf, which can do so for
3269 	     constants.)  */
3270 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3271 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3272 return 0;
3273
3274 /* Be sure that vectors constructed have the same representation.
3275 	     We have only tested that element precision and modes match, and
3276 	     vectors may be BLKmode, so also check that the numbers of
3277 	     parts match.  */
3278 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3279 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3280 return 0;
3281
3282 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3283 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3284 unsigned int len = vec_safe_length (v0);
3285
3286 if (len != vec_safe_length (v1))
3287 return 0;
3288
3289 for (unsigned int i = 0; i < len; i++)
3290 {
3291 constructor_elt *c0 = &(*v0)[i];
3292 constructor_elt *c1 = &(*v1)[i];
3293
3294 if (!operand_equal_p (c0->value, c1->value, flags)
3295 /* In GIMPLE the indexes can be either NULL or matching i.
3296 Double check this so we won't get false
3297 positives for GENERIC. */
3298 || (c0->index
3299 && (TREE_CODE (c0->index) != INTEGER_CST
3300 || !compare_tree_int (c0->index, i)))
3301 || (c1->index
3302 && (TREE_CODE (c1->index) != INTEGER_CST
3303 || !compare_tree_int (c1->index, i))))
3304 return 0;
3305 }
3306 return 1;
3307 }
3308 else if (TREE_CODE (arg0) == STATEMENT_LIST
3309 && (flags & OEP_LEXICOGRAPHIC))
3310 {
3311 /* Compare the STATEMENT_LISTs. */
3312 tree_stmt_iterator tsi1, tsi2;
3313 tree body1 = CONST_CAST_TREE (arg0);
3314 tree body2 = CONST_CAST_TREE (arg1);
3315 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3316 tsi_next (&tsi1), tsi_next (&tsi2))
3317 {
3318 /* The lists don't have the same number of statements. */
3319 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3320 return 0;
3321 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3322 return 1;
3323 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3324 OEP_LEXICOGRAPHIC))
3325 return 0;
3326 }
3327 }
3328 return 0;
3329
3330 case tcc_statement:
3331 switch (TREE_CODE (arg0))
3332 {
3333 case RETURN_EXPR:
3334 if (flags & OEP_LEXICOGRAPHIC)
3335 return OP_SAME_WITH_NULL (0);
3336 return 0;
3337 default:
3338 return 0;
3339 }
3340
3341 default:
3342 return 0;
3343 }
3344
3345 #undef OP_SAME
3346 #undef OP_SAME_WITH_NULL
3347 }
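
/* A minimal usage sketch (not built; the temporary variable and the
   trees built from it are hypothetical):  */
#if 0
  tree x = create_tmp_var_raw (integer_type_node, "x");
  tree a = fold_build2 (PLUS_EXPR, integer_type_node, x,
			build_int_cst (integer_type_node, 1));
  tree b = fold_build2 (PLUS_EXPR, integer_type_node, x,
			build_int_cst (integer_type_node, 1));
  /* Structurally identical and side-effect free: compares equal.
     With OEP_ADDRESS_OF, addresses rather than values are compared,
     so &v == &v holds even for volatile V.  */
  gcc_assert (operand_equal_p (a, b, 0));
#endif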
3348 \f
3349 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3350 with a different signedness or a narrower precision. */
3351
3352 static bool
3353 operand_equal_for_comparison_p (tree arg0, tree arg1)
3354 {
3355 if (operand_equal_p (arg0, arg1, 0))
3356 return true;
3357
3358 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3359 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3360 return false;
3361
3362 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3363 and see if the inner values are the same. This removes any
3364 signedness comparison, which doesn't matter here. */
3365 tree op0 = arg0;
3366 tree op1 = arg1;
3367 STRIP_NOPS (op0);
3368 STRIP_NOPS (op1);
3369 if (operand_equal_p (op0, op1, 0))
3370 return true;
3371
3372 /* Discard a single widening conversion from ARG1 and see if the inner
3373 value is the same as ARG0. */
3374 if (CONVERT_EXPR_P (arg1)
3375 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3376 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3377 < TYPE_PRECISION (TREE_TYPE (arg1))
3378 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3379 return true;
3380
3381 return false;
3382 }
3383 \f
3384 /* See if ARG is an expression that is either a comparison or is performing
3385 arithmetic on comparisons. The comparisons must only be comparing
3386 two different values, which will be stored in *CVAL1 and *CVAL2; if
3387 they are nonzero it means that some operands have already been found.
3388 No variables may be used anywhere else in the expression except in the
3389 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3390 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3391
3392 If this is true, return 1. Otherwise, return zero. */
3393
3394 static int
3395 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3396 {
3397 enum tree_code code = TREE_CODE (arg);
3398 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3399
3400 /* We can handle some of the tcc_expression cases here. */
3401 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3402 tclass = tcc_unary;
3403 else if (tclass == tcc_expression
3404 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3405 || code == COMPOUND_EXPR))
3406 tclass = tcc_binary;
3407
3408 else if (tclass == tcc_expression && code == SAVE_EXPR
3409 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3410 {
3411 /* If we've already found a CVAL1 or CVAL2, this expression is
3412 	 too complex to handle.  */
3413 if (*cval1 || *cval2)
3414 return 0;
3415
3416 tclass = tcc_unary;
3417 *save_p = 1;
3418 }
3419
3420 switch (tclass)
3421 {
3422 case tcc_unary:
3423 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3424
3425 case tcc_binary:
3426 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3427 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3428 cval1, cval2, save_p));
3429
3430 case tcc_constant:
3431 return 1;
3432
3433 case tcc_expression:
3434 if (code == COND_EXPR)
3435 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3436 cval1, cval2, save_p)
3437 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3438 cval1, cval2, save_p)
3439 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3440 cval1, cval2, save_p));
3441 return 0;
3442
3443 case tcc_comparison:
3444 /* First see if we can handle the first operand, then the second. For
3445 	 the second operand, we know *CVAL1 can't be zero.  Each of the
3446 	 two values must appear on one side of the comparison; catch the
3447 	 case where this isn't true by failing if the two operands
3448 	 are the same.  */
3449
3450 if (operand_equal_p (TREE_OPERAND (arg, 0),
3451 TREE_OPERAND (arg, 1), 0))
3452 return 0;
3453
3454 if (*cval1 == 0)
3455 *cval1 = TREE_OPERAND (arg, 0);
3456 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3457 ;
3458 else if (*cval2 == 0)
3459 *cval2 = TREE_OPERAND (arg, 0);
3460 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3461 ;
3462 else
3463 return 0;
3464
3465 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3466 ;
3467 else if (*cval2 == 0)
3468 *cval2 = TREE_OPERAND (arg, 1);
3469 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3470 ;
3471 else
3472 return 0;
3473
3474 return 1;
3475
3476 default:
3477 return 0;
3478 }
3479 }
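
/* Example: for ARG = (a < b) | (a > b) the walk records *CVAL1 = a
   and *CVAL2 = b and returns 1, since every leaf is a constant or a
   comparison of exactly those two values; (a < b) | (c > d) fails
   because more than two values take part in the comparisons.  */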
3480 \f
3481 /* ARG is a tree that is known to contain just arithmetic operations and
3482 comparisons. Evaluate the operations in the tree substituting NEW0 for
3483 any occurrence of OLD0 as an operand of a comparison and likewise for
3484 NEW1 and OLD1. */
3485
3486 static tree
3487 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3488 tree old1, tree new1)
3489 {
3490 tree type = TREE_TYPE (arg);
3491 enum tree_code code = TREE_CODE (arg);
3492 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3493
3494 /* We can handle some of the tcc_expression cases here. */
3495 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3496 tclass = tcc_unary;
3497 else if (tclass == tcc_expression
3498 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3499 tclass = tcc_binary;
3500
3501 switch (tclass)
3502 {
3503 case tcc_unary:
3504 return fold_build1_loc (loc, code, type,
3505 eval_subst (loc, TREE_OPERAND (arg, 0),
3506 old0, new0, old1, new1));
3507
3508 case tcc_binary:
3509 return fold_build2_loc (loc, code, type,
3510 eval_subst (loc, TREE_OPERAND (arg, 0),
3511 old0, new0, old1, new1),
3512 eval_subst (loc, TREE_OPERAND (arg, 1),
3513 old0, new0, old1, new1));
3514
3515 case tcc_expression:
3516 switch (code)
3517 {
3518 case SAVE_EXPR:
3519 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3520 old1, new1);
3521
3522 case COMPOUND_EXPR:
3523 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3524 old1, new1);
3525
3526 case COND_EXPR:
3527 return fold_build3_loc (loc, code, type,
3528 eval_subst (loc, TREE_OPERAND (arg, 0),
3529 old0, new0, old1, new1),
3530 eval_subst (loc, TREE_OPERAND (arg, 1),
3531 old0, new0, old1, new1),
3532 eval_subst (loc, TREE_OPERAND (arg, 2),
3533 old0, new0, old1, new1));
3534 default:
3535 break;
3536 }
3537 /* Fall through - ??? */
3538
3539 case tcc_comparison:
3540 {
3541 tree arg0 = TREE_OPERAND (arg, 0);
3542 tree arg1 = TREE_OPERAND (arg, 1);
3543
3544 /* We need to check both for exact equality and tree equality. The
3545 former will be true if the operand has a side-effect. In that
3546 case, we know the operand occurred exactly once. */
3547
3548 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3549 arg0 = new0;
3550 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3551 arg0 = new1;
3552
3553 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3554 arg1 = new0;
3555 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3556 arg1 = new1;
3557
3558 return fold_build2_loc (loc, code, type, arg0, arg1);
3559 }
3560
3561 default:
3562 return arg;
3563 }
3564 }
3565 \f
3566 /* Return a tree for the case when the result of an expression is RESULT
3567 converted to TYPE and OMITTED was previously an operand of the expression
3568 but is now not needed (e.g., we folded OMITTED * 0).
3569
3570 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3571 the conversion of RESULT to TYPE. */
3572
3573 tree
3574 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3575 {
3576 tree t = fold_convert_loc (loc, type, result);
3577
3578 /* If the resulting operand is an empty statement, just return the omitted
3579 statement casted to void. */
3580 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3581 return build1_loc (loc, NOP_EXPR, void_type_node,
3582 fold_ignored_result (omitted));
3583
3584 if (TREE_SIDE_EFFECTS (omitted))
3585 return build2_loc (loc, COMPOUND_EXPR, type,
3586 fold_ignored_result (omitted), t);
3587
3588 return non_lvalue_loc (loc, t);
3589 }
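
/* Example: when f () * 0 is folded to 0, the call still has to be
   evaluated, so this returns COMPOUND_EXPR <f (), 0> in TYPE; for a
   side-effect-free operand such as x * 0 it is just the converted
   RESULT wrapped as a non-lvalue.  */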
3590
3591 /* Return a tree for the case when the result of an expression is RESULT
3592 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3593 of the expression but are now not needed.
3594
3595 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3596 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3597 evaluated before OMITTED2. Otherwise, if neither has side effects,
3598 just do the conversion of RESULT to TYPE. */
3599
3600 tree
3601 omit_two_operands_loc (location_t loc, tree type, tree result,
3602 tree omitted1, tree omitted2)
3603 {
3604 tree t = fold_convert_loc (loc, type, result);
3605
3606 if (TREE_SIDE_EFFECTS (omitted2))
3607 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3608 if (TREE_SIDE_EFFECTS (omitted1))
3609 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3610
3611 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3612 }
3613
3614 \f
3615 /* Return a simplified tree node for the truth-negation of ARG. This
3616 never alters ARG itself. We assume that ARG is an operation that
3617 returns a truth value (0 or 1).
3618
3619 FIXME: one would think we would fold the result, but it causes
3620 problems with the dominator optimizer. */
3621
3622 static tree
3623 fold_truth_not_expr (location_t loc, tree arg)
3624 {
3625 tree type = TREE_TYPE (arg);
3626 enum tree_code code = TREE_CODE (arg);
3627 location_t loc1, loc2;
3628
3629 /* If this is a comparison, we can simply invert it, except for
3630 floating-point non-equality comparisons, in which case we just
3631 enclose a TRUTH_NOT_EXPR around what we have. */
3632
3633 if (TREE_CODE_CLASS (code) == tcc_comparison)
3634 {
3635 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3636 if (FLOAT_TYPE_P (op_type)
3637 && flag_trapping_math
3638 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3639 && code != NE_EXPR && code != EQ_EXPR)
3640 return NULL_TREE;
3641
3642 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3643 if (code == ERROR_MARK)
3644 return NULL_TREE;
3645
3646 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3647 TREE_OPERAND (arg, 1));
3648 if (TREE_NO_WARNING (arg))
3649 TREE_NO_WARNING (ret) = 1;
3650 return ret;
3651 }
3652
3653 switch (code)
3654 {
3655 case INTEGER_CST:
3656 return constant_boolean_node (integer_zerop (arg), type);
3657
3658 case TRUTH_AND_EXPR:
3659 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3660 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3661 return build2_loc (loc, TRUTH_OR_EXPR, type,
3662 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3663 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3664
3665 case TRUTH_OR_EXPR:
3666 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3667 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3668 return build2_loc (loc, TRUTH_AND_EXPR, type,
3669 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3670 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3671
3672 case TRUTH_XOR_EXPR:
3673 /* Here we can invert either operand. We invert the first operand
3674 unless the second operand is a TRUTH_NOT_EXPR in which case our
3675 result is the XOR of the first operand with the inside of the
3676 negation of the second operand. */
3677
3678 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3679 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3680 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3681 else
3682 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3683 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3684 TREE_OPERAND (arg, 1));
3685
3686 case TRUTH_ANDIF_EXPR:
3687 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3688 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3689 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3690 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3691 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3692
3693 case TRUTH_ORIF_EXPR:
3694 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3695 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3696 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3697 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3698 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3699
3700 case TRUTH_NOT_EXPR:
3701 return TREE_OPERAND (arg, 0);
3702
3703 case COND_EXPR:
3704 {
3705 tree arg1 = TREE_OPERAND (arg, 1);
3706 tree arg2 = TREE_OPERAND (arg, 2);
3707
3708 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3709 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3710
3711 /* A COND_EXPR may have a throw as one operand, which
3712 then has void type. Just leave void operands
3713 as they are. */
3714 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3715 VOID_TYPE_P (TREE_TYPE (arg1))
3716 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3717 VOID_TYPE_P (TREE_TYPE (arg2))
3718 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3719 }
3720
3721 case COMPOUND_EXPR:
3722 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3723 return build2_loc (loc, COMPOUND_EXPR, type,
3724 TREE_OPERAND (arg, 0),
3725 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3726
3727 case NON_LVALUE_EXPR:
3728 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3729 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3730
3731 CASE_CONVERT:
3732 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3733 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3734
3735 /* fall through */
3736
3737 case FLOAT_EXPR:
3738 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3739 return build1_loc (loc, TREE_CODE (arg), type,
3740 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3741
3742 case BIT_AND_EXPR:
3743 if (!integer_onep (TREE_OPERAND (arg, 1)))
3744 return NULL_TREE;
3745 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3746
3747 case SAVE_EXPR:
3748 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3749
3750 case CLEANUP_POINT_EXPR:
3751 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3752 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3753 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3754
3755 default:
3756 return NULL_TREE;
3757 }
3758 }
3759
3760 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3761 assume that ARG is an operation that returns a truth value (0 or 1
3762 for scalars, 0 or -1 for vectors). Return the folded expression if
3763 folding is successful. Otherwise, return NULL_TREE. */
3764
3765 static tree
3766 fold_invert_truthvalue (location_t loc, tree arg)
3767 {
3768 tree type = TREE_TYPE (arg);
3769 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3770 ? BIT_NOT_EXPR
3771 : TRUTH_NOT_EXPR,
3772 type, arg);
3773 }
3774
3775 /* Return a simplified tree node for the truth-negation of ARG. This
3776 never alters ARG itself. We assume that ARG is an operation that
3777 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3778
3779 tree
3780 invert_truthvalue_loc (location_t loc, tree arg)
3781 {
3782 if (TREE_CODE (arg) == ERROR_MARK)
3783 return arg;
3784
3785 tree type = TREE_TYPE (arg);
3786 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3787 ? BIT_NOT_EXPR
3788 : TRUTH_NOT_EXPR,
3789 type, arg);
3790 }
3791 \f
3792 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3793 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3794 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3795 is the original memory reference used to preserve the alias set of
3796 the access. */
3797
3798 static tree
3799 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3800 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3801 int unsignedp, int reversep)
3802 {
3803 tree result, bftype;
3804
  /* Attempt not to lose the access path.  */
3806 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3807 {
3808 tree ninner = TREE_OPERAND (orig_inner, 0);
3809 machine_mode nmode;
3810 HOST_WIDE_INT nbitsize, nbitpos;
3811 tree noffset;
3812 int nunsignedp, nreversep, nvolatilep = 0;
3813 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3814 &noffset, &nmode, &nunsignedp,
3815 &nreversep, &nvolatilep);
3816 if (base == inner
3817 && noffset == NULL_TREE
3818 && nbitsize >= bitsize
3819 && nbitpos <= bitpos
3820 && bitpos + bitsize <= nbitpos + nbitsize
3821 && !reversep
3822 && !nreversep
3823 && !nvolatilep)
3824 {
3825 inner = ninner;
3826 bitpos -= nbitpos;
3827 }
3828 }
3829
3830 alias_set_type iset = get_alias_set (orig_inner);
3831 if (iset == 0 && get_alias_set (inner) != iset)
3832 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3833 build_fold_addr_expr (inner),
3834 build_int_cst (ptr_type_node, 0));
3835
3836 if (bitpos == 0 && !reversep)
3837 {
3838 tree size = TYPE_SIZE (TREE_TYPE (inner));
3839 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3840 || POINTER_TYPE_P (TREE_TYPE (inner)))
3841 && tree_fits_shwi_p (size)
3842 && tree_to_shwi (size) == bitsize)
3843 return fold_convert_loc (loc, type, inner);
3844 }
3845
3846 bftype = type;
3847 if (TYPE_PRECISION (bftype) != bitsize
3848 || TYPE_UNSIGNED (bftype) == !unsignedp)
3849 bftype = build_nonstandard_integer_type (bitsize, 0);
3850
3851 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3852 bitsize_int (bitsize), bitsize_int (bitpos));
3853 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3854
3855 if (bftype != type)
3856 result = fold_convert_loc (loc, type, result);
3857
3858 return result;
3859 }
3860
3861 /* Optimize a bit-field compare.
3862
3863 There are two cases: First is a compare against a constant and the
3864 second is a comparison of two items where the fields are at the same
3865 bit position relative to the start of a chunk (byte, halfword, word)
3866 large enough to contain it. In these cases we can avoid the shift
3867 implicit in bitfield extractions.
3868
3869 For constants, we emit a compare of the shifted constant with the
3870 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3871 compared. For two fields at the same position, we do the ANDs with the
3872 similar mask and compare the result of the ANDs.
3873
3874 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3875 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3876 are the left and right operands of the comparison, respectively.
3877
3878 If the optimization described above can be done, we return the resulting
3879 tree. Otherwise we return zero. */
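/* As a purely illustrative example, for a 3-bit bit-field B stored at bit
   offset 2 of a byte-aligned word W, the comparison B == 3 can become
   (W & 0x1c) == (3 << 2): the constant is shifted and masked instead of
   the field being shifted and extended.  */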
3880
3881 static tree
3882 optimize_bit_field_compare (location_t loc, enum tree_code code,
3883 tree compare_type, tree lhs, tree rhs)
3884 {
3885 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3886 tree type = TREE_TYPE (lhs);
3887 tree unsigned_type;
3888 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3889 machine_mode lmode, rmode;
3890 scalar_int_mode nmode;
3891 int lunsignedp, runsignedp;
3892 int lreversep, rreversep;
3893 int lvolatilep = 0, rvolatilep = 0;
3894 tree linner, rinner = NULL_TREE;
3895 tree mask;
3896 tree offset;
3897
  /* Get all the information about the extractions being done.  If the bit
     size is the same as the size of the underlying object, we aren't doing
     an extraction at all and so can do nothing.  We also don't want to do
     anything if the inner expression is a PLACEHOLDER_EXPR, since we would
     then no longer be able to replace it.  */
3903 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3904 &lunsignedp, &lreversep, &lvolatilep);
3905 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3906 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3907 return 0;
3908
3909 if (const_p)
3910 rreversep = lreversep;
3911 else
3912 {
3913 /* If this is not a constant, we can only do something if bit positions,
3914 sizes, signedness and storage order are the same. */
3915 rinner
3916 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3917 &runsignedp, &rreversep, &rvolatilep);
3918
3919 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3920 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3921 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3922 return 0;
3923 }
3924
3925 /* Honor the C++ memory model and mimic what RTL expansion does. */
3926 unsigned HOST_WIDE_INT bitstart = 0;
3927 unsigned HOST_WIDE_INT bitend = 0;
3928 if (TREE_CODE (lhs) == COMPONENT_REF)
3929 {
3930 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
3931 if (offset != NULL_TREE)
3932 return 0;
3933 }
3934
3935 /* See if we can find a mode to refer to this field. We should be able to,
3936 but fail if we can't. */
3937 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
3938 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3939 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3940 TYPE_ALIGN (TREE_TYPE (rinner))),
3941 BITS_PER_WORD, false, &nmode))
3942 return 0;
3943
  /* Set an unsigned type of the precision of this mode for the
     shifts below.  */
3946 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3947
3948 /* Compute the bit position and size for the new reference and our offset
3949 within it. If the new reference is the same size as the original, we
3950 won't optimize anything, so return zero. */
3951 nbitsize = GET_MODE_BITSIZE (nmode);
3952 nbitpos = lbitpos & ~ (nbitsize - 1);
3953 lbitpos -= nbitpos;
3954 if (nbitsize == lbitsize)
3955 return 0;
3956
3957 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3958 lbitpos = nbitsize - lbitsize - lbitpos;
3959
3960 /* Make the mask to be used against the extracted field. */
3961 mask = build_int_cst_type (unsigned_type, -1);
3962 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3963 mask = const_binop (RSHIFT_EXPR, mask,
3964 size_int (nbitsize - lbitsize - lbitpos));
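  /* For example, with nbitsize == 8, lbitsize == 3 and lbitpos == 2, the
     two shifts above yield 0xff << 5 >> 3 == 0x1c, i.e. ones in exactly
     the three bits occupied by the field.  */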
3965
3966 if (! const_p)
3967 {
3968 if (nbitpos < 0)
3969 return 0;
3970
3971 /* If not comparing with constant, just rework the comparison
3972 and return. */
3973 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
3974 nbitsize, nbitpos, 1, lreversep);
3975 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
3976 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
3977 nbitsize, nbitpos, 1, rreversep);
3978 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
3979 return fold_build2_loc (loc, code, compare_type, t1, t2);
3980 }
3981
  /* Otherwise, we are handling the constant case.  See if the constant is
     too big for the field.  Warn and return a constant node for the result
     that is then known (false for EQ_EXPR, true for NE_EXPR) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length
     should be all zero.  For signed fields, the high-order bits should
     agree with the sign bit.  */
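  /* E.g. for a 3-bit unsigned field, a comparison against 9 gives
     9 >> 3 != 0, and for a 3-bit signed field (values -4 .. 3) a
     comparison against 5 gives 5 >> 2 == 1, which is neither 0 nor -1;
     either way the result is known at compile time.  */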
3990
3991 if (lunsignedp)
3992 {
3993 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
3994 {
3995 warning (0, "comparison is always %d due to width of bit-field",
3996 code == NE_EXPR);
3997 return constant_boolean_node (code == NE_EXPR, compare_type);
3998 }
3999 }
4000 else
4001 {
4002 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4003 if (tem != 0 && tem != -1)
4004 {
4005 warning (0, "comparison is always %d due to width of bit-field",
4006 code == NE_EXPR);
4007 return constant_boolean_node (code == NE_EXPR, compare_type);
4008 }
4009 }
4010
4011 if (nbitpos < 0)
4012 return 0;
4013
4014 /* Single-bit compares should always be against zero. */
4015 if (lbitsize == 1 && ! integer_zerop (rhs))
4016 {
4017 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4018 rhs = build_int_cst (type, 0);
4019 }
4020
  /* Make a new bit-field reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  */
4024 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4025 nbitsize, nbitpos, 1, lreversep);
4026
4027 rhs = const_binop (BIT_AND_EXPR,
4028 const_binop (LSHIFT_EXPR,
4029 fold_convert_loc (loc, unsigned_type, rhs),
4030 size_int (lbitpos)),
4031 mask);
4032
4033 lhs = build2_loc (loc, code, compare_type,
4034 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4035 return lhs;
4036 }
4037 \f
4038 /* Subroutine for fold_truth_andor_1: decode a field reference.
4039
4040 If EXP is a comparison reference, we return the innermost reference.
4041
4042 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4043 set to the starting bit number.
4044
4045 If the innermost field can be completely contained in a mode-sized
4046 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4047
   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.
4050
4051 *PUNSIGNEDP is set to the signedness of the field.
4052
4053 *PREVERSEP is set to the storage order of the field.
4054
4055 *PMASK is set to the mask used. This is either contained in a
4056 BIT_AND_EXPR or derived from the width of the field.
4057
4058 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4059
4060 Return 0 if this is not a component reference or is one that we can't
4061 do anything with. */
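/* As an illustration (hypothetical field F), if EXP is S.F & 7 with F an
   8-bit unsigned field, the inner reference S is returned, *PBITSIZE is
   set to 8, *PAND_MASK to 7 and *PMASK to the merged mask, also 7.  */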
4062
4063 static tree
4064 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4065 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4066 int *punsignedp, int *preversep, int *pvolatilep,
4067 tree *pmask, tree *pand_mask)
4068 {
4069 tree exp = *exp_;
4070 tree outer_type = 0;
4071 tree and_mask = 0;
4072 tree mask, inner, offset;
4073 tree unsigned_type;
4074 unsigned int precision;
4075
4076 /* All the optimizations using this function assume integer fields.
4077 There are problems with FP fields since the type_for_size call
4078 below can fail for, e.g., XFmode. */
4079 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4080 return 0;
4081
4082 /* We are interested in the bare arrangement of bits, so strip everything
4083 that doesn't affect the machine mode. However, record the type of the
4084 outermost expression if it may matter below. */
4085 if (CONVERT_EXPR_P (exp)
4086 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4087 outer_type = TREE_TYPE (exp);
4088 STRIP_NOPS (exp);
4089
4090 if (TREE_CODE (exp) == BIT_AND_EXPR)
4091 {
4092 and_mask = TREE_OPERAND (exp, 1);
4093 exp = TREE_OPERAND (exp, 0);
4094 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4095 if (TREE_CODE (and_mask) != INTEGER_CST)
4096 return 0;
4097 }
4098
4099 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4100 punsignedp, preversep, pvolatilep);
4101 if ((inner == exp && and_mask == 0)
4102 || *pbitsize < 0 || offset != 0
4103 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4104 /* Reject out-of-bound accesses (PR79731). */
4105 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4106 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4107 *pbitpos + *pbitsize) < 0))
4108 return 0;
4109
4110 *exp_ = exp;
4111
4112 /* If the number of bits in the reference is the same as the bitsize of
4113 the outer type, then the outer type gives the signedness. Otherwise
4114 (in case of a small bitfield) the signedness is unchanged. */
4115 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4116 *punsignedp = TYPE_UNSIGNED (outer_type);
4117
4118 /* Compute the mask to access the bitfield. */
4119 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4120 precision = TYPE_PRECISION (unsigned_type);
4121
4122 mask = build_int_cst_type (unsigned_type, -1);
4123
4124 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4125 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4126
4127 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4128 if (and_mask != 0)
4129 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4130 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4131
4132 *pmask = mask;
4133 *pand_mask = and_mask;
4134 return inner;
4135 }
4136
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions and the type of MASK is signed.  */
4139
4140 static int
4141 all_ones_mask_p (const_tree mask, unsigned int size)
4142 {
4143 tree type = TREE_TYPE (mask);
4144 unsigned int precision = TYPE_PRECISION (type);
4145
  /* If this function returns true when the type of the mask is
     UNSIGNED, then there will be errors.  In particular see
     gcc.c-torture/execute/990326-1.c.  There does not appear to be
     any documentation paper trail as to why this is so.  But the
     pre-wide-int code worked with that restriction and it has been
     preserved here.  */
4152 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4153 return false;
4154
4155 return wi::mask (size, false, precision) == wi::to_wide (mask);
4156 }
4157
/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */
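/* E.g. for a 16-bit EXP, VAL == 0x8000 is its sign bit and EXP itself is
   returned; for EXP == (int) C with an 8-bit C, VAL == 0x80 is matched
   against the narrower type and C is returned.  */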
4163
4164 tree
4165 sign_bit_p (tree exp, const_tree val)
4166 {
4167 int width;
4168 tree t;
4169
4170 /* Tree EXP must have an integral type. */
4171 t = TREE_TYPE (exp);
4172 if (! INTEGRAL_TYPE_P (t))
4173 return NULL_TREE;
4174
4175 /* Tree VAL must be an integer constant. */
4176 if (TREE_CODE (val) != INTEGER_CST
4177 || TREE_OVERFLOW (val))
4178 return NULL_TREE;
4179
4180 width = TYPE_PRECISION (t);
4181 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4182 return exp;
4183
4184 /* Handle extension from a narrower type. */
4185 if (TREE_CODE (exp) == NOP_EXPR
4186 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4187 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4188
4189 return NULL_TREE;
4190 }
4191
4192 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4193 to be evaluated unconditionally. */
4194
4195 static int
4196 simple_operand_p (const_tree exp)
4197 {
4198 /* Strip any conversions that don't change the machine mode. */
4199 STRIP_NOPS (exp);
4200
4201 return (CONSTANT_CLASS_P (exp)
4202 || TREE_CODE (exp) == SSA_NAME
4203 || (DECL_P (exp)
4204 && ! TREE_ADDRESSABLE (exp)
4205 && ! TREE_THIS_VOLATILE (exp)
4206 && ! DECL_NONLOCAL (exp)
4207 /* Don't regard global variables as simple. They may be
4208 allocated in ways unknown to the compiler (shared memory,
4209 #pragma weak, etc). */
4210 && ! TREE_PUBLIC (exp)
4211 && ! DECL_EXTERNAL (exp)
4212 /* Weakrefs are not safe to be read, since they can be NULL.
4213 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4214 have DECL_WEAK flag set. */
4215 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4216 /* Loading a static variable is unduly expensive, but global
4217 registers aren't expensive. */
4218 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4219 }
4220
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */
4225
4226 static bool
4227 simple_operand_p_2 (tree exp)
4228 {
4229 enum tree_code code;
4230
4231 if (TREE_SIDE_EFFECTS (exp)
4232 || tree_could_trap_p (exp))
4233 return false;
4234
4235 while (CONVERT_EXPR_P (exp))
4236 exp = TREE_OPERAND (exp, 0);
4237
4238 code = TREE_CODE (exp);
4239
4240 if (TREE_CODE_CLASS (code) == tcc_comparison)
4241 return (simple_operand_p (TREE_OPERAND (exp, 0))
4242 && simple_operand_p (TREE_OPERAND (exp, 1)));
4243
4244 if (code == TRUTH_NOT_EXPR)
4245 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4246
4247 return simple_operand_p (exp);
4248 }
4249
4250 \f
4251 /* The following functions are subroutines to fold_range_test and allow it to
4252 try to change a logical combination of comparisons into a range test.
4253
4254 For example, both
4255 X == 2 || X == 3 || X == 4 || X == 5
4256 and
4257 X >= 2 && X <= 5
4258 are converted to
4259 (unsigned) (X - 2) <= 3
4260
4261 We describe each set of comparisons as being either inside or outside
4262 a range, using a variable named like IN_P, and then describe the
4263 range with a lower and upper bound. If one of the bounds is omitted,
4264 it represents either the highest or lowest value of the type.
4265
4266 In the comments below, we represent a range by two numbers in brackets
4267 preceded by a "+" to designate being inside that range, or a "-" to
4268 designate being outside that range, so the condition can be inverted by
4269 flipping the prefix. An omitted bound is represented by a "-". For
4270 example, "- [-, 10]" means being outside the range starting at the lowest
4271 possible value and ending at 10, in other words, being greater than 10.
4272 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4273 always false.
4274
4275 We set up things so that the missing bounds are handled in a consistent
4276 manner so neither a missing bound nor "true" and "false" need to be
4277 handled using a special case. */
4278
4279 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4280 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4281 and UPPER1_P are nonzero if the respective argument is an upper bound
4282 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4283 must be specified for a comparison. ARG1 will be converted to ARG0's
4284 type if both are specified. */
4285
4286 static tree
4287 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4288 tree arg1, int upper1_p)
4289 {
4290 tree tem;
4291 int result;
4292 int sgn0, sgn1;
4293
4294 /* If neither arg represents infinity, do the normal operation.
4295 Else, if not a comparison, return infinity. Else handle the special
4296 comparison rules. Note that most of the cases below won't occur, but
4297 are handled for consistency. */
4298
4299 if (arg0 != 0 && arg1 != 0)
4300 {
4301 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4302 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4303 STRIP_NOPS (tem);
4304 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4305 }
4306
4307 if (TREE_CODE_CLASS (code) != tcc_comparison)
4308 return 0;
4309
  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real mathematics we cannot assume open-ended ranges
     are the same, but this is computer arithmetic, where numbers are
     finite.  We can therefore replace an unbounded bound with a value Z
     greater than any representable number.  This permits us to treat
     unbounded ranges as equal.  */
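  /* E.g. a missing upper bound (SGN == 1, standing for plus infinity)
     compares greater than any finite argument (SGN == 0), and two
     missing lower bounds (both SGN == -1) compare equal.  */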
4316 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4317 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4318 switch (code)
4319 {
4320 case EQ_EXPR:
4321 result = sgn0 == sgn1;
4322 break;
4323 case NE_EXPR:
4324 result = sgn0 != sgn1;
4325 break;
4326 case LT_EXPR:
4327 result = sgn0 < sgn1;
4328 break;
4329 case LE_EXPR:
4330 result = sgn0 <= sgn1;
4331 break;
4332 case GT_EXPR:
4333 result = sgn0 > sgn1;
4334 break;
4335 case GE_EXPR:
4336 result = sgn0 >= sgn1;
4337 break;
4338 default:
4339 gcc_unreachable ();
4340 }
4341
4342 return constant_boolean_node (result, type);
4343 }
4344 \f
4345 /* Helper routine for make_range. Perform one step for it, return
4346 new expression if the loop should continue or NULL_TREE if it should
4347 stop. */
4348
4349 tree
4350 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4351 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4352 bool *strict_overflow_p)
4353 {
4354 tree arg0_type = TREE_TYPE (arg0);
4355 tree n_low, n_high, low = *p_low, high = *p_high;
4356 int in_p = *p_in_p, n_in_p;
4357
4358 switch (code)
4359 {
4360 case TRUTH_NOT_EXPR:
4361 /* We can only do something if the range is testing for zero. */
4362 if (low == NULL_TREE || high == NULL_TREE
4363 || ! integer_zerop (low) || ! integer_zerop (high))
4364 return NULL_TREE;
4365 *p_in_p = ! in_p;
4366 return arg0;
4367
4368 case EQ_EXPR: case NE_EXPR:
4369 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P, since IN_P is set in the initial case of
	 being not equal to zero; "out" leaves it alone.  */
4375 if (low == NULL_TREE || high == NULL_TREE
4376 || ! integer_zerop (low) || ! integer_zerop (high)
4377 || TREE_CODE (arg1) != INTEGER_CST)
4378 return NULL_TREE;
4379
4380 switch (code)
4381 {
4382 case NE_EXPR: /* - [c, c] */
4383 low = high = arg1;
4384 break;
4385 case EQ_EXPR: /* + [c, c] */
4386 in_p = ! in_p, low = high = arg1;
4387 break;
4388 case GT_EXPR: /* - [-, c] */
4389 low = 0, high = arg1;
4390 break;
4391 case GE_EXPR: /* + [c, -] */
4392 in_p = ! in_p, low = arg1, high = 0;
4393 break;
4394 case LT_EXPR: /* - [c, -] */
4395 low = arg1, high = 0;
4396 break;
4397 case LE_EXPR: /* + [-, c] */
4398 in_p = ! in_p, low = 0, high = arg1;
4399 break;
4400 default:
4401 gcc_unreachable ();
4402 }
4403
4404 /* If this is an unsigned comparison, we also know that EXP is
4405 greater than or equal to zero. We base the range tests we make
4406 on that fact, so we record it here so we can parse existing
4407 range tests. We test arg0_type since often the return type
4408 of, e.g. EQ_EXPR, is boolean. */
4409 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4410 {
4411 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4412 in_p, low, high, 1,
4413 build_int_cst (arg0_type, 0),
4414 NULL_TREE))
4415 return NULL_TREE;
4416
4417 in_p = n_in_p, low = n_low, high = n_high;
4418
4419 /* If the high bound is missing, but we have a nonzero low
4420 bound, reverse the range so it goes from zero to the low bound
4421 minus 1. */
4422 if (high == 0 && low && ! integer_zerop (low))
4423 {
4424 in_p = ! in_p;
4425 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4426 build_int_cst (TREE_TYPE (low), 1), 0);
4427 low = build_int_cst (arg0_type, 0);
4428 }
4429 }
4430
4431 *p_low = low;
4432 *p_high = high;
4433 *p_in_p = in_p;
4434 return arg0;
4435
4436 case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure low and high
	 are non-NULL, then normalize will do the right thing.  */
4439 if (!TYPE_UNSIGNED (arg0_type)
4440 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4441 {
4442 if (low == NULL_TREE)
4443 low = TYPE_MIN_VALUE (arg0_type);
4444 if (high == NULL_TREE)
4445 high = TYPE_MAX_VALUE (arg0_type);
4446 }
4447
4448 /* (-x) IN [a,b] -> x in [-b, -a] */
4449 n_low = range_binop (MINUS_EXPR, exp_type,
4450 build_int_cst (exp_type, 0),
4451 0, high, 1);
4452 n_high = range_binop (MINUS_EXPR, exp_type,
4453 build_int_cst (exp_type, 0),
4454 0, low, 0);
4455 if (n_high != 0 && TREE_OVERFLOW (n_high))
4456 return NULL_TREE;
4457 goto normalize;
4458
4459 case BIT_NOT_EXPR:
4460 /* ~ X -> -X - 1 */
4461 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4462 build_int_cst (exp_type, 1));
4463
4464 case PLUS_EXPR:
4465 case MINUS_EXPR:
4466 if (TREE_CODE (arg1) != INTEGER_CST)
4467 return NULL_TREE;
4468
4469 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4470 move a constant to the other side. */
4471 if (!TYPE_UNSIGNED (arg0_type)
4472 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4473 return NULL_TREE;
4474
4475 /* If EXP is signed, any overflow in the computation is undefined,
4476 so we don't worry about it so long as our computations on
4477 the bounds don't overflow. For unsigned, overflow is defined
4478 and this is exactly the right thing. */
4479 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4480 arg0_type, low, 0, arg1, 0);
4481 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4482 arg0_type, high, 1, arg1, 0);
4483 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4484 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4485 return NULL_TREE;
4486
4487 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4488 *strict_overflow_p = true;
4489
4490 normalize:
4491 /* Check for an unsigned range which has wrapped around the maximum
4492 value thus making n_high < n_low, and normalize it. */
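      /* E.g. in an 8-bit unsigned type, + [250, 5] denotes
	 {250 .. 255, 0 .. 5} and is re-expressed as - [6, 249].  */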
4493 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4494 {
4495 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4496 build_int_cst (TREE_TYPE (n_high), 1), 0);
4497 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4498 build_int_cst (TREE_TYPE (n_low), 1), 0);
4499
4500 /* If the range is of the form +/- [ x+1, x ], we won't
4501 be able to normalize it. But then, it represents the
4502 whole range or the empty set, so make it
4503 +/- [ -, - ]. */
4504 if (tree_int_cst_equal (n_low, low)
4505 && tree_int_cst_equal (n_high, high))
4506 low = high = 0;
4507 else
4508 in_p = ! in_p;
4509 }
4510 else
4511 low = n_low, high = n_high;
4512
4513 *p_low = low;
4514 *p_high = high;
4515 *p_in_p = in_p;
4516 return arg0;
4517
4518 CASE_CONVERT:
4519 case NON_LVALUE_EXPR:
4520 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4521 return NULL_TREE;
4522
4523 if (! INTEGRAL_TYPE_P (arg0_type)
4524 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4525 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4526 return NULL_TREE;
4527
4528 n_low = low, n_high = high;
4529
4530 if (n_low != 0)
4531 n_low = fold_convert_loc (loc, arg0_type, n_low);
4532
4533 if (n_high != 0)
4534 n_high = fold_convert_loc (loc, arg0_type, n_high);
4535
      /* If we're converting ARG0 from an unsigned type to the signed
	 type of EXP, we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */
4544
4545 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4546 {
4547 tree high_positive;
4548 tree equiv_type;
4549 /* For fixed-point modes, we need to pass the saturating flag
4550 as the 2nd parameter. */
4551 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4552 equiv_type
4553 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4554 TYPE_SATURATING (arg0_type));
4555 else
4556 equiv_type
4557 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4558
4559 /* A range without an upper bound is, naturally, unbounded.
4560 Since convert would have cropped a very large value, use
4561 the max value for the destination type. */
4562 high_positive
4563 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4564 : TYPE_MAX_VALUE (arg0_type);
4565
4566 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4567 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4568 fold_convert_loc (loc, arg0_type,
4569 high_positive),
4570 build_int_cst (arg0_type, 1));
4571
4572 /* If the low bound is specified, "and" the range with the
4573 range for which the original unsigned value will be
4574 positive. */
4575 if (low != 0)
4576 {
4577 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4578 1, fold_convert_loc (loc, arg0_type,
4579 integer_zero_node),
4580 high_positive))
4581 return NULL_TREE;
4582
4583 in_p = (n_in_p == in_p);
4584 }
4585 else
4586 {
4587 /* Otherwise, "or" the range with the range of the input
4588 that will be interpreted as negative. */
4589 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4590 1, fold_convert_loc (loc, arg0_type,
4591 integer_zero_node),
4592 high_positive))
4593 return NULL_TREE;
4594
4595 in_p = (in_p != n_in_p);
4596 }
4597 }
4598
4599 *p_low = n_low;
4600 *p_high = n_high;
4601 *p_in_p = in_p;
4602 return arg0;
4603
4604 default:
4605 return NULL_TREE;
4606 }
4607 }
4608
4609 /* Given EXP, a logical expression, set the range it is testing into
4610 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4611 actually being tested. *PLOW and *PHIGH will be made of the same
4612 type as the returned expression. If EXP is not a comparison, we
4613 will most likely not be returning a useful value and range. Set
4614 *STRICT_OVERFLOW_P to true if the return value is only valid
4615 because signed overflow is undefined; otherwise, do not change
4616 *STRICT_OVERFLOW_P. */
4617
4618 tree
4619 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4620 bool *strict_overflow_p)
4621 {
4622 enum tree_code code;
4623 tree arg0, arg1 = NULL_TREE;
4624 tree exp_type, nexp;
4625 int in_p;
4626 tree low, high;
4627 location_t loc = EXPR_LOCATION (exp);
4628
  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We keep
     iterating as long as make_range_step can refine the expression;
     once it returns NULL_TREE we stop and use the range found so far.  */
4634
4635 in_p = 0;
4636 low = high = build_int_cst (TREE_TYPE (exp), 0);
4637
4638 while (1)
4639 {
4640 code = TREE_CODE (exp);
4641 exp_type = TREE_TYPE (exp);
4642 arg0 = NULL_TREE;
4643
4644 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4645 {
4646 if (TREE_OPERAND_LENGTH (exp) > 0)
4647 arg0 = TREE_OPERAND (exp, 0);
4648 if (TREE_CODE_CLASS (code) == tcc_binary
4649 || TREE_CODE_CLASS (code) == tcc_comparison
4650 || (TREE_CODE_CLASS (code) == tcc_expression
4651 && TREE_OPERAND_LENGTH (exp) > 1))
4652 arg1 = TREE_OPERAND (exp, 1);
4653 }
4654 if (arg0 == NULL_TREE)
4655 break;
4656
4657 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4658 &high, &in_p, strict_overflow_p);
4659 if (nexp == NULL_TREE)
4660 break;
4661 exp = nexp;
4662 }
4663
4664 /* If EXP is a constant, we can evaluate whether this is true or false. */
4665 if (TREE_CODE (exp) == INTEGER_CST)
4666 {
4667 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4668 exp, 0, low, 0))
4669 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4670 exp, 1, high, 1)));
4671 low = high = 0;
4672 exp = 0;
4673 }
4674
4675 *pin_p = in_p, *plow = low, *phigh = high;
4676 return exp;
4677 }
4678
/* Return TRUE if a [LOW, HIGH] range check can be optimized into
   a bitwise check, i.e. when
     LOW  == 0xXX...X00...0
     HIGH == 0xXX...X11...1.
   Return the corresponding mask in MASK and stem in VALUE.  */
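/* For example, LOW == 0x58 and HIGH == 0x5f share the stem 0x58 under
   the mask 0xf8, so X in [0x58, 0x5f] becomes (X & 0xf8) == 0x58.  */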
4684
4685 static bool
4686 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4687 tree *value)
4688 {
4689 if (TREE_CODE (low) != INTEGER_CST
4690 || TREE_CODE (high) != INTEGER_CST)
4691 return false;
4692
4693 unsigned prec = TYPE_PRECISION (type);
4694 wide_int lo = wi::to_wide (low, prec);
4695 wide_int hi = wi::to_wide (high, prec);
4696
4697 wide_int end_mask = lo ^ hi;
4698 if ((end_mask & (end_mask + 1)) != 0
4699 || (lo & end_mask) != 0)
4700 return false;
4701
4702 wide_int stem_mask = ~end_mask;
4703 wide_int stem = lo & stem_mask;
4704 if (stem != (hi & stem_mask))
4705 return false;
4706
4707 *mask = wide_int_to_tree (type, stem_mask);
4708 *value = wide_int_to_tree (type, stem);
4709
4710 return true;
4711 }
4712 \f
/* Helper routine for build_range_check and match.pd.  Return the type in
   which to perform the check, or NULL_TREE if it shouldn't be optimized.  */
4715
4716 tree
4717 range_check_type (tree etype)
4718 {
  /* First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
4721 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4722 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4723 TYPE_UNSIGNED (etype));
4724
4725 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4726 {
4727 tree utype, minv, maxv;
4728
4729 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4730 for the type in question, as we rely on this here. */
4731 utype = unsigned_type_for (etype);
4732 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4733 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4734 build_int_cst (TREE_TYPE (maxv), 1), 1);
4735 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4736
4737 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4738 minv, 1, maxv, 1)))
4739 etype = utype;
4740 else
4741 return NULL_TREE;
4742 }
4743 return etype;
4744 }
4745
4746 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4747 type, TYPE, return an expression to test if EXP is in (or out of, depending
4748 on IN_P) the range. Return 0 if the test couldn't be created. */
4749
4750 tree
4751 build_range_check (location_t loc, tree type, tree exp, int in_p,
4752 tree low, tree high)
4753 {
4754 tree etype = TREE_TYPE (exp), mask, value;
4755
4756 /* Disable this optimization for function pointer expressions
4757 on targets that require function pointer canonicalization. */
4758 if (targetm.have_canonicalize_funcptr_for_compare ()
4759 && TREE_CODE (etype) == POINTER_TYPE
4760 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4761 return NULL_TREE;
4762
4763 if (! in_p)
4764 {
4765 value = build_range_check (loc, type, exp, 1, low, high);
4766 if (value != 0)
4767 return invert_truthvalue_loc (loc, value);
4768
4769 return 0;
4770 }
4771
4772 if (low == 0 && high == 0)
4773 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4774
4775 if (low == 0)
4776 return fold_build2_loc (loc, LE_EXPR, type, exp,
4777 fold_convert_loc (loc, etype, high));
4778
4779 if (high == 0)
4780 return fold_build2_loc (loc, GE_EXPR, type, exp,
4781 fold_convert_loc (loc, etype, low));
4782
4783 if (operand_equal_p (low, high, 0))
4784 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4785 fold_convert_loc (loc, etype, low));
4786
4787 if (TREE_CODE (exp) == BIT_AND_EXPR
4788 && maskable_range_p (low, high, etype, &mask, &value))
4789 return fold_build2_loc (loc, EQ_EXPR, type,
4790 fold_build2_loc (loc, BIT_AND_EXPR, etype,
4791 exp, mask),
4792 value);
4793
4794 if (integer_zerop (low))
4795 {
4796 if (! TYPE_UNSIGNED (etype))
4797 {
4798 etype = unsigned_type_for (etype);
4799 high = fold_convert_loc (loc, etype, high);
4800 exp = fold_convert_loc (loc, etype, exp);
4801 }
4802 return build_range_check (loc, type, exp, 1, 0, high);
4803 }
4804
4805 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4806 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4807 {
4808 int prec = TYPE_PRECISION (etype);
4809
4810 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
4811 {
4812 if (TYPE_UNSIGNED (etype))
4813 {
4814 tree signed_etype = signed_type_for (etype);
4815 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4816 etype
4817 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4818 else
4819 etype = signed_etype;
4820 exp = fold_convert_loc (loc, etype, exp);
4821 }
4822 return fold_build2_loc (loc, GT_EXPR, type, exp,
4823 build_int_cst (etype, 0));
4824 }
4825 }
4826
  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.  */
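  /* E.g. (c >= 2) && (c <= 5) becomes (unsigned) (c - 2) <= 3, the form
     shown for the range-test example earlier in this file.  */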
4829 etype = range_check_type (etype);
4830 if (etype == NULL_TREE)
4831 return NULL_TREE;
4832
4833 if (POINTER_TYPE_P (etype))
4834 etype = unsigned_type_for (etype);
4835
4836 high = fold_convert_loc (loc, etype, high);
4837 low = fold_convert_loc (loc, etype, low);
4838 exp = fold_convert_loc (loc, etype, exp);
4839
4840 value = const_binop (MINUS_EXPR, high, low);
4841
4842 if (value != 0 && !TREE_OVERFLOW (value))
4843 return build_range_check (loc, type,
4844 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4845 1, build_int_cst (etype, 0), value);
4846
4847 return 0;
4848 }
4849 \f
4850 /* Return the predecessor of VAL in its type, handling the infinite case. */
4851
4852 static tree
4853 range_predecessor (tree val)
4854 {
4855 tree type = TREE_TYPE (val);
4856
4857 if (INTEGRAL_TYPE_P (type)
4858 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4859 return 0;
4860 else
4861 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4862 build_int_cst (TREE_TYPE (val), 1), 0);
4863 }
4864
4865 /* Return the successor of VAL in its type, handling the infinite case. */
4866
4867 static tree
4868 range_successor (tree val)
4869 {
4870 tree type = TREE_TYPE (val);
4871
4872 if (INTEGRAL_TYPE_P (type)
4873 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4874 return 0;
4875 else
4876 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4877 build_int_cst (TREE_TYPE (val), 1), 0);
4878 }
4879
4880 /* Given two ranges, see if we can merge them into one. Return 1 if we
4881 can, 0 if we can't. Set the output range into the specified parameters. */
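/* For example, merging + [2, 5] with + [4, 9] (both included) yields the
   intersection + [4, 5], while merging the excluded, adjacent ranges
   - [2, 5] and - [6, 9] yields - [2, 9].  */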
4882
4883 bool
4884 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4885 tree high0, int in1_p, tree low1, tree high1)
4886 {
4887 int no_overlap;
4888 int subset;
4889 int temp;
4890 tree tem;
4891 int in_p;
4892 tree low, high;
4893 int lowequal = ((low0 == 0 && low1 == 0)
4894 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4895 low0, 0, low1, 0)));
4896 int highequal = ((high0 == 0 && high1 == 0)
4897 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4898 high0, 1, high1, 1)));
4899
  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if that isn't already the case.  */
4902 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4903 low0, 0, low1, 0))
4904 || (lowequal
4905 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4906 high1, 1, high0, 1))))
4907 {
4908 temp = in0_p, in0_p = in1_p, in1_p = temp;
4909 tem = low0, low0 = low1, low1 = tem;
4910 tem = high0, high0 = high1, high1 = tem;
4911 }
4912
4913 /* Now flag two cases, whether the ranges are disjoint or whether the
4914 second range is totally subsumed in the first. Note that the tests
4915 below are simplified by the ones above. */
4916 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4917 high0, 1, low1, 0));
4918 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4919 high1, 1, high0, 1));
4920
4921 /* We now have four cases, depending on whether we are including or
4922 excluding the two ranges. */
4923 if (in0_p && in1_p)
4924 {
4925 /* If they don't overlap, the result is false. If the second range
4926 is a subset it is the result. Otherwise, the range is from the start
4927 of the second to the end of the first. */
4928 if (no_overlap)
4929 in_p = 0, low = high = 0;
4930 else if (subset)
4931 in_p = 1, low = low1, high = high1;
4932 else
4933 in_p = 1, low = low1, high = high0;
4934 }
4935
4936 else if (in0_p && ! in1_p)
4937 {
4938 /* If they don't overlap, the result is the first range. If they are
4939 equal, the result is false. If the second range is a subset of the
4940 first, and the ranges begin at the same place, we go from just after
4941 the end of the second range to the end of the first. If the second
4942 range is not a subset of the first, or if it is a subset and both
4943 ranges end at the same place, the range starts at the start of the
4944 first range and ends just before the second range.
4945 Otherwise, we can't describe this as a single range. */
4946 if (no_overlap)
4947 in_p = 1, low = low0, high = high0;
4948 else if (lowequal && highequal)
4949 in_p = 0, low = high = 0;
4950 else if (subset && lowequal)
4951 {
4952 low = range_successor (high1);
4953 high = high0;
4954 in_p = 1;
4955 if (low == 0)
4956 {
4957 /* We are in the weird situation where high0 > high1 but
4958 high1 has no successor. Punt. */
4959 return 0;
4960 }
4961 }
4962 else if (! subset || highequal)
4963 {
4964 low = low0;
4965 high = range_predecessor (low1);
4966 in_p = 1;
4967 if (high == 0)
4968 {
4969 /* low0 < low1 but low1 has no predecessor. Punt. */
4970 return 0;
4971 }
4972 }
4973 else
4974 return 0;
4975 }
4976
4977 else if (! in0_p && in1_p)
4978 {
4979 /* If they don't overlap, the result is the second range. If the second
4980 is a subset of the first, the result is false. Otherwise,
4981 the range starts just after the first range and ends at the
4982 end of the second. */
4983 if (no_overlap)
4984 in_p = 1, low = low1, high = high1;
4985 else if (subset || highequal)
4986 in_p = 0, low = high = 0;
4987 else
4988 {
4989 low = range_successor (high0);
4990 high = high1;
4991 in_p = 1;
4992 if (low == 0)
4993 {
4994 /* high1 > high0 but high0 has no successor. Punt. */
4995 return 0;
4996 }
4997 }
4998 }
4999
5000 else
5001 {
5002 /* The case where we are excluding both ranges. Here the complex case
5003 is if they don't overlap. In that case, the only time we have a
5004 range is if they are adjacent. If the second is a subset of the
5005 first, the result is the first. Otherwise, the range to exclude
5006 starts at the beginning of the first range and ends at the end of the
5007 second. */
5008 if (no_overlap)
5009 {
5010 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5011 range_successor (high0),
5012 1, low1, 0)))
5013 in_p = 0, low = low0, high = high1;
5014 else
5015 {
5016 /* Canonicalize - [min, x] into - [-, x]. */
5017 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5018 switch (TREE_CODE (TREE_TYPE (low0)))
5019 {
5020 case ENUMERAL_TYPE:
5021 if (TYPE_PRECISION (TREE_TYPE (low0))
5022 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5023 break;
5024 /* FALLTHROUGH */
5025 case INTEGER_TYPE:
5026 if (tree_int_cst_equal (low0,
5027 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5028 low0 = 0;
5029 break;
5030 case POINTER_TYPE:
5031 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5032 && integer_zerop (low0))
5033 low0 = 0;
5034 break;
5035 default:
5036 break;
5037 }
5038
5039 /* Canonicalize - [x, max] into - [x, -]. */
5040 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5041 switch (TREE_CODE (TREE_TYPE (high1)))
5042 {
5043 case ENUMERAL_TYPE:
5044 if (TYPE_PRECISION (TREE_TYPE (high1))
5045 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5046 break;
5047 /* FALLTHROUGH */
5048 case INTEGER_TYPE:
5049 if (tree_int_cst_equal (high1,
5050 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5051 high1 = 0;
5052 break;
5053 case POINTER_TYPE:
5054 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5055 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5056 high1, 1,
5057 build_int_cst (TREE_TYPE (high1), 1),
5058 1)))
5059 high1 = 0;
5060 break;
5061 default:
5062 break;
5063 }
5064
	      /* The ranges might also be adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
5069 if (low0 == 0 && high1 == 0)
5070 {
5071 low = range_successor (high0);
5072 high = range_predecessor (low1);
5073 if (low == 0 || high == 0)
5074 return 0;
5075
5076 in_p = 1;
5077 }
5078 else
5079 return 0;
5080 }
5081 }
5082 else if (subset)
5083 in_p = 0, low = low0, high = high0;
5084 else
5085 in_p = 0, low = low0, high = high1;
5086 }
5087
5088 *pin_p = in_p, *plow = low, *phigh = high;
5089 return 1;
5090 }
5091 \f
5092
5093 /* Subroutine of fold, looking inside expressions of the form
5094 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5095 of the COND_EXPR. This function is being used also to optimize
5096 A op B ? C : A, by reversing the comparison first.
5097
5098 Return a folded expression whose code is not a COND_EXPR
5099 anymore, or NULL_TREE if no folding opportunity is found. */
5100
5101 static tree
5102 fold_cond_expr_with_comparison (location_t loc, tree type,
5103 tree arg0, tree arg1, tree arg2)
5104 {
5105 enum tree_code comp_code = TREE_CODE (arg0);
5106 tree arg00 = TREE_OPERAND (arg0, 0);
5107 tree arg01 = TREE_OPERAND (arg0, 1);
5108 tree arg1_type = TREE_TYPE (arg1);
5109 tree tem;
5110
5111 STRIP_NOPS (arg1);
5112 STRIP_NOPS (arg2);
5113
5114 /* If we have A op 0 ? A : -A, consider applying the following
5115 transformations:
5116
5117 A == 0? A : -A same as -A
5118 A != 0? A : -A same as A
5119 A >= 0? A : -A same as abs (A)
5120 A > 0? A : -A same as abs (A)
5121 A <= 0? A : -A same as -abs (A)
5122 A < 0? A : -A same as -abs (A)
5123
5124 None of these transformations work for modes with signed
5125 zeros. If A is +/-0, the first two transformations will
5126 change the sign of the result (from +0 to -0, or vice
5127 versa). The last four will fix the sign of the result,
5128 even though the original expressions could be positive or
5129 negative, depending on the sign of A.
5130
5131 Note that all these transformations are correct if A is
5132 NaN, since the two alternatives (A and -A) are also NaNs. */
5133 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5134 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5135 ? real_zerop (arg01)
5136 : integer_zerop (arg01))
5137 && ((TREE_CODE (arg2) == NEGATE_EXPR
5138 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5139 /* In the case that A is of the form X-Y, '-A' (arg2) may
5140 have already been folded to Y-X, check for that. */
5141 || (TREE_CODE (arg1) == MINUS_EXPR
5142 && TREE_CODE (arg2) == MINUS_EXPR
5143 && operand_equal_p (TREE_OPERAND (arg1, 0),
5144 TREE_OPERAND (arg2, 1), 0)
5145 && operand_equal_p (TREE_OPERAND (arg1, 1),
5146 TREE_OPERAND (arg2, 0), 0))))
5147 switch (comp_code)
5148 {
5149 case EQ_EXPR:
5150 case UNEQ_EXPR:
5151 tem = fold_convert_loc (loc, arg1_type, arg1);
5152 return fold_convert_loc (loc, type, negate_expr (tem));
5153 case NE_EXPR:
5154 case LTGT_EXPR:
5155 return fold_convert_loc (loc, type, arg1);
5156 case UNGE_EXPR:
5157 case UNGT_EXPR:
5158 if (flag_trapping_math)
5159 break;
5160 /* Fall through. */
5161 case GE_EXPR:
5162 case GT_EXPR:
5163 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5164 break;
5165 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5166 return fold_convert_loc (loc, type, tem);
5167 case UNLE_EXPR:
5168 case UNLT_EXPR:
5169 if (flag_trapping_math)
5170 break;
5171 /* FALLTHRU */
5172 case LE_EXPR:
5173 case LT_EXPR:
5174 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5175 break;
5176 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5177 return negate_expr (fold_convert_loc (loc, type, tem));
5178 default:
5179 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5180 break;
5181 }
5182
5183 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5184 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5185 both transformations are correct when A is NaN: A != 0
5186 is then true, and A == 0 is false. */
5187
5188 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5189 && integer_zerop (arg01) && integer_zerop (arg2))
5190 {
5191 if (comp_code == NE_EXPR)
5192 return fold_convert_loc (loc, type, arg1);
5193 else if (comp_code == EQ_EXPR)
5194 return build_zero_cst (type);
5195 }
5196
5197 /* Try some transformations of A op B ? A : B.
5198
5199 A == B? A : B same as B
5200 A != B? A : B same as A
5201 A >= B? A : B same as max (A, B)
5202 A > B? A : B same as max (B, A)
5203 A <= B? A : B same as min (A, B)
5204 A < B? A : B same as min (B, A)
5205
5206 As above, these transformations don't work in the presence
5207 of signed zeros. For example, if A and B are zeros of
5208 opposite sign, the first two transformations will change
5209 the sign of the result. In the last four, the original
5210 expressions give different results for (A=+0, B=-0) and
5211 (A=-0, B=+0), but the transformed expressions do not.
5212
5213 The first two transformations are correct if either A or B
5214 is a NaN. In the first transformation, the condition will
5215 be false, and B will indeed be chosen. In the case of the
5216 second transformation, the condition A != B will be true,
5217 and A will be chosen.
5218
5219 The conversions to max() and min() are not correct if B is
5220 a number and A is not. The conditions in the original
5221 expressions will be false, so all four give B. The min()
5222 and max() versions would give a NaN instead. */
5223 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5224 && operand_equal_for_comparison_p (arg01, arg2)
5225 /* Avoid these transformations if the COND_EXPR may be used
5226 as an lvalue in the C++ front-end. PR c++/19199. */
5227 && (in_gimple_form
5228 || VECTOR_TYPE_P (type)
5229 || (! lang_GNU_CXX ()
5230 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5231 || ! maybe_lvalue_p (arg1)
5232 || ! maybe_lvalue_p (arg2)))
5233 {
5234 tree comp_op0 = arg00;
5235 tree comp_op1 = arg01;
5236 tree comp_type = TREE_TYPE (comp_op0);
5237
5238 switch (comp_code)
5239 {
5240 case EQ_EXPR:
5241 return fold_convert_loc (loc, type, arg2);
5242 case NE_EXPR:
5243 return fold_convert_loc (loc, type, arg1);
5244 case LE_EXPR:
5245 case LT_EXPR:
5246 case UNLE_EXPR:
5247 case UNLT_EXPR:
5248 /* In C++ a ?: expression can be an lvalue, so put the
5249 operand which will be used if they are equal first
5250 so that we can convert this back to the
5251 corresponding COND_EXPR. */
5252 if (!HONOR_NANS (arg1))
5253 {
5254 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5255 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5256 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5257 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5258 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5259 comp_op1, comp_op0);
5260 return fold_convert_loc (loc, type, tem);
5261 }
5262 break;
5263 case GE_EXPR:
5264 case GT_EXPR:
5265 case UNGE_EXPR:
5266 case UNGT_EXPR:
5267 if (!HONOR_NANS (arg1))
5268 {
5269 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5270 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5271 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5272 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5273 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5274 comp_op1, comp_op0);
5275 return fold_convert_loc (loc, type, tem);
5276 }
5277 break;
5278 case UNEQ_EXPR:
5279 if (!HONOR_NANS (arg1))
5280 return fold_convert_loc (loc, type, arg2);
5281 break;
5282 case LTGT_EXPR:
5283 if (!HONOR_NANS (arg1))
5284 return fold_convert_loc (loc, type, arg1);
5285 break;
5286 default:
5287 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5288 break;
5289 }
5290 }
5291
5292 return NULL_TREE;
5293 }
5294
5295
5296 \f
5297 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5298 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5299 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5300 false) >= 2)
5301 #endif
5302
5303 /* EXP is some logical combination of boolean tests. See if we can
5304 merge it into some range test. Return the new tree if so. */
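/* E.g. for X == 2 || X == 3 the operands describe the ranges + [2, 2]
   and + [3, 3], which in effect merge to + [2, 3] and come back from
   build_range_check as (unsigned) (X - 2) <= 1.  */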
5305
5306 static tree
5307 fold_range_test (location_t loc, enum tree_code code, tree type,
5308 tree op0, tree op1)
5309 {
5310 int or_op = (code == TRUTH_ORIF_EXPR
5311 || code == TRUTH_OR_EXPR);
5312 int in0_p, in1_p, in_p;
5313 tree low0, low1, low, high0, high1, high;
5314 bool strict_overflow_p = false;
5315 tree tem, lhs, rhs;
5316 const char * const warnmsg = G_("assuming signed overflow does not occur "
5317 "when simplifying range test");
5318
5319 if (!INTEGRAL_TYPE_P (type))
5320 return 0;
5321
5322 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5323 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5324
5325 /* If this is an OR operation, invert both sides; we will invert
5326 again at the end. */
5327 if (or_op)
5328 in0_p = ! in0_p, in1_p = ! in1_p;
5329
5330 /* If both expressions are the same, if we can merge the ranges, and we
5331 can build the range test, return it or it inverted. If one of the
5332 ranges is always true or always false, consider it to be the same
5333 expression as the other. */
5334 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5335 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5336 in1_p, low1, high1)
5337 && 0 != (tem = (build_range_check (loc, type,
5338 lhs != 0 ? lhs
5339 : rhs != 0 ? rhs : integer_zero_node,
5340 in_p, low, high))))
5341 {
5342 if (strict_overflow_p)
5343 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5344 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5345 }
5346
  /* On machines where branches are expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
5350 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5351 && !flag_sanitize_coverage
5352 && lhs != 0 && rhs != 0
5353 && (code == TRUTH_ANDIF_EXPR
5354 || code == TRUTH_ORIF_EXPR)
5355 && operand_equal_p (lhs, rhs, 0))
5356 {
5357 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5358 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5359 which cases we can't do this. */
5360 if (simple_operand_p (lhs))
5361 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5362 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5363 type, op0, op1);
5364
5365 else if (!lang_hooks.decls.global_bindings_p ()
5366 && !CONTAINS_PLACEHOLDER_P (lhs))
5367 {
5368 tree common = save_expr (lhs);
5369
5370 if (0 != (lhs = build_range_check (loc, type, common,
5371 or_op ? ! in0_p : in0_p,
5372 low0, high0))
5373 && (0 != (rhs = build_range_check (loc, type, common,
5374 or_op ? ! in1_p : in1_p,
5375 low1, high1))))
5376 {
5377 if (strict_overflow_p)
5378 fold_overflow_warning (warnmsg,
5379 WARN_STRICT_OVERFLOW_COMPARISON);
5380 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5381 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5382 type, lhs, rhs);
5383 }
5384 }
5385 }
5386
5387 return 0;
5388 }
5389 \f
5390 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5391    P-bit value.  Arrange things so the extra bits will be set to zero if and
5392    only if C is sign-extended to its full width.  If MASK is nonzero,
5393 it is an INTEGER_CST that should be AND'ed with the extra bits. */
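/* For example, with an 8-bit mode, P == 4 and C == 0xE (binary 1110,
   i.e. -2 as a signed 4-bit value), the sign bit of the field gets
   replicated through the upper bits, producing 0xFE -- the full-width
   sign extension of the 4-bit value.  */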
5394
5395 static tree
5396 unextend (tree c, int p, int unsignedp, tree mask)
5397 {
5398 tree type = TREE_TYPE (c);
5399 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5400 tree temp;
5401
5402 if (p == modesize || unsignedp)
5403 return c;
5404
5405 /* We work by getting just the sign bit into the low-order bit, then
5406 into the high-order bit, then sign-extend. We then XOR that value
5407 with C. */
5408 temp = build_int_cst (TREE_TYPE (c),
5409 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5410
5411 /* We must use a signed type in order to get an arithmetic right shift.
5412 However, we must also avoid introducing accidental overflows, so that
5413 a subsequent call to integer_zerop will work. Hence we must
5414 do the type conversion here. At this point, the constant is either
5415 zero or one, and the conversion to a signed type can never overflow.
5416 We could get an overflow if this conversion is done anywhere else. */
5417 if (TYPE_UNSIGNED (type))
5418 temp = fold_convert (signed_type_for (type), temp);
5419
5420 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5421 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5422 if (mask != 0)
5423 temp = const_binop (BIT_AND_EXPR, temp,
5424 fold_convert (TREE_TYPE (c), mask));
5425 /* If necessary, convert the type back to match the type of C. */
5426 if (TYPE_UNSIGNED (type))
5427 temp = fold_convert (type, temp);
5428
5429 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5430 }
5431 \f
5432 /* For an expression that has the form
5433 (A && B) || ~B
5434 or
5435 (A || B) && ~B,
5436 we can drop one of the inner expressions and simplify to
5437 A || ~B
5438 or
5439 A && ~B
5440    LOC is the location of the resulting expression.  OP is the inner
5441    logical operation (the left-hand side in the examples above), and CMPOP
5442    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
5443 removing a condition that guards another, as in
5444 (A != NULL && A->...) || A == NULL
5445 which we must not transform. If RHS_ONLY is true, only eliminate the
5446 right-most operand of the inner logical operation. */
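/* For example, with integer operands, (a < b && c != 0) || a >= b
   simplifies to (c != 0) || a >= b, because a >= b is the inverse of
   the a < b arm.  */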
5447
5448 static tree
5449 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5450 bool rhs_only)
5451 {
5452 tree type = TREE_TYPE (cmpop);
5453 enum tree_code code = TREE_CODE (cmpop);
5454 enum tree_code truthop_code = TREE_CODE (op);
5455 tree lhs = TREE_OPERAND (op, 0);
5456 tree rhs = TREE_OPERAND (op, 1);
5457 tree orig_lhs = lhs, orig_rhs = rhs;
5458 enum tree_code rhs_code = TREE_CODE (rhs);
5459 enum tree_code lhs_code = TREE_CODE (lhs);
5460 enum tree_code inv_code;
5461
5462 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5463 return NULL_TREE;
5464
5465 if (TREE_CODE_CLASS (code) != tcc_comparison)
5466 return NULL_TREE;
5467
5468 if (rhs_code == truthop_code)
5469 {
5470 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5471 if (newrhs != NULL_TREE)
5472 {
5473 rhs = newrhs;
5474 rhs_code = TREE_CODE (rhs);
5475 }
5476 }
5477 if (lhs_code == truthop_code && !rhs_only)
5478 {
5479 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5480 if (newlhs != NULL_TREE)
5481 {
5482 lhs = newlhs;
5483 lhs_code = TREE_CODE (lhs);
5484 }
5485 }
5486
5487 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5488 if (inv_code == rhs_code
5489 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5490 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5491 return lhs;
5492 if (!rhs_only && inv_code == lhs_code
5493 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5494 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5495 return rhs;
5496 if (rhs != orig_rhs || lhs != orig_lhs)
5497 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5498 lhs, rhs);
5499 return NULL_TREE;
5500 }
5501
5502 /* Find ways of folding logical expressions of LHS and RHS:
5503 Try to merge two comparisons to the same innermost item.
5504 Look for range tests like "ch >= '0' && ch <= '9'".
5505 Look for combinations of simple terms on machines with expensive branches
5506 and evaluate the RHS unconditionally.
5507
5508 For example, if we have p->a == 2 && p->b == 4 and we can make an
5509 object large enough to span both A and B, we can do this with a comparison
5510    against the object ANDed with a mask.
5511
5512 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5513 operations to do this with one comparison.
5514
5515    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5516    function and the one above.
5517
5518 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5519 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5520
5521 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5522 two operands.
5523
5524 We return the simplified tree or 0 if no optimization is possible. */
5525
5526 static tree
5527 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5528 tree lhs, tree rhs)
5529 {
5530 /* If this is the "or" of two comparisons, we can do something if
5531 the comparisons are NE_EXPR. If this is the "and", we can do something
5532 if the comparisons are EQ_EXPR. I.e.,
5533 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5534
5535 WANTED_CODE is this operation code. For single bit fields, we can
5536 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5537 comparison for one-bit fields. */
5538
5539 enum tree_code wanted_code;
5540 enum tree_code lcode, rcode;
5541 tree ll_arg, lr_arg, rl_arg, rr_arg;
5542 tree ll_inner, lr_inner, rl_inner, rr_inner;
5543 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5544 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5545 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5546 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5547 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5548 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5549 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5550 scalar_int_mode lnmode, rnmode;
5551 tree ll_mask, lr_mask, rl_mask, rr_mask;
5552 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5553 tree l_const, r_const;
5554 tree lntype, rntype, result;
5555 HOST_WIDE_INT first_bit, end_bit;
5556 int volatilep;
5557
5558 /* Start by getting the comparison codes. Fail if anything is volatile.
5559 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5560 it were surrounded with a NE_EXPR. */
5561
5562 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5563 return 0;
5564
5565 lcode = TREE_CODE (lhs);
5566 rcode = TREE_CODE (rhs);
5567
5568 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5569 {
5570 lhs = build2 (NE_EXPR, truth_type, lhs,
5571 build_int_cst (TREE_TYPE (lhs), 0));
5572 lcode = NE_EXPR;
5573 }
5574
5575 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5576 {
5577 rhs = build2 (NE_EXPR, truth_type, rhs,
5578 build_int_cst (TREE_TYPE (rhs), 0));
5579 rcode = NE_EXPR;
5580 }
5581
5582 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5583 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5584 return 0;
5585
5586 ll_arg = TREE_OPERAND (lhs, 0);
5587 lr_arg = TREE_OPERAND (lhs, 1);
5588 rl_arg = TREE_OPERAND (rhs, 0);
5589 rr_arg = TREE_OPERAND (rhs, 1);
5590
5591   /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations.  */
5592 if (simple_operand_p (ll_arg)
5593 && simple_operand_p (lr_arg))
5594 {
5595 if (operand_equal_p (ll_arg, rl_arg, 0)
5596 && operand_equal_p (lr_arg, rr_arg, 0))
5597 {
5598 result = combine_comparisons (loc, code, lcode, rcode,
5599 truth_type, ll_arg, lr_arg);
5600 if (result)
5601 return result;
5602 }
5603 else if (operand_equal_p (ll_arg, rr_arg, 0)
5604 && operand_equal_p (lr_arg, rl_arg, 0))
5605 {
5606 result = combine_comparisons (loc, code, lcode,
5607 swap_tree_comparison (rcode),
5608 truth_type, ll_arg, lr_arg);
5609 if (result)
5610 return result;
5611 }
5612 }
5613
5614 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5615 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5616
5617 /* If the RHS can be evaluated unconditionally and its operands are
5618 simple, it wins to evaluate the RHS unconditionally on machines
5619 with expensive branches. In this case, this isn't a comparison
5620 that can be merged. */
5621
5622 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5623 false) >= 2
5624 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5625 && simple_operand_p (rl_arg)
5626 && simple_operand_p (rr_arg))
5627 {
5628 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5629 if (code == TRUTH_OR_EXPR
5630 && lcode == NE_EXPR && integer_zerop (lr_arg)
5631 && rcode == NE_EXPR && integer_zerop (rr_arg)
5632 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5633 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5634 return build2_loc (loc, NE_EXPR, truth_type,
5635 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5636 ll_arg, rl_arg),
5637 build_int_cst (TREE_TYPE (ll_arg), 0));
5638
5639 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5640 if (code == TRUTH_AND_EXPR
5641 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5642 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5643 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5644 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5645 return build2_loc (loc, EQ_EXPR, truth_type,
5646 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5647 ll_arg, rl_arg),
5648 build_int_cst (TREE_TYPE (ll_arg), 0));
5649 }
5650
5651 /* See if the comparisons can be merged. Then get all the parameters for
5652 each side. */
5653
5654 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5655 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5656 return 0;
5657
5658 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5659 volatilep = 0;
5660 ll_inner = decode_field_reference (loc, &ll_arg,
5661 &ll_bitsize, &ll_bitpos, &ll_mode,
5662 &ll_unsignedp, &ll_reversep, &volatilep,
5663 &ll_mask, &ll_and_mask);
5664 lr_inner = decode_field_reference (loc, &lr_arg,
5665 &lr_bitsize, &lr_bitpos, &lr_mode,
5666 &lr_unsignedp, &lr_reversep, &volatilep,
5667 &lr_mask, &lr_and_mask);
5668 rl_inner = decode_field_reference (loc, &rl_arg,
5669 &rl_bitsize, &rl_bitpos, &rl_mode,
5670 &rl_unsignedp, &rl_reversep, &volatilep,
5671 &rl_mask, &rl_and_mask);
5672 rr_inner = decode_field_reference (loc, &rr_arg,
5673 &rr_bitsize, &rr_bitpos, &rr_mode,
5674 &rr_unsignedp, &rr_reversep, &volatilep,
5675 &rr_mask, &rr_and_mask);
5676
5677   /* It must be true that the inner operation on the lhs of each
5678      comparison is the same if we are to be able to do anything.
5679      Then see if we have constants.  If not, the same must be true for
5680 the rhs's. */
5681 if (volatilep
5682 || ll_reversep != rl_reversep
5683 || ll_inner == 0 || rl_inner == 0
5684 || ! operand_equal_p (ll_inner, rl_inner, 0))
5685 return 0;
5686
5687 if (TREE_CODE (lr_arg) == INTEGER_CST
5688 && TREE_CODE (rr_arg) == INTEGER_CST)
5689 {
5690 l_const = lr_arg, r_const = rr_arg;
5691 lr_reversep = ll_reversep;
5692 }
5693 else if (lr_reversep != rr_reversep
5694 || lr_inner == 0 || rr_inner == 0
5695 || ! operand_equal_p (lr_inner, rr_inner, 0))
5696 return 0;
5697 else
5698 l_const = r_const = 0;
5699
5700 /* If either comparison code is not correct for our logical operation,
5701 fail. However, we can convert a one-bit comparison against zero into
5702 the opposite comparison against that bit being set in the field. */
5703
5704 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5705 if (lcode != wanted_code)
5706 {
5707 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5708 {
5709 /* Make the left operand unsigned, since we are only interested
5710 in the value of one bit. Otherwise we are doing the wrong
5711 thing below. */
5712 ll_unsignedp = 1;
5713 l_const = ll_mask;
5714 }
5715 else
5716 return 0;
5717 }
5718
5719 /* This is analogous to the code for l_const above. */
5720 if (rcode != wanted_code)
5721 {
5722 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5723 {
5724 rl_unsignedp = 1;
5725 r_const = rl_mask;
5726 }
5727 else
5728 return 0;
5729 }
5730
5731 /* See if we can find a mode that contains both fields being compared on
5732 the left. If we can't, fail. Otherwise, update all constants and masks
5733 to be relative to a field of that size. */
5734 first_bit = MIN (ll_bitpos, rl_bitpos);
5735 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5736 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5737 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
5738 volatilep, &lnmode))
5739 return 0;
5740
5741 lnbitsize = GET_MODE_BITSIZE (lnmode);
5742 lnbitpos = first_bit & ~ (lnbitsize - 1);
5743 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5744 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5745
5746 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5747 {
5748 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5749 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5750 }
5751
5752 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5753 size_int (xll_bitpos));
5754 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5755 size_int (xrl_bitpos));
5756
5757 if (l_const)
5758 {
5759 l_const = fold_convert_loc (loc, lntype, l_const);
5760 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5761 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5762 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5763 fold_build1_loc (loc, BIT_NOT_EXPR,
5764 lntype, ll_mask))))
5765 {
5766 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5767
5768 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5769 }
5770 }
5771 if (r_const)
5772 {
5773 r_const = fold_convert_loc (loc, lntype, r_const);
5774 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5775 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5776 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5777 fold_build1_loc (loc, BIT_NOT_EXPR,
5778 lntype, rl_mask))))
5779 {
5780 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5781
5782 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5783 }
5784 }
5785
5786   /* If the right sides are not constant, do the same for them.  Also,
5787 disallow this optimization if a size or signedness mismatch occurs
5788 between the left and right sides. */
5789 if (l_const == 0)
5790 {
5791 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5792 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5793 /* Make sure the two fields on the right
5794 correspond to the left without being swapped. */
5795 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5796 return 0;
5797
5798 first_bit = MIN (lr_bitpos, rr_bitpos);
5799 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5800 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5801 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
5802 volatilep, &rnmode))
5803 return 0;
5804
5805 rnbitsize = GET_MODE_BITSIZE (rnmode);
5806 rnbitpos = first_bit & ~ (rnbitsize - 1);
5807 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5808 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5809
5810 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5811 {
5812 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5813 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5814 }
5815
5816 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5817 rntype, lr_mask),
5818 size_int (xlr_bitpos));
5819 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5820 rntype, rr_mask),
5821 size_int (xrr_bitpos));
5822
5823 /* Make a mask that corresponds to both fields being compared.
5824 Do this for both items being compared. If the operands are the
5825 same size and the bits being compared are in the same position
5826 then we can do this by masking both and comparing the masked
5827 results. */
5828 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5829 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5830 if (lnbitsize == rnbitsize
5831 && xll_bitpos == xlr_bitpos
5832 && lnbitpos >= 0
5833 && rnbitpos >= 0)
5834 {
5835 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5836 lntype, lnbitsize, lnbitpos,
5837 ll_unsignedp || rl_unsignedp, ll_reversep);
5838 if (! all_ones_mask_p (ll_mask, lnbitsize))
5839 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5840
5841 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5842 rntype, rnbitsize, rnbitpos,
5843 lr_unsignedp || rr_unsignedp, lr_reversep);
5844 if (! all_ones_mask_p (lr_mask, rnbitsize))
5845 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5846
5847 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5848 }
5849
5850 /* There is still another way we can do something: If both pairs of
5851 fields being compared are adjacent, we may be able to make a wider
5852 field containing them both.
5853
5854 Note that we still must mask the lhs/rhs expressions. Furthermore,
5855 the mask must be shifted to account for the shift done by
5856 make_bit_field_ref. */
5857 if (((ll_bitsize + ll_bitpos == rl_bitpos
5858 && lr_bitsize + lr_bitpos == rr_bitpos)
5859 || (ll_bitpos == rl_bitpos + rl_bitsize
5860 && lr_bitpos == rr_bitpos + rr_bitsize))
5861 && ll_bitpos >= 0
5862 && rl_bitpos >= 0
5863 && lr_bitpos >= 0
5864 && rr_bitpos >= 0)
5865 {
5866 tree type;
5867
5868 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5869 ll_bitsize + rl_bitsize,
5870 MIN (ll_bitpos, rl_bitpos),
5871 ll_unsignedp, ll_reversep);
5872 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5873 lr_bitsize + rr_bitsize,
5874 MIN (lr_bitpos, rr_bitpos),
5875 lr_unsignedp, lr_reversep);
5876
5877 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5878 size_int (MIN (xll_bitpos, xrl_bitpos)));
5879 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5880 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5881
5882 /* Convert to the smaller type before masking out unwanted bits. */
5883 type = lntype;
5884 if (lntype != rntype)
5885 {
5886 if (lnbitsize > rnbitsize)
5887 {
5888 lhs = fold_convert_loc (loc, rntype, lhs);
5889 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5890 type = rntype;
5891 }
5892 else if (lnbitsize < rnbitsize)
5893 {
5894 rhs = fold_convert_loc (loc, lntype, rhs);
5895 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5896 type = lntype;
5897 }
5898 }
5899
5900 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5901 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5902
5903 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5904 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5905
5906 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5907 }
5908
5909 return 0;
5910 }
5911
5912 /* Handle the case of comparisons with constants. If there is something in
5913 common between the masks, those bits of the constants must be the same.
5914 If not, the condition is always false. Test for this to avoid generating
5915 incorrect code below. */
5916 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5917 if (! integer_zerop (result)
5918 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5919 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5920 {
5921 if (wanted_code == NE_EXPR)
5922 {
5923 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5924 return constant_boolean_node (true, truth_type);
5925 }
5926 else
5927 {
5928 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5929 return constant_boolean_node (false, truth_type);
5930 }
5931 }
5932
5933 if (lnbitpos < 0)
5934 return 0;
5935
5936 /* Construct the expression we will return. First get the component
5937 reference we will make. Unless the mask is all ones the width of
5938 that field, perform the mask operation. Then compare with the
5939 merged constant. */
5940 result = make_bit_field_ref (loc, ll_inner, ll_arg,
5941 lntype, lnbitsize, lnbitpos,
5942 ll_unsignedp || rl_unsignedp, ll_reversep);
5943
5944 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5945 if (! all_ones_mask_p (ll_mask, lnbitsize))
5946 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5947
5948 return build2_loc (loc, wanted_code, truth_type, result,
5949 const_binop (BIT_IOR_EXPR, l_const, r_const));
5950 }
5951 \f
5952 /* T is an integer expression that is being multiplied or divided by, or
5953    taken modulo, a constant C; CODE says which operation and what kind of
5954    divide or modulus.  See if we can eliminate that operation by folding it with
5955 other operations already in T. WIDE_TYPE, if non-null, is a type that
5956 should be used for the computation if wider than our type.
5957
5958 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5959 (X * 2) + (Y * 4). We must, however, be assured that either the original
5960 expression would not overflow or that overflow is undefined for the type
5961 in the language in question.
5962
5963 If we return a non-null expression, it is an equivalent form of the
5964 original computation, but need not be in the original type.
5965
5966    We set *STRICT_OVERFLOW_P to true if the return value depends on
5967 signed overflow being undefined. Otherwise we do not change
5968 *STRICT_OVERFLOW_P. */
5969
5970 static tree
5971 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5972 bool *strict_overflow_p)
5973 {
5974 /* To avoid exponential search depth, refuse to allow recursion past
5975 three levels. Beyond that (1) it's highly unlikely that we'll find
5976 something interesting and (2) we've probably processed it before
5977 when we built the inner expression. */
5978
5979 static int depth;
5980 tree ret;
5981
5982 if (depth > 3)
5983 return NULL;
5984
5985 depth++;
5986 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5987 depth--;
5988
5989 return ret;
5990 }
5991
5992 static tree
5993 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5994 bool *strict_overflow_p)
5995 {
5996 tree type = TREE_TYPE (t);
5997 enum tree_code tcode = TREE_CODE (t);
5998 tree ctype = (wide_type != 0
5999 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6000 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6001 ? wide_type : type);
6002 tree t1, t2;
6003 int same_p = tcode == code;
6004 tree op0 = NULL_TREE, op1 = NULL_TREE;
6005 bool sub_strict_overflow_p;
6006
6007 /* Don't deal with constants of zero here; they confuse the code below. */
6008 if (integer_zerop (c))
6009 return NULL_TREE;
6010
6011 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6012 op0 = TREE_OPERAND (t, 0);
6013
6014 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6015 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6016
6017 /* Note that we need not handle conditional operations here since fold
6018 already handles those cases. So just do arithmetic here. */
6019 switch (tcode)
6020 {
6021 case INTEGER_CST:
6022 /* For a constant, we can always simplify if we are a multiply
6023 or (for divide and modulus) if it is a multiple of our constant. */
6024 if (code == MULT_EXPR
6025 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6026 TYPE_SIGN (type)))
6027 {
6028 tree tem = const_binop (code, fold_convert (ctype, t),
6029 fold_convert (ctype, c));
6030 /* If the multiplication overflowed, we lost information on it.
6031 See PR68142 and PR69845. */
6032 if (TREE_OVERFLOW (tem))
6033 return NULL_TREE;
6034 return tem;
6035 }
6036 break;
6037
6038 CASE_CONVERT: case NON_LVALUE_EXPR:
6039 /* If op0 is an expression ... */
6040 if ((COMPARISON_CLASS_P (op0)
6041 || UNARY_CLASS_P (op0)
6042 || BINARY_CLASS_P (op0)
6043 || VL_EXP_CLASS_P (op0)
6044 || EXPRESSION_CLASS_P (op0))
6045 /* ... and has wrapping overflow, and its type is smaller
6046 than ctype, then we cannot pass through as widening. */
6047 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6048 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6049 && (TYPE_PRECISION (ctype)
6050 > TYPE_PRECISION (TREE_TYPE (op0))))
6051 /* ... or this is a truncation (t is narrower than op0),
6052 then we cannot pass through this narrowing. */
6053 || (TYPE_PRECISION (type)
6054 < TYPE_PRECISION (TREE_TYPE (op0)))
6055 /* ... or signedness changes for division or modulus,
6056 then we cannot pass through this conversion. */
6057 || (code != MULT_EXPR
6058 && (TYPE_UNSIGNED (ctype)
6059 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6060 	      /* ... or has undefined overflow while the type converted
6061 		 to has not, we cannot do the operation in the inner type
6062 as that would introduce undefined overflow. */
6063 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6064 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6065 && !TYPE_OVERFLOW_UNDEFINED (type))))
6066 break;
6067
6068 /* Pass the constant down and see if we can make a simplification. If
6069 we can, replace this expression with the inner simplification for
6070 possible later conversion to our or some other type. */
6071 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6072 && TREE_CODE (t2) == INTEGER_CST
6073 && !TREE_OVERFLOW (t2)
6074 && (0 != (t1 = extract_muldiv (op0, t2, code,
6075 code == MULT_EXPR
6076 ? ctype : NULL_TREE,
6077 strict_overflow_p))))
6078 return t1;
6079 break;
6080
6081 case ABS_EXPR:
6082 /* If widening the type changes it from signed to unsigned, then we
6083 must avoid building ABS_EXPR itself as unsigned. */
6084 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6085 {
6086 tree cstype = (*signed_type_for) (ctype);
6087 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6088 != 0)
6089 {
6090 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6091 return fold_convert (ctype, t1);
6092 }
6093 break;
6094 }
6095 /* If the constant is negative, we cannot simplify this. */
6096 if (tree_int_cst_sgn (c) == -1)
6097 break;
6098 /* FALLTHROUGH */
6099 case NEGATE_EXPR:
6100 /* For division and modulus, type can't be unsigned, as e.g.
6101 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6102 For signed types, even with wrapping overflow, this is fine. */
6103 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6104 break;
6105 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6106 != 0)
6107 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6108 break;
6109
6110 case MIN_EXPR: case MAX_EXPR:
6111 /* If widening the type changes the signedness, then we can't perform
6112 this optimization as that changes the result. */
6113 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6114 break;
6115
6116 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6117 sub_strict_overflow_p = false;
6118 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6119 &sub_strict_overflow_p)) != 0
6120 && (t2 = extract_muldiv (op1, c, code, wide_type,
6121 &sub_strict_overflow_p)) != 0)
6122 {
6123 if (tree_int_cst_sgn (c) < 0)
6124 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6125 if (sub_strict_overflow_p)
6126 *strict_overflow_p = true;
6127 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6128 fold_convert (ctype, t2));
6129 }
6130 break;
6131
6132 case LSHIFT_EXPR: case RSHIFT_EXPR:
6133 /* If the second operand is constant, this is a multiplication
6134 	 or floor division by a power of two, so we can treat it that
6135 way unless the multiplier or divisor overflows. Signed
6136 left-shift overflow is implementation-defined rather than
6137 undefined in C90, so do not convert signed left shift into
6138 multiplication. */
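      /* For example, an unsigned X << 3 is handled here as X * 8, and an
	 unsigned X >> 3 as the floor division X / 8.  */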
6139 if (TREE_CODE (op1) == INTEGER_CST
6140 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6141 /* const_binop may not detect overflow correctly,
6142 so check for it explicitly here. */
6143 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6144 wi::to_wide (op1))
6145 && 0 != (t1 = fold_convert (ctype,
6146 const_binop (LSHIFT_EXPR,
6147 size_one_node,
6148 op1)))
6149 && !TREE_OVERFLOW (t1))
6150 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6151 ? MULT_EXPR : FLOOR_DIV_EXPR,
6152 ctype,
6153 fold_convert (ctype, op0),
6154 t1),
6155 c, code, wide_type, strict_overflow_p);
6156 break;
6157
6158 case PLUS_EXPR: case MINUS_EXPR:
6159 /* See if we can eliminate the operation on both sides. If we can, we
6160 can return a new PLUS or MINUS. If we can't, the only remaining
6161 cases where we can do anything are if the second operand is a
6162 constant. */
6163 sub_strict_overflow_p = false;
6164 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6165 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6166 if (t1 != 0 && t2 != 0
6167 && TYPE_OVERFLOW_WRAPS (ctype)
6168 && (code == MULT_EXPR
6169 /* If not multiplication, we can only do this if both operands
6170 are divisible by c. */
6171 || (multiple_of_p (ctype, op0, c)
6172 && multiple_of_p (ctype, op1, c))))
6173 {
6174 if (sub_strict_overflow_p)
6175 *strict_overflow_p = true;
6176 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6177 fold_convert (ctype, t2));
6178 }
6179
6180 /* If this was a subtraction, negate OP1 and set it to be an addition.
6181 This simplifies the logic below. */
6182 if (tcode == MINUS_EXPR)
6183 {
6184 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6185 /* If OP1 was not easily negatable, the constant may be OP0. */
6186 if (TREE_CODE (op0) == INTEGER_CST)
6187 {
6188 std::swap (op0, op1);
6189 std::swap (t1, t2);
6190 }
6191 }
6192
6193 if (TREE_CODE (op1) != INTEGER_CST)
6194 break;
6195
6196 /* If either OP1 or C are negative, this optimization is not safe for
6197 some of the division and remainder types while for others we need
6198 to change the code. */
6199 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6200 {
6201 if (code == CEIL_DIV_EXPR)
6202 code = FLOOR_DIV_EXPR;
6203 else if (code == FLOOR_DIV_EXPR)
6204 code = CEIL_DIV_EXPR;
6205 else if (code != MULT_EXPR
6206 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6207 break;
6208 }
6209
6210 /* If it's a multiply or a division/modulus operation of a multiple
6211 of our constant, do the operation and verify it doesn't overflow. */
6212 if (code == MULT_EXPR
6213 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6214 TYPE_SIGN (type)))
6215 {
6216 op1 = const_binop (code, fold_convert (ctype, op1),
6217 fold_convert (ctype, c));
6218 /* We allow the constant to overflow with wrapping semantics. */
6219 if (op1 == 0
6220 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6221 break;
6222 }
6223 else
6224 break;
6225
6226 /* If we have an unsigned type, we cannot widen the operation since it
6227 will change the result if the original computation overflowed. */
6228 if (TYPE_UNSIGNED (ctype) && ctype != type)
6229 break;
6230
6231 /* The last case is if we are a multiply. In that case, we can
6232 apply the distributive law to commute the multiply and addition
6233 if the multiplication of the constants doesn't overflow
6234 and overflow is defined. With undefined overflow
6235 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
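      /* For example, with wrapping overflow, (X + 4) * 3 can become
	 X * 3 + 12.  */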
6236 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6237 return fold_build2 (tcode, ctype,
6238 fold_build2 (code, ctype,
6239 fold_convert (ctype, op0),
6240 fold_convert (ctype, c)),
6241 op1);
6242
6243 break;
6244
6245 case MULT_EXPR:
6246 /* We have a special case here if we are doing something like
6247 (C * 8) % 4 since we know that's zero. */
6248 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6249 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6250 /* If the multiplication can overflow we cannot optimize this. */
6251 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6252 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6253 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6254 TYPE_SIGN (type)))
6255 {
6256 *strict_overflow_p = true;
6257 return omit_one_operand (type, integer_zero_node, op0);
6258 }
6259
6260 /* ... fall through ... */
6261
6262 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6263 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6264 /* If we can extract our operation from the LHS, do so and return a
6265 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6266 do something only if the second operand is a constant. */
6267 if (same_p
6268 && TYPE_OVERFLOW_WRAPS (ctype)
6269 && (t1 = extract_muldiv (op0, c, code, wide_type,
6270 strict_overflow_p)) != 0)
6271 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6272 fold_convert (ctype, op1));
6273 else if (tcode == MULT_EXPR && code == MULT_EXPR
6274 && TYPE_OVERFLOW_WRAPS (ctype)
6275 && (t1 = extract_muldiv (op1, c, code, wide_type,
6276 strict_overflow_p)) != 0)
6277 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6278 fold_convert (ctype, t1));
6279 else if (TREE_CODE (op1) != INTEGER_CST)
6280 return 0;
6281
6282 /* If these are the same operation types, we can associate them
6283 assuming no overflow. */
6284 if (tcode == code)
6285 {
6286 bool overflow_p = false;
6287 bool overflow_mul_p;
6288 signop sign = TYPE_SIGN (ctype);
6289 unsigned prec = TYPE_PRECISION (ctype);
6290 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6291 wi::to_wide (c, prec),
6292 sign, &overflow_mul_p);
6293 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6294 if (overflow_mul_p
6295 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6296 overflow_p = true;
6297 if (!overflow_p)
6298 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6299 wide_int_to_tree (ctype, mul));
6300 }
6301
6302 /* If these operations "cancel" each other, we have the main
6303 optimizations of this pass, which occur when either constant is a
6304 multiple of the other, in which case we replace this with either an
6305    operation of CODE or TCODE.
6306
6307 If we have an unsigned type, we cannot do this since it will change
6308 the result if the original computation overflowed. */
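      /* For example, with undefined signed overflow, (X * 6) / 3 can
	 become X * 2.  */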
6309 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6310 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6311 || (tcode == MULT_EXPR
6312 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6313 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6314 && code != MULT_EXPR)))
6315 {
6316 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6317 TYPE_SIGN (type)))
6318 {
6319 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6320 *strict_overflow_p = true;
6321 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6322 fold_convert (ctype,
6323 const_binop (TRUNC_DIV_EXPR,
6324 op1, c)));
6325 }
6326 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6327 TYPE_SIGN (type)))
6328 {
6329 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6330 *strict_overflow_p = true;
6331 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6332 fold_convert (ctype,
6333 const_binop (TRUNC_DIV_EXPR,
6334 c, op1)));
6335 }
6336 }
6337 break;
6338
6339 default:
6340 break;
6341 }
6342
6343 return 0;
6344 }
6345 \f
6346 /* Return a node which has the indicated constant VALUE (either 0 or
6347 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6348 and is of the indicated TYPE. */
6349
6350 tree
6351 constant_boolean_node (bool value, tree type)
6352 {
6353 if (type == integer_type_node)
6354 return value ? integer_one_node : integer_zero_node;
6355 else if (type == boolean_type_node)
6356 return value ? boolean_true_node : boolean_false_node;
6357 else if (TREE_CODE (type) == VECTOR_TYPE)
6358 return build_vector_from_val (type,
6359 build_int_cst (TREE_TYPE (type),
6360 value ? -1 : 0));
6361 else
6362 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6363 }
6364
6365
6366 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6367    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6368 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6369 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6370 COND is the first argument to CODE; otherwise (as in the example
6371 given here), it is the second argument. TYPE is the type of the
6372 original expression. Return NULL_TREE if no simplification is
6373 possible. */
6374
6375 static tree
6376 fold_binary_op_with_conditional_arg (location_t loc,
6377 enum tree_code code,
6378 tree type, tree op0, tree op1,
6379 tree cond, tree arg, int cond_first_p)
6380 {
6381 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6382 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6383 tree test, true_value, false_value;
6384 tree lhs = NULL_TREE;
6385 tree rhs = NULL_TREE;
6386 enum tree_code cond_code = COND_EXPR;
6387
6388 if (TREE_CODE (cond) == COND_EXPR
6389 || TREE_CODE (cond) == VEC_COND_EXPR)
6390 {
6391 test = TREE_OPERAND (cond, 0);
6392 true_value = TREE_OPERAND (cond, 1);
6393 false_value = TREE_OPERAND (cond, 2);
6394       /* If this operand is an expression that throws, then it does not make
6395 sense to try to perform a logical or arithmetic operation
6396 involving it. */
6397 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6398 lhs = true_value;
6399 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6400 rhs = false_value;
6401 }
6402 else if (!(TREE_CODE (type) != VECTOR_TYPE
6403 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6404 {
6405 tree testtype = TREE_TYPE (cond);
6406 test = cond;
6407 true_value = constant_boolean_node (true, testtype);
6408 false_value = constant_boolean_node (false, testtype);
6409 }
6410 else
6411 /* Detect the case of mixing vector and scalar types - bail out. */
6412 return NULL_TREE;
6413
6414 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6415 cond_code = VEC_COND_EXPR;
6416
6417 /* This transformation is only worthwhile if we don't have to wrap ARG
6418 in a SAVE_EXPR and the operation can be simplified without recursing
6419      on at least one of the branches once it is pushed inside the COND_EXPR.  */
6420 if (!TREE_CONSTANT (arg)
6421 && (TREE_SIDE_EFFECTS (arg)
6422 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6423 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6424 return NULL_TREE;
6425
6426 arg = fold_convert_loc (loc, arg_type, arg);
6427 if (lhs == 0)
6428 {
6429 true_value = fold_convert_loc (loc, cond_type, true_value);
6430 if (cond_first_p)
6431 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6432 else
6433 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6434 }
6435 if (rhs == 0)
6436 {
6437 false_value = fold_convert_loc (loc, cond_type, false_value);
6438 if (cond_first_p)
6439 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6440 else
6441 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6442 }
6443
6444 /* Check that we have simplified at least one of the branches. */
6445 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6446 return NULL_TREE;
6447
6448 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6449 }
6450
6451 \f
6452 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6453
6454 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6455 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6456 ADDEND is the same as X.
6457
6458 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6459 and finite. The problematic cases are when X is zero, and its mode
6460 has signed zeros. In the case of rounding towards -infinity,
6461 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6462 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6463
6464 bool
6465 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6466 {
6467 if (!real_zerop (addend))
6468 return false;
6469
6470 /* Don't allow the fold with -fsignaling-nans. */
6471 if (HONOR_SNANS (element_mode (type)))
6472 return false;
6473
6474 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6475 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6476 return true;
6477
6478 /* In a vector or complex, we would need to check the sign of all zeros. */
6479 if (TREE_CODE (addend) != REAL_CST)
6480 return false;
6481
6482 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6483 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6484 negate = !negate;
6485
6486 /* The mode has signed zeros, and we have to honor their sign.
6487 In this situation, there is only one case we can return true for.
6488 X - 0 is the same as X unless rounding towards -infinity is
6489 supported. */
6490 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6491 }
6492
6493 /* Subroutine of match.pd that optimizes comparisons of a division by
6494 a nonzero integer constant against an integer constant, i.e.
6495 X/C1 op C2.
6496
6497 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6498    GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
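/* For example, for unsigned X the test X / 3 == 2 holds exactly when
   6 <= X <= 8, so *LO becomes 6 (3 * 2) and *HI becomes 8 (6 + (3 - 1)).  */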
6499
6500 enum tree_code
6501 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6502 tree *hi, bool *neg_overflow)
6503 {
6504 tree prod, tmp, type = TREE_TYPE (c1);
6505 signop sign = TYPE_SIGN (type);
6506 bool overflow;
6507
6508 /* We have to do this the hard way to detect unsigned overflow.
6509 prod = int_const_binop (MULT_EXPR, c1, c2); */
6510 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6511 prod = force_fit_type (type, val, -1, overflow);
6512 *neg_overflow = false;
6513
6514 if (sign == UNSIGNED)
6515 {
6516 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6517 *lo = prod;
6518
6519 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6520 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
6521 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6522 }
6523 else if (tree_int_cst_sgn (c1) >= 0)
6524 {
6525 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6526 switch (tree_int_cst_sgn (c2))
6527 {
6528 case -1:
6529 *neg_overflow = true;
6530 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6531 *hi = prod;
6532 break;
6533
6534 case 0:
6535 *lo = fold_negate_const (tmp, type);
6536 *hi = tmp;
6537 break;
6538
6539 case 1:
6540 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6541 *lo = prod;
6542 break;
6543
6544 default:
6545 gcc_unreachable ();
6546 }
6547 }
6548 else
6549 {
6550 /* A negative divisor reverses the relational operators. */
6551 code = swap_tree_comparison (code);
6552
6553 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6554 switch (tree_int_cst_sgn (c2))
6555 {
6556 case -1:
6557 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6558 *lo = prod;
6559 break;
6560
6561 case 0:
6562 *hi = fold_negate_const (tmp, type);
6563 *lo = tmp;
6564 break;
6565
6566 case 1:
6567 *neg_overflow = true;
6568 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6569 *hi = prod;
6570 break;
6571
6572 default:
6573 gcc_unreachable ();
6574 }
6575 }
6576
6577 if (code != EQ_EXPR && code != NE_EXPR)
6578 return code;
6579
6580 if (TREE_OVERFLOW (*lo)
6581 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6582 *lo = NULL_TREE;
6583 if (TREE_OVERFLOW (*hi)
6584 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6585 *hi = NULL_TREE;
6586
6587 return code;
6588 }
6589
6590
6591 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6592 equality/inequality test, then return a simplified form of the test
6593    using a sign test.  Otherwise return NULL.  TYPE is the desired
6594 result type. */
6595
6596 static tree
6597 fold_single_bit_test_into_sign_test (location_t loc,
6598 enum tree_code code, tree arg0, tree arg1,
6599 tree result_type)
6600 {
6601 /* If this is testing a single bit, we can optimize the test. */
6602 if ((code == NE_EXPR || code == EQ_EXPR)
6603 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6604 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6605 {
6606 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6607 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6608 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6609
6610 if (arg00 != NULL_TREE
6611 /* This is only a win if casting to a signed type is cheap,
6612 i.e. when arg00's type is not a partial mode. */
6613 && type_has_mode_precision_p (TREE_TYPE (arg00)))
6614 {
6615 tree stype = signed_type_for (TREE_TYPE (arg00));
6616 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6617 result_type,
6618 fold_convert_loc (loc, stype, arg00),
6619 build_int_cst (stype, 0));
6620 }
6621 }
6622
6623 return NULL_TREE;
6624 }
6625
6626 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6627 equality/inequality test, then return a simplified form of
6628 the test using shifts and logical operations. Otherwise return
6629 NULL. TYPE is the desired result type. */
6630
6631 tree
6632 fold_single_bit_test (location_t loc, enum tree_code code,
6633 tree arg0, tree arg1, tree result_type)
6634 {
6635 /* If this is testing a single bit, we can optimize the test. */
6636 if ((code == NE_EXPR || code == EQ_EXPR)
6637 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6638 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6639 {
6640 tree inner = TREE_OPERAND (arg0, 0);
6641 tree type = TREE_TYPE (arg0);
6642 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6643 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
6644 int ops_unsigned;
6645 tree signed_type, unsigned_type, intermediate_type;
6646 tree tem, one;
6647
6648 /* First, see if we can fold the single bit test into a sign-bit
6649 test. */
6650 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6651 result_type);
6652 if (tem)
6653 return tem;
6654
6655       /* Otherwise we have (A & C) != 0 where C is a single bit;
6656 	 convert that into ((A >> C2) & 1), where C2 = log2 (C).
6657 	 Similarly for (A & C) == 0.  */
6658
6659       /* If INNER is a right shift by a constant and its shift count plus
6660 	 BITNUM stays within the precision of the type, adjust BITNUM and INNER.  */
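      /* For example, in ((A >> 2) & 8) != 0 the tested bit of A is bit
	 3 + 2 == 5, so BITNUM becomes 5 and INNER becomes A.  */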
6661 if (TREE_CODE (inner) == RSHIFT_EXPR
6662 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6663 && bitnum < TYPE_PRECISION (type)
6664 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
6665 TYPE_PRECISION (type) - bitnum))
6666 {
6667 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6668 inner = TREE_OPERAND (inner, 0);
6669 }
6670
6671 /* If we are going to be able to omit the AND below, we must do our
6672 operations as unsigned. If we must use the AND, we have a choice.
6673 Normally unsigned is faster, but for some machines signed is. */
6674 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6675 && !flag_syntax_only) ? 0 : 1;
6676
6677 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6678 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6679 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6680 inner = fold_convert_loc (loc, intermediate_type, inner);
6681
6682 if (bitnum != 0)
6683 inner = build2 (RSHIFT_EXPR, intermediate_type,
6684 inner, size_int (bitnum));
6685
6686 one = build_int_cst (intermediate_type, 1);
6687
6688 if (code == EQ_EXPR)
6689 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6690
6691 /* Put the AND last so it can combine with more things. */
6692 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6693
6694 /* Make sure to return the proper type. */
6695 inner = fold_convert_loc (loc, result_type, inner);
6696
6697 return inner;
6698 }
6699 return NULL_TREE;
6700 }
6701
6702 /* Test whether it is preferable to swap two operands, ARG0 and
6703 ARG1, for example because ARG0 is an integer constant and ARG1
6704 isn't. */
6705
6706 bool
6707 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6708 {
6709 if (CONSTANT_CLASS_P (arg1))
6710 return 0;
6711 if (CONSTANT_CLASS_P (arg0))
6712 return 1;
6713
6714 STRIP_NOPS (arg0);
6715 STRIP_NOPS (arg1);
6716
6717 if (TREE_CONSTANT (arg1))
6718 return 0;
6719 if (TREE_CONSTANT (arg0))
6720 return 1;
6721
6722 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6723 for commutative and comparison operators. Ensuring a canonical
6724 form allows the optimizers to find additional redundancies without
6725 having to explicitly check for both orderings. */
6726 if (TREE_CODE (arg0) == SSA_NAME
6727 && TREE_CODE (arg1) == SSA_NAME
6728 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6729 return 1;
6730
6731 /* Put SSA_NAMEs last. */
6732 if (TREE_CODE (arg1) == SSA_NAME)
6733 return 0;
6734 if (TREE_CODE (arg0) == SSA_NAME)
6735 return 1;
6736
6737 /* Put variables last. */
6738 if (DECL_P (arg1))
6739 return 0;
6740 if (DECL_P (arg0))
6741 return 1;
6742
6743 return 0;
6744 }
6745
6746
6747 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6748 means A >= Y && A != MAX, but in this case we know that
6749 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6750
6751 static tree
6752 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6753 {
6754 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6755
6756 if (TREE_CODE (bound) == LT_EXPR)
6757 a = TREE_OPERAND (bound, 0);
6758 else if (TREE_CODE (bound) == GT_EXPR)
6759 a = TREE_OPERAND (bound, 1);
6760 else
6761 return NULL_TREE;
6762
6763 typea = TREE_TYPE (a);
6764 if (!INTEGRAL_TYPE_P (typea)
6765 && !POINTER_TYPE_P (typea))
6766 return NULL_TREE;
6767
6768 if (TREE_CODE (ineq) == LT_EXPR)
6769 {
6770 a1 = TREE_OPERAND (ineq, 1);
6771 y = TREE_OPERAND (ineq, 0);
6772 }
6773 else if (TREE_CODE (ineq) == GT_EXPR)
6774 {
6775 a1 = TREE_OPERAND (ineq, 0);
6776 y = TREE_OPERAND (ineq, 1);
6777 }
6778 else
6779 return NULL_TREE;
6780
6781 if (TREE_TYPE (a1) != typea)
6782 return NULL_TREE;
6783
6784 if (POINTER_TYPE_P (typea))
6785 {
6786       /* Convert the pointers to integers before taking the difference.  */
6787 tree ta = fold_convert_loc (loc, ssizetype, a);
6788 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6789 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6790 }
6791 else
6792 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6793
6794 if (!diff || !integer_onep (diff))
6795 return NULL_TREE;
6796
6797 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6798 }
6799
6800 /* Fold a sum or difference of at least one multiplication.
6801 Returns the folded tree or NULL if no simplification could be made. */
6802
6803 static tree
6804 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6805 tree arg0, tree arg1)
6806 {
6807 tree arg00, arg01, arg10, arg11;
6808 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6809
6810 /* (A * C) +- (B * C) -> (A+-B) * C.
6811 (A * C) +- A -> A * (C+-1).
6812 We are most concerned about the case where C is a constant,
6813 but other combinations show up during loop reduction. Since
6814 it is not difficult, try all four possibilities. */
6815
6816 if (TREE_CODE (arg0) == MULT_EXPR)
6817 {
6818 arg00 = TREE_OPERAND (arg0, 0);
6819 arg01 = TREE_OPERAND (arg0, 1);
6820 }
6821 else if (TREE_CODE (arg0) == INTEGER_CST)
6822 {
6823 arg00 = build_one_cst (type);
6824 arg01 = arg0;
6825 }
6826 else
6827 {
6828 /* We cannot generate constant 1 for fract. */
6829 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6830 return NULL_TREE;
6831 arg00 = arg0;
6832 arg01 = build_one_cst (type);
6833 }
6834 if (TREE_CODE (arg1) == MULT_EXPR)
6835 {
6836 arg10 = TREE_OPERAND (arg1, 0);
6837 arg11 = TREE_OPERAND (arg1, 1);
6838 }
6839 else if (TREE_CODE (arg1) == INTEGER_CST)
6840 {
6841 arg10 = build_one_cst (type);
6842       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6843 	 the purpose of this canonicalization.  */
6844 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
6845 && negate_expr_p (arg1)
6846 && code == PLUS_EXPR)
6847 {
6848 arg11 = negate_expr (arg1);
6849 code = MINUS_EXPR;
6850 }
6851 else
6852 arg11 = arg1;
6853 }
6854 else
6855 {
6856 /* We cannot generate constant 1 for fract. */
6857 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6858 return NULL_TREE;
6859 arg10 = arg1;
6860 arg11 = build_one_cst (type);
6861 }
6862 same = NULL_TREE;
6863
6864 /* Prefer factoring a common non-constant. */
6865 if (operand_equal_p (arg00, arg10, 0))
6866 same = arg00, alt0 = arg01, alt1 = arg11;
6867 else if (operand_equal_p (arg01, arg11, 0))
6868 same = arg01, alt0 = arg00, alt1 = arg10;
6869 else if (operand_equal_p (arg00, arg11, 0))
6870 same = arg00, alt0 = arg01, alt1 = arg10;
6871 else if (operand_equal_p (arg01, arg10, 0))
6872 same = arg01, alt0 = arg00, alt1 = arg11;
6873
6874 /* No identical multiplicands; see if we can find a common
6875 power-of-two factor in non-power-of-two multiplies. This
6876 can help in multi-dimensional array access. */
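  /* For example, x * 8 + y * 4 can become (x * 2 + y) * 4.  */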
6877 else if (tree_fits_shwi_p (arg01)
6878 && tree_fits_shwi_p (arg11))
6879 {
6880 HOST_WIDE_INT int01, int11, tmp;
6881 bool swap = false;
6882 tree maybe_same;
6883 int01 = tree_to_shwi (arg01);
6884 int11 = tree_to_shwi (arg11);
6885
6886 /* Move min of absolute values to int11. */
6887 if (absu_hwi (int01) < absu_hwi (int11))
6888 {
6889 tmp = int01, int01 = int11, int11 = tmp;
6890 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6891 maybe_same = arg01;
6892 swap = true;
6893 }
6894 else
6895 maybe_same = arg11;
6896
6897 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6898 /* The remainder should not be a constant, otherwise we
6899 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6900 	     increase the number of multiplications necessary.  */
6901 && TREE_CODE (arg10) != INTEGER_CST)
6902 {
6903 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6904 build_int_cst (TREE_TYPE (arg00),
6905 int01 / int11));
6906 alt1 = arg10;
6907 same = maybe_same;
6908 if (swap)
6909 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6910 }
6911 }
6912
6913 if (!same)
6914 return NULL_TREE;
6915
6916 if (! INTEGRAL_TYPE_P (type)
6917 || TYPE_OVERFLOW_WRAPS (type)
6918 /* We are neither factoring zero nor minus one. */
6919 || TREE_CODE (same) == INTEGER_CST)
6920 return fold_build2_loc (loc, MULT_EXPR, type,
6921 fold_build2_loc (loc, code, type,
6922 fold_convert_loc (loc, type, alt0),
6923 fold_convert_loc (loc, type, alt1)),
6924 fold_convert_loc (loc, type, same));
6925
6926 /* Same may be zero and thus the operation 'code' may overflow. Likewise
6927 same may be minus one and thus the multiplication may overflow. Perform
6928 the operations in an unsigned type. */
6929 tree utype = unsigned_type_for (type);
6930 tree tem = fold_build2_loc (loc, code, utype,
6931 fold_convert_loc (loc, utype, alt0),
6932 fold_convert_loc (loc, utype, alt1));
6933   /* If the sum evaluated to a constant that is not -INF, the multiplication
6934 cannot overflow. */
6935 if (TREE_CODE (tem) == INTEGER_CST
6936 && (wi::to_wide (tem)
6937 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
6938 return fold_build2_loc (loc, MULT_EXPR, type,
6939 fold_convert (type, tem), same);
6940
6941 return fold_convert_loc (loc, type,
6942 fold_build2_loc (loc, MULT_EXPR, utype, tem,
6943 fold_convert_loc (loc, utype, same)));
6944 }
6945
6946 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6947 specified by EXPR into the buffer PTR of length LEN bytes.
6948 Return the number of bytes placed in the buffer, or zero
6949 upon failure. */
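/* For example, for a 32-bit INTEGER_CST with value 0x01020304 and a
   little-endian target, the buffer receives the bytes 04 03 02 01.  */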
6950
6951 static int
6952 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6953 {
6954 tree type = TREE_TYPE (expr);
6955 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
6956 int byte, offset, word, words;
6957 unsigned char value;
6958
6959 if ((off == -1 && total_bytes > len) || off >= total_bytes)
6960 return 0;
6961 if (off == -1)
6962 off = 0;
6963
6964 if (ptr == NULL)
6965 /* Dry run. */
6966 return MIN (len, total_bytes - off);
6967
6968 words = total_bytes / UNITS_PER_WORD;
6969
6970 for (byte = 0; byte < total_bytes; byte++)
6971 {
6972 int bitpos = byte * BITS_PER_UNIT;
6973 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
6974 number of bytes. */
6975 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
6976
6977 if (total_bytes > UNITS_PER_WORD)
6978 {
6979 word = byte / UNITS_PER_WORD;
6980 if (WORDS_BIG_ENDIAN)
6981 word = (words - 1) - word;
6982 offset = word * UNITS_PER_WORD;
6983 if (BYTES_BIG_ENDIAN)
6984 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6985 else
6986 offset += byte % UNITS_PER_WORD;
6987 }
6988 else
6989 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6990 if (offset >= off && offset - off < len)
6991 ptr[offset - off] = value;
6992 }
6993 return MIN (len, total_bytes - off);
6994 }
6995
6996
6997 /* Subroutine of native_encode_expr. Encode the FIXED_CST
6998 specified by EXPR into the buffer PTR of length LEN bytes.
6999 Return the number of bytes placed in the buffer, or zero
7000 upon failure. */
7001
7002 static int
7003 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7004 {
7005 tree type = TREE_TYPE (expr);
7006 scalar_mode mode = SCALAR_TYPE_MODE (type);
7007 int total_bytes = GET_MODE_SIZE (mode);
7008 FIXED_VALUE_TYPE value;
7009 tree i_value, i_type;
7010
7011 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7012 return 0;
7013
7014 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7015
7016 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7017 return 0;
7018
7019 value = TREE_FIXED_CST (expr);
7020 i_value = double_int_to_tree (i_type, value.data);
7021
7022 return native_encode_int (i_value, ptr, len, off);
7023 }
7024
7025
7026 /* Subroutine of native_encode_expr. Encode the REAL_CST
7027 specified by EXPR into the buffer PTR of length LEN bytes.
7028 Return the number of bytes placed in the buffer, or zero
7029 upon failure. */
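/* For example, the IEEE single-precision constant 1.0 has the image
   0x3f800000; on a typical little-endian target it is encoded as
   { 0x00, 0x00, 0x80, 0x3f }.  */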
7030
7031 static int
7032 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7033 {
7034 tree type = TREE_TYPE (expr);
7035 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7036 int byte, offset, word, words, bitpos;
7037 unsigned char value;
7038
7039 /* There are always 32 bits in each long, no matter the size of
7040 the host's long. We handle floating point representations with
7041 up to 192 bits. */
7042 long tmp[6];
7043
7044 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7045 return 0;
7046 if (off == -1)
7047 off = 0;
7048
7049 if (ptr == NULL)
7050 /* Dry run. */
7051 return MIN (len, total_bytes - off);
7052
7053 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7054
7055 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7056
7057 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7058 bitpos += BITS_PER_UNIT)
7059 {
7060 byte = (bitpos / BITS_PER_UNIT) & 3;
7061 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7062
7063 if (UNITS_PER_WORD < 4)
7064 {
7065 word = byte / UNITS_PER_WORD;
7066 if (WORDS_BIG_ENDIAN)
7067 word = (words - 1) - word;
7068 offset = word * UNITS_PER_WORD;
7069 if (BYTES_BIG_ENDIAN)
7070 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7071 else
7072 offset += byte % UNITS_PER_WORD;
7073 }
7074 else
7075 {
7076 offset = byte;
7077 if (BYTES_BIG_ENDIAN)
7078 {
7079 /* Reverse bytes within each long, or within the entire float
7080 if it's smaller than a long (for HFmode). */
7081 offset = MIN (3, total_bytes - 1) - offset;
7082 gcc_assert (offset >= 0);
7083 }
7084 }
7085 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7086 if (offset >= off
7087 && offset - off < len)
7088 ptr[offset - off] = value;
7089 }
7090 return MIN (len, total_bytes - off);
7091 }
7092
7093 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7094 specified by EXPR into the buffer PTR of length LEN bytes.
7095 Return the number of bytes placed in the buffer, or zero
7096 upon failure. */
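/* The real part is encoded first, then the imaginary part, each with
   its own mode size; e.g. a complex double 3.0 + 4.0i occupies 16
   bytes, the image of 3.0 followed by the image of 4.0.  */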
7097
7098 static int
7099 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7100 {
7101 int rsize, isize;
7102 tree part;
7103
7104 part = TREE_REALPART (expr);
7105 rsize = native_encode_expr (part, ptr, len, off);
7106 if (off == -1 && rsize == 0)
7107 return 0;
7108 part = TREE_IMAGPART (expr);
7109 if (off != -1)
7110 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7111 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7112 len - rsize, off);
7113 if (off == -1 && isize != rsize)
7114 return 0;
7115 return rsize + isize;
7116 }
7117
7118
7119 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7120 specified by EXPR into the buffer PTR of length LEN bytes.
7121 Return the number of bytes placed in the buffer, or zero
7122 upon failure. */
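/* Elements are encoded in order; a nonzero OFF first skips whole
   leading elements.  E.g. with 4-byte elements and OFF == 8, the
   encoding starts at element 2.  */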
7123
7124 static int
7125 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7126 {
7127 unsigned i, count;
7128 int size, offset;
7129 tree itype, elem;
7130
7131 offset = 0;
7132 count = VECTOR_CST_NELTS (expr);
7133 itype = TREE_TYPE (TREE_TYPE (expr));
7134 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7135 for (i = 0; i < count; i++)
7136 {
7137 if (off >= size)
7138 {
7139 off -= size;
7140 continue;
7141 }
7142 elem = VECTOR_CST_ELT (expr, i);
7143 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7144 len - offset, off);
7145 if ((off == -1 && res != size) || res == 0)
7146 return 0;
7147 offset += res;
7148 if (offset >= len)
7149 return offset;
7150 if (off != -1)
7151 off = 0;
7152 }
7153 return offset;
7154 }
7155
7156
7157 /* Subroutine of native_encode_expr. Encode the STRING_CST
7158 specified by EXPR into the buffer PTR of length LEN bytes.
7159 Return the number of bytes placed in the buffer, or zero
7160 upon failure. */
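/* Bytes past TREE_STRING_LENGTH are zero-filled up to the size of the
   string's type; e.g. the STRING_CST "ab" of type char[8] encodes as
   { 'a', 'b', 0, 0, 0, 0, 0, 0 }.  */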
7161
7162 static int
7163 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7164 {
7165 tree type = TREE_TYPE (expr);
7166
7167 /* Wide-char strings are encoded in target byte-order so native
7168 encoding them is trivial. */
7169 if (BITS_PER_UNIT != CHAR_BIT
7170 || TREE_CODE (type) != ARRAY_TYPE
7171 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7172 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7173 return 0;
7174
7175 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7176 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7177 return 0;
7178 if (off == -1)
7179 off = 0;
7180 if (ptr == NULL)
7181 /* Dry run. */;
7182 else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7183 {
7184 int written = 0;
7185 if (off < TREE_STRING_LENGTH (expr))
7186 {
7187 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7188 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7189 }
7190 memset (ptr + written, 0,
7191 MIN (total_bytes - written, len - written));
7192 }
7193 else
7194 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7195 return MIN (total_bytes - off, len);
7196 }
7197
7198
7199 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7200 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7201 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7202 anything, just do a dry run. If OFF is not -1 then start
7203 the encoding at byte offset OFF and encode at most LEN bytes.
7204 Return the number of bytes placed in the buffer, or zero upon failure. */
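/* A minimal usage sketch of this pair of routines (see
   fold_view_convert_expr below for the in-tree client):

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf), -1);
     if (len != 0)
       tem = native_interpret_expr (type, buf, len);

   A return value of zero means EXPR could not be encoded and the
   buffer contents must not be used.  */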
7205
7206 int
7207 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7208 {
7209 /* We don't support starting at a negative offset, and -1 is special. */
7210 if (off < -1)
7211 return 0;
7212
7213 switch (TREE_CODE (expr))
7214 {
7215 case INTEGER_CST:
7216 return native_encode_int (expr, ptr, len, off);
7217
7218 case REAL_CST:
7219 return native_encode_real (expr, ptr, len, off);
7220
7221 case FIXED_CST:
7222 return native_encode_fixed (expr, ptr, len, off);
7223
7224 case COMPLEX_CST:
7225 return native_encode_complex (expr, ptr, len, off);
7226
7227 case VECTOR_CST:
7228 return native_encode_vector (expr, ptr, len, off);
7229
7230 case STRING_CST:
7231 return native_encode_string (expr, ptr, len, off);
7232
7233 default:
7234 return 0;
7235 }
7236 }
7237
7238
7239 /* Subroutine of native_interpret_expr. Interpret the contents of
7240 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7241 If the buffer cannot be interpreted, return NULL_TREE. */
7242
7243 static tree
7244 native_interpret_int (tree type, const unsigned char *ptr, int len)
7245 {
7246 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7247
7248 if (total_bytes > len
7249 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7250 return NULL_TREE;
7251
7252 wide_int result = wi::from_buffer (ptr, total_bytes);
7253
7254 return wide_int_to_tree (type, result);
7255 }
7256
7257
7258 /* Subroutine of native_interpret_expr. Interpret the contents of
7259 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7260 If the buffer cannot be interpreted, return NULL_TREE. */
7261
7262 static tree
7263 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7264 {
7265 scalar_mode mode = SCALAR_TYPE_MODE (type);
7266 int total_bytes = GET_MODE_SIZE (mode);
7267 double_int result;
7268 FIXED_VALUE_TYPE fixed_value;
7269
7270 if (total_bytes > len
7271 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7272 return NULL_TREE;
7273
7274 result = double_int::from_buffer (ptr, total_bytes);
7275 fixed_value = fixed_from_double_int (result, mode);
7276
7277 return build_fixed (type, fixed_value);
7278 }
7279
7280
7281 /* Subroutine of native_interpret_expr. Interpret the contents of
7282 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7283 If the buffer cannot be interpreted, return NULL_TREE. */
7284
7285 static tree
7286 native_interpret_real (tree type, const unsigned char *ptr, int len)
7287 {
7288 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7289 int total_bytes = GET_MODE_SIZE (mode);
7290 unsigned char value;
7291 /* There are always 32 bits in each long, no matter the size of
7292 the host's long. We handle floating point representations with
7293 up to 192 bits. */
7294 REAL_VALUE_TYPE r;
7295 long tmp[6];
7296
7297 if (total_bytes > len || total_bytes > 24)
7298 return NULL_TREE;
7299 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7300
7301 memset (tmp, 0, sizeof (tmp));
7302 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7303 bitpos += BITS_PER_UNIT)
7304 {
7305 /* Both OFFSET and BYTE index within a long;
7306 bitpos indexes the whole float. */
7307 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7308 if (UNITS_PER_WORD < 4)
7309 {
7310 int word = byte / UNITS_PER_WORD;
7311 if (WORDS_BIG_ENDIAN)
7312 word = (words - 1) - word;
7313 offset = word * UNITS_PER_WORD;
7314 if (BYTES_BIG_ENDIAN)
7315 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7316 else
7317 offset += byte % UNITS_PER_WORD;
7318 }
7319 else
7320 {
7321 offset = byte;
7322 if (BYTES_BIG_ENDIAN)
7323 {
7324 /* Reverse bytes within each long, or within the entire float
7325 if it's smaller than a long (for HFmode). */
7326 offset = MIN (3, total_bytes - 1) - offset;
7327 gcc_assert (offset >= 0);
7328 }
7329 }
7330 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7331
7332 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7333 }
7334
7335 real_from_target (&r, tmp, mode);
7336 return build_real (type, r);
7337 }
7338
7339
7340 /* Subroutine of native_interpret_expr. Interpret the contents of
7341 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7342 If the buffer cannot be interpreted, return NULL_TREE. */
7343
7344 static tree
7345 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7346 {
7347 tree etype, rpart, ipart;
7348 int size;
7349
7350 etype = TREE_TYPE (type);
7351 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7352 if (size * 2 > len)
7353 return NULL_TREE;
7354 rpart = native_interpret_expr (etype, ptr, size);
7355 if (!rpart)
7356 return NULL_TREE;
7357 ipart = native_interpret_expr (etype, ptr+size, size);
7358 if (!ipart)
7359 return NULL_TREE;
7360 return build_complex (type, rpart, ipart);
7361 }
7362
7363
7364 /* Subroutine of native_interpret_expr. Interpret the contents of
7365 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7366 If the buffer cannot be interpreted, return NULL_TREE. */
7367
7368 static tree
7369 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7370 {
7371 tree etype, elem;
7372 int i, size, count;
7373
7374 etype = TREE_TYPE (type);
7375 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7376 count = TYPE_VECTOR_SUBPARTS (type);
7377 if (size * count > len)
7378 return NULL_TREE;
7379
7380 auto_vec<tree, 32> elements (count);
7381 for (i = 0; i < count; ++i)
7382 {
7383 elem = native_interpret_expr (etype, ptr+(i*size), size);
7384 if (!elem)
7385 return NULL_TREE;
7386 elements.quick_push (elem);
7387 }
7388 return build_vector (type, elements);
7389 }
7390
7391
7392 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7393 the buffer PTR of length LEN as a constant of type TYPE. For
7394 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7395 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7396 return NULL_TREE. */
7397
7398 tree
7399 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7400 {
7401 switch (TREE_CODE (type))
7402 {
7403 case INTEGER_TYPE:
7404 case ENUMERAL_TYPE:
7405 case BOOLEAN_TYPE:
7406 case POINTER_TYPE:
7407 case REFERENCE_TYPE:
7408 return native_interpret_int (type, ptr, len);
7409
7410 case REAL_TYPE:
7411 return native_interpret_real (type, ptr, len);
7412
7413 case FIXED_POINT_TYPE:
7414 return native_interpret_fixed (type, ptr, len);
7415
7416 case COMPLEX_TYPE:
7417 return native_interpret_complex (type, ptr, len);
7418
7419 case VECTOR_TYPE:
7420 return native_interpret_vector (type, ptr, len);
7421
7422 default:
7423 return NULL_TREE;
7424 }
7425 }
7426
7427 /* Returns true if we can interpret the contents of a native encoding
7428 as TYPE. */
7429
7430 static bool
7431 can_native_interpret_type_p (tree type)
7432 {
7433 switch (TREE_CODE (type))
7434 {
7435 case INTEGER_TYPE:
7436 case ENUMERAL_TYPE:
7437 case BOOLEAN_TYPE:
7438 case POINTER_TYPE:
7439 case REFERENCE_TYPE:
7440 case FIXED_POINT_TYPE:
7441 case REAL_TYPE:
7442 case COMPLEX_TYPE:
7443 case VECTOR_TYPE:
7444 return true;
7445 default:
7446 return false;
7447 }
7448 }
7449
7450
7451 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7452 TYPE at compile-time. If we're unable to perform the conversion
7453 return NULL_TREE. */
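/* For example, on a target with 32-bit int and IEEE single floats,
   VIEW_CONVERT_EXPR<int>(1.0f) folds to the INTEGER_CST 1065353216
   (0x3f800000).  */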
7454
7455 static tree
7456 fold_view_convert_expr (tree type, tree expr)
7457 {
7458 /* We support up to 512-bit values (for V8DFmode). */
7459 unsigned char buffer[64];
7460 int len;
7461
7462 /* Check that the host and target are sane. */
7463 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7464 return NULL_TREE;
7465
7466 len = native_encode_expr (expr, buffer, sizeof (buffer));
7467 if (len == 0)
7468 return NULL_TREE;
7469
7470 return native_interpret_expr (type, buffer, len);
7471 }
7472
7473 /* Build an expression for the address of T. Folds away INDIRECT_REF
7474 to avoid confusing the gimplify process. */
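/* For example, building the address of *P simply yields P (converted
   to PTRTYPE if necessary) instead of wrapping the INDIRECT_REF in an
   ADDR_EXPR, and &MEM_REF[p, 0] likewise folds back to P.  */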
7475
7476 tree
7477 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7478 {
7479 /* The size of the object is not relevant when talking about its address. */
7480 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7481 t = TREE_OPERAND (t, 0);
7482
7483 if (TREE_CODE (t) == INDIRECT_REF)
7484 {
7485 t = TREE_OPERAND (t, 0);
7486
7487 if (TREE_TYPE (t) != ptrtype)
7488 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7489 }
7490 else if (TREE_CODE (t) == MEM_REF
7491 && integer_zerop (TREE_OPERAND (t, 1)))
7492 return TREE_OPERAND (t, 0);
7493 else if (TREE_CODE (t) == MEM_REF
7494 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7495 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7496 TREE_OPERAND (t, 0),
7497 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7498 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7499 {
7500 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7501
7502 if (TREE_TYPE (t) != ptrtype)
7503 t = fold_convert_loc (loc, ptrtype, t);
7504 }
7505 else
7506 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7507
7508 return t;
7509 }
7510
7511 /* Build an expression for the address of T. */
7512
7513 tree
7514 build_fold_addr_expr_loc (location_t loc, tree t)
7515 {
7516 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7517
7518 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7519 }
7520
7521 /* Fold a unary expression of code CODE and type TYPE with operand
7522 OP0. Return the folded expression if folding is successful.
7523 Otherwise, return NULL_TREE. */
7524
7525 tree
7526 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7527 {
7528 tree tem;
7529 tree arg0;
7530 enum tree_code_class kind = TREE_CODE_CLASS (code);
7531
7532 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7533 && TREE_CODE_LENGTH (code) == 1);
7534
7535 arg0 = op0;
7536 if (arg0)
7537 {
7538 if (CONVERT_EXPR_CODE_P (code)
7539 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7540 {
7541 /* Don't use STRIP_NOPS, because signedness of argument type
7542 matters. */
7543 STRIP_SIGN_NOPS (arg0);
7544 }
7545 else
7546 {
7547 /* Strip any conversions that don't change the mode. This
7548 is safe for every expression, except for a comparison
7549 expression because its signedness is derived from its
7550 operands.
7551
7552 Note that this is done as an internal manipulation within
7553 the constant folder, in order to find the simplest
7554 representation of the arguments so that their form can be
7555 studied. In any case, the appropriate type conversions
7556 should be put back in the tree that will get out of the
7557 constant folder. */
7558 STRIP_NOPS (arg0);
7559 }
7560
7561 if (CONSTANT_CLASS_P (arg0))
7562 {
7563 tree tem = const_unop (code, type, arg0);
7564 if (tem)
7565 {
7566 if (TREE_TYPE (tem) != type)
7567 tem = fold_convert_loc (loc, type, tem);
7568 return tem;
7569 }
7570 }
7571 }
7572
7573 tem = generic_simplify (loc, code, type, op0);
7574 if (tem)
7575 return tem;
7576
7577 if (TREE_CODE_CLASS (code) == tcc_unary)
7578 {
7579 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7580 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7581 fold_build1_loc (loc, code, type,
7582 fold_convert_loc (loc, TREE_TYPE (op0),
7583 TREE_OPERAND (arg0, 1))));
7584 else if (TREE_CODE (arg0) == COND_EXPR)
7585 {
7586 tree arg01 = TREE_OPERAND (arg0, 1);
7587 tree arg02 = TREE_OPERAND (arg0, 2);
7588 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7589 arg01 = fold_build1_loc (loc, code, type,
7590 fold_convert_loc (loc,
7591 TREE_TYPE (op0), arg01));
7592 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7593 arg02 = fold_build1_loc (loc, code, type,
7594 fold_convert_loc (loc,
7595 TREE_TYPE (op0), arg02));
7596 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7597 arg01, arg02);
7598
7599 /* If this was a conversion, and all we did was to move it
7600 inside the COND_EXPR, bring it back out. But leave it if
7601 it is a conversion from integer to integer and the
7602 result precision is no wider than a word since such a
7603 conversion is cheap and may be optimized away by combine,
7604 while it couldn't if it were outside the COND_EXPR. Then return
7605 so we don't get into an infinite recursion loop taking the
7606 conversion out and then back in. */
7607
7608 if ((CONVERT_EXPR_CODE_P (code)
7609 || code == NON_LVALUE_EXPR)
7610 && TREE_CODE (tem) == COND_EXPR
7611 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7612 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7613 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7614 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7615 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7616 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7617 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7618 && (INTEGRAL_TYPE_P
7619 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7620 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7621 || flag_syntax_only))
7622 tem = build1_loc (loc, code, type,
7623 build3 (COND_EXPR,
7624 TREE_TYPE (TREE_OPERAND
7625 (TREE_OPERAND (tem, 1), 0)),
7626 TREE_OPERAND (tem, 0),
7627 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7628 TREE_OPERAND (TREE_OPERAND (tem, 2),
7629 0)));
7630 return tem;
7631 }
7632 }
7633
7634 switch (code)
7635 {
7636 case NON_LVALUE_EXPR:
7637 if (!maybe_lvalue_p (op0))
7638 return fold_convert_loc (loc, type, op0);
7639 return NULL_TREE;
7640
7641 CASE_CONVERT:
7642 case FLOAT_EXPR:
7643 case FIX_TRUNC_EXPR:
7644 if (COMPARISON_CLASS_P (op0))
7645 {
7646 /* If we have (type) (a CMP b) and type is an integral type, return a
7647 new expression involving the new type. Canonicalize
7648 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7649 non-integral type.
7650 Do not fold the result as that would not simplify further;
7651 folding it again results in infinite recursion. */
7652 if (TREE_CODE (type) == BOOLEAN_TYPE)
7653 return build2_loc (loc, TREE_CODE (op0), type,
7654 TREE_OPERAND (op0, 0),
7655 TREE_OPERAND (op0, 1));
7656 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7657 && TREE_CODE (type) != VECTOR_TYPE)
7658 return build3_loc (loc, COND_EXPR, type, op0,
7659 constant_boolean_node (true, type),
7660 constant_boolean_node (false, type));
7661 }
7662
7663 /* Handle (T *)&A.B.C for A being of type T and B and C
7664 living at offset zero. This occurs frequently in
7665 C++ upcasting and then accessing the base. */
7666 if (TREE_CODE (op0) == ADDR_EXPR
7667 && POINTER_TYPE_P (type)
7668 && handled_component_p (TREE_OPERAND (op0, 0)))
7669 {
7670 HOST_WIDE_INT bitsize, bitpos;
7671 tree offset;
7672 machine_mode mode;
7673 int unsignedp, reversep, volatilep;
7674 tree base
7675 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7676 &offset, &mode, &unsignedp, &reversep,
7677 &volatilep);
7678 /* If the reference was to a (constant) zero offset, we can use
7679 the address of the base if it has the same base type
7680 as the result type and the pointer type is unqualified. */
7681 if (! offset && bitpos == 0
7682 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7683 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7684 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7685 return fold_convert_loc (loc, type,
7686 build_fold_addr_expr_loc (loc, base));
7687 }
7688
7689 if (TREE_CODE (op0) == MODIFY_EXPR
7690 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7691 /* Detect assigning a bitfield. */
7692 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7693 && DECL_BIT_FIELD
7694 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7695 {
7696 /* Don't leave an assignment inside a conversion
7697 unless assigning a bitfield. */
7698 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7699 /* First do the assignment, then return the converted constant. */
7700 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7701 TREE_NO_WARNING (tem) = 1;
7702 TREE_USED (tem) = 1;
7703 return tem;
7704 }
7705
7706 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7707 constants (if x has signed type, the sign bit cannot be set
7708 in c). This folds extension into the BIT_AND_EXPR.
7709 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7710 very likely don't have maximal range for their precision and this
7711 transformation effectively doesn't preserve non-maximal ranges. */
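/* For example, (unsigned short) (x & 0x7f00) with int X becomes
   (unsigned short) x & 0x7f00: the mask is representable in the
   narrower type, so the conversion can be folded into the AND.  */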
7712 if (TREE_CODE (type) == INTEGER_TYPE
7713 && TREE_CODE (op0) == BIT_AND_EXPR
7714 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7715 {
7716 tree and_expr = op0;
7717 tree and0 = TREE_OPERAND (and_expr, 0);
7718 tree and1 = TREE_OPERAND (and_expr, 1);
7719 int change = 0;
7720
7721 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7722 || (TYPE_PRECISION (type)
7723 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7724 change = 1;
7725 else if (TYPE_PRECISION (TREE_TYPE (and1))
7726 <= HOST_BITS_PER_WIDE_INT
7727 && tree_fits_uhwi_p (and1))
7728 {
7729 unsigned HOST_WIDE_INT cst;
7730
7731 cst = tree_to_uhwi (and1);
7732 cst &= HOST_WIDE_INT_M1U
7733 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7734 change = (cst == 0);
7735 if (change
7736 && !flag_syntax_only
7737 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7738 == ZERO_EXTEND))
7739 {
7740 tree uns = unsigned_type_for (TREE_TYPE (and0));
7741 and0 = fold_convert_loc (loc, uns, and0);
7742 and1 = fold_convert_loc (loc, uns, and1);
7743 }
7744 }
7745 if (change)
7746 {
7747 tem = force_fit_type (type, wi::to_widest (and1), 0,
7748 TREE_OVERFLOW (and1));
7749 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7750 fold_convert_loc (loc, type, and0), tem);
7751 }
7752 }
7753
7754 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7755 cast (T1)X will fold away. We assume that this happens when X itself
7756 is a cast. */
7757 if (POINTER_TYPE_P (type)
7758 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7759 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7760 {
7761 tree arg00 = TREE_OPERAND (arg0, 0);
7762 tree arg01 = TREE_OPERAND (arg0, 1);
7763
7764 return fold_build_pointer_plus_loc
7765 (loc, fold_convert_loc (loc, type, arg00), arg01);
7766 }
7767
7768 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7769 of the same precision, and X is an integer type not narrower than
7770 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
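/* For example, (int) ~(unsigned int) x with int X becomes ~x, since
   int and unsigned int have the same precision and the inner cast is
   not an extension.  */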
7771 if (INTEGRAL_TYPE_P (type)
7772 && TREE_CODE (op0) == BIT_NOT_EXPR
7773 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7774 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7775 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7776 {
7777 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7778 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7779 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7780 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7781 fold_convert_loc (loc, type, tem));
7782 }
7783
7784 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7785 type of X and Y (integer types only). */
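/* For example, (short) (x * y) with int X and Y is rewritten as
   (short) ((unsigned short) x * (unsigned short) y): the narrower
   multiplication is performed in an unsigned type so that no new
   overflow is introduced.  */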
7786 if (INTEGRAL_TYPE_P (type)
7787 && TREE_CODE (op0) == MULT_EXPR
7788 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7789 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7790 {
7791 /* Be careful not to introduce new overflows. */
7792 tree mult_type;
7793 if (TYPE_OVERFLOW_WRAPS (type))
7794 mult_type = type;
7795 else
7796 mult_type = unsigned_type_for (type);
7797
7798 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7799 {
7800 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7801 fold_convert_loc (loc, mult_type,
7802 TREE_OPERAND (op0, 0)),
7803 fold_convert_loc (loc, mult_type,
7804 TREE_OPERAND (op0, 1)));
7805 return fold_convert_loc (loc, type, tem);
7806 }
7807 }
7808
7809 return NULL_TREE;
7810
7811 case VIEW_CONVERT_EXPR:
7812 if (TREE_CODE (op0) == MEM_REF)
7813 {
7814 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7815 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7816 tem = fold_build2_loc (loc, MEM_REF, type,
7817 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7818 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7819 return tem;
7820 }
7821
7822 return NULL_TREE;
7823
7824 case NEGATE_EXPR:
7825 tem = fold_negate_expr (loc, arg0);
7826 if (tem)
7827 return fold_convert_loc (loc, type, tem);
7828 return NULL_TREE;
7829
7830 case ABS_EXPR:
7831 /* Convert fabs((double)float) into (double)fabsf(float). */
7832 if (TREE_CODE (arg0) == NOP_EXPR
7833 && TREE_CODE (type) == REAL_TYPE)
7834 {
7835 tree targ0 = strip_float_extensions (arg0);
7836 if (targ0 != arg0)
7837 return fold_convert_loc (loc, type,
7838 fold_build1_loc (loc, ABS_EXPR,
7839 TREE_TYPE (targ0),
7840 targ0));
7841 }
7842 return NULL_TREE;
7843
7844 case BIT_NOT_EXPR:
7845 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7846 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7847 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7848 fold_convert_loc (loc, type,
7849 TREE_OPERAND (arg0, 0)))))
7850 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7851 fold_convert_loc (loc, type,
7852 TREE_OPERAND (arg0, 1)));
7853 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7854 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7855 fold_convert_loc (loc, type,
7856 TREE_OPERAND (arg0, 1)))))
7857 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7858 fold_convert_loc (loc, type,
7859 TREE_OPERAND (arg0, 0)), tem);
7860
7861 return NULL_TREE;
7862
7863 case TRUTH_NOT_EXPR:
7864 /* Note that the operand of this must be an int
7865 and its values must be 0 or 1.
7866 ("true" is a fixed value perhaps depending on the language,
7867 but we don't handle values other than 1 correctly yet.) */
7868 tem = fold_truth_not_expr (loc, arg0);
7869 if (!tem)
7870 return NULL_TREE;
7871 return fold_convert_loc (loc, type, tem);
7872
7873 case INDIRECT_REF:
7874 /* Fold *&X to X if X is an lvalue. */
7875 if (TREE_CODE (op0) == ADDR_EXPR)
7876 {
7877 tree op00 = TREE_OPERAND (op0, 0);
7878 if ((VAR_P (op00)
7879 || TREE_CODE (op00) == PARM_DECL
7880 || TREE_CODE (op00) == RESULT_DECL)
7881 && !TREE_READONLY (op00))
7882 return op00;
7883 }
7884 return NULL_TREE;
7885
7886 default:
7887 return NULL_TREE;
7888 } /* switch (code) */
7889 }
7890
7891
7892 /* If the operation was a conversion do _not_ mark a resulting constant
7893 with TREE_OVERFLOW if the original constant was not. These conversions
7894 have implementation defined behavior and retaining the TREE_OVERFLOW
7895 flag here would confuse later passes such as VRP. */
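/* For example, folding (int) 0x80000000u on a 32-bit target yields
   INT_MIN; since the conversion merely has implementation-defined
   behavior, the result must not carry TREE_OVERFLOW when the operand
   did not.  */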
7896 tree
7897 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7898 tree type, tree op0)
7899 {
7900 tree res = fold_unary_loc (loc, code, type, op0);
7901 if (res
7902 && TREE_CODE (res) == INTEGER_CST
7903 && TREE_CODE (op0) == INTEGER_CST
7904 && CONVERT_EXPR_CODE_P (code))
7905 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7906
7907 return res;
7908 }
7909
7910 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7911 operands OP0 and OP1. LOC is the location of the resulting expression.
7912 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7913 Return the folded expression if folding is successful. Otherwise,
7914 return NULL_TREE. */
7915 static tree
7916 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7917 tree arg0, tree arg1, tree op0, tree op1)
7918 {
7919 tree tem;
7920
7921 /* We only do these simplifications if we are optimizing. */
7922 if (!optimize)
7923 return NULL_TREE;
7924
7925 /* Check for things like (A || B) && (A || C). We can convert this
7926 to A || (B && C). Note that either operator can be any of the four
7927 truth and/or operations and the transformation will still be
7928 valid. Also note that we only care about order for the
7929 ANDIF and ORIF operators. If B contains side effects, this
7930 might change the truth-value of A. */
7931 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7932 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7933 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7934 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7935 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7936 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7937 {
7938 tree a00 = TREE_OPERAND (arg0, 0);
7939 tree a01 = TREE_OPERAND (arg0, 1);
7940 tree a10 = TREE_OPERAND (arg1, 0);
7941 tree a11 = TREE_OPERAND (arg1, 1);
7942 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7943 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7944 && (code == TRUTH_AND_EXPR
7945 || code == TRUTH_OR_EXPR));
7946
7947 if (operand_equal_p (a00, a10, 0))
7948 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7949 fold_build2_loc (loc, code, type, a01, a11));
7950 else if (commutative && operand_equal_p (a00, a11, 0))
7951 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7952 fold_build2_loc (loc, code, type, a01, a10));
7953 else if (commutative && operand_equal_p (a01, a10, 0))
7954 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7955 fold_build2_loc (loc, code, type, a00, a11));
7956
7957 /* This case is tricky because we must either have commutative
7958 operators or else A10 must not have side-effects. */
7959
7960 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7961 && operand_equal_p (a01, a11, 0))
7962 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7963 fold_build2_loc (loc, code, type, a00, a10),
7964 a01);
7965 }
7966
7967 /* See if we can build a range comparison. */
7968 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
7969 return tem;
7970
7971 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
7972 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
7973 {
7974 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
7975 if (tem)
7976 return fold_build2_loc (loc, code, type, tem, arg1);
7977 }
7978
7979 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
7980 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
7981 {
7982 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
7983 if (tem)
7984 return fold_build2_loc (loc, code, type, arg0, tem);
7985 }
7986
7987 /* Check for the possibility of merging component references. If our
7988 lhs is another similar operation, try to merge its rhs with our
7989 rhs. Then try to merge our lhs and rhs. */
7990 if (TREE_CODE (arg0) == code
7991 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
7992 TREE_OPERAND (arg0, 1), arg1)))
7993 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
7994
7995 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
7996 return tem;
7997
7998 if (LOGICAL_OP_NON_SHORT_CIRCUIT
7999 && !flag_sanitize_coverage
8000 && (code == TRUTH_AND_EXPR
8001 || code == TRUTH_ANDIF_EXPR
8002 || code == TRUTH_OR_EXPR
8003 || code == TRUTH_ORIF_EXPR))
8004 {
8005 enum tree_code ncode, icode;
8006
8007 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8008 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8009 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8010
8011 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8012 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8013 We don't want to pack more than two leaves into a non-IF AND/OR
8014 expression.
8015 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8016 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8017 If the inner right-hand side of the left-hand operand has
8018 side-effects, or isn't simple, then we can't add to it, as
8019 otherwise we might destroy the if-sequence. */
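/* For example, (a && b) && c with simple, trap-free B and C becomes
   TRUTH_ANDIF_EXPR <a, TRUTH_AND_EXPR <b, c>>, so only A keeps its
   short-circuit semantics and a single conditional jump remains.  */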
8020 if (TREE_CODE (arg0) == icode
8021 && simple_operand_p_2 (arg1)
8022 /* Needed for sequence points to handle trappings, and
8023 side-effects. */
8024 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8025 {
8026 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8027 arg1);
8028 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8029 tem);
8030 }
8031 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8032 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8033 else if (TREE_CODE (arg1) == icode
8034 && simple_operand_p_2 (arg0)
8035 /* Needed for sequence points to handle trappings, and
8036 side-effects. */
8037 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8038 {
8039 tem = fold_build2_loc (loc, ncode, type,
8040 arg0, TREE_OPERAND (arg1, 0));
8041 return fold_build2_loc (loc, icode, type, tem,
8042 TREE_OPERAND (arg1, 1));
8043 }
8044 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8045 into (A OR B).
8046 For sequence point consistency, we need to check for trapping
8047 and side-effects. */
8048 else if (code == icode && simple_operand_p_2 (arg0)
8049 && simple_operand_p_2 (arg1))
8050 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8051 }
8052
8053 return NULL_TREE;
8054 }
8055
8056 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8057 by changing CODE to reduce the magnitude of constants involved in
8058 ARG0 of the comparison.
8059 Returns a canonicalized comparison tree if a simplification was
8060 possible, otherwise returns NULL_TREE.
8061 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8062 valid if signed overflow is undefined. */
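/* For example, A - 5 < B becomes A - 4 <= B: the constant shrinks in
   magnitude while the comparison code absorbs the off-by-one, which
   is only valid when signed overflow is undefined.  */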
8063
8064 static tree
8065 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8066 tree arg0, tree arg1,
8067 bool *strict_overflow_p)
8068 {
8069 enum tree_code code0 = TREE_CODE (arg0);
8070 tree t, cst0 = NULL_TREE;
8071 int sgn0;
8072
8073 /* Match A +- CST code arg1. We can change this only if overflow
8074 is undefined. */
8075 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8076 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8077 /* In principle pointers also have undefined overflow behavior,
8078 but that causes problems elsewhere. */
8079 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8080 && (code0 == MINUS_EXPR
8081 || code0 == PLUS_EXPR)
8082 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8083 return NULL_TREE;
8084
8085 /* Identify the constant in arg0 and its sign. */
8086 cst0 = TREE_OPERAND (arg0, 1);
8087 sgn0 = tree_int_cst_sgn (cst0);
8088
8089 /* Overflowed constants and zero will cause problems. */
8090 if (integer_zerop (cst0)
8091 || TREE_OVERFLOW (cst0))
8092 return NULL_TREE;
8093
8094 /* See if we can reduce the magnitude of the constant in
8095 arg0 by changing the comparison code. */
8096 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8097 if (code == LT_EXPR
8098 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8099 code = LE_EXPR;
8100 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8101 else if (code == GT_EXPR
8102 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8103 code = GE_EXPR;
8104 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8105 else if (code == LE_EXPR
8106 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8107 code = LT_EXPR;
8108 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8109 else if (code == GE_EXPR
8110 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8111 code = GT_EXPR;
8112 else
8113 return NULL_TREE;
8114 *strict_overflow_p = true;
8115
8116 /* Now build the constant reduced in magnitude. But not if that
8117 would produce one outside of its type's range. */
8118 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8119 && ((sgn0 == 1
8120 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8121 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8122 || (sgn0 == -1
8123 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8124 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8125 return NULL_TREE;
8126
8127 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8128 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8129 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8130 t = fold_convert (TREE_TYPE (arg1), t);
8131
8132 return fold_build2_loc (loc, code, type, t, arg1);
8133 }
8134
8135 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8136 overflow further. Try to decrease the magnitude of constants involved
8137 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8138 and put sole constants at the second argument position.
8139 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8140
8141 static tree
8142 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8143 tree arg0, tree arg1)
8144 {
8145 tree t;
8146 bool strict_overflow_p;
8147 const char * const warnmsg = G_("assuming signed overflow does not occur "
8148 "when reducing constant in comparison");
8149
8150 /* Try canonicalization by simplifying arg0. */
8151 strict_overflow_p = false;
8152 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8153 &strict_overflow_p);
8154 if (t)
8155 {
8156 if (strict_overflow_p)
8157 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8158 return t;
8159 }
8160
8161 /* Try canonicalization by simplifying arg1 using the swapped
8162 comparison. */
8163 code = swap_tree_comparison (code);
8164 strict_overflow_p = false;
8165 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8166 &strict_overflow_p);
8167 if (t && strict_overflow_p)
8168 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8169 return t;
8170 }
8171
8172 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8173 space. This is used to avoid issuing overflow warnings for
8174 expressions like &p->x which cannot wrap. */
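/* For example, with P pointing to a 16-byte object, the offset of
   &p->x is a small constant, the total fits within the object's size,
   and the function returns false.  */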
8175
8176 static bool
8177 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8178 {
8179 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8180 return true;
8181
8182 if (bitpos < 0)
8183 return true;
8184
8185 wide_int wi_offset;
8186 int precision = TYPE_PRECISION (TREE_TYPE (base));
8187 if (offset == NULL_TREE)
8188 wi_offset = wi::zero (precision);
8189 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8190 return true;
8191 else
8192 wi_offset = wi::to_wide (offset);
8193
8194 bool overflow;
8195 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8196 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8197 if (overflow)
8198 return true;
8199
8200 if (!wi::fits_uhwi_p (total))
8201 return true;
8202
8203 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8204 if (size <= 0)
8205 return true;
8206
8207 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8208 array. */
8209 if (TREE_CODE (base) == ADDR_EXPR)
8210 {
8211 HOST_WIDE_INT base_size;
8212
8213 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8214 if (base_size > 0 && size < base_size)
8215 size = base_size;
8216 }
8217
8218 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8219 }
8220
8221 /* Return a positive integer when the symbol DECL is known to have
8222 a nonzero address, zero when it's known not to (e.g., it's a weak
8223 symbol), and a negative integer when the symbol is not yet in the
8224 symbol table and so whether or not its address is zero is unknown.
8225 For function-local objects, always return a positive integer. */
8226 static int
8227 maybe_nonzero_address (tree decl)
8228 {
8229 if (DECL_P (decl) && decl_in_symtab_p (decl))
8230 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8231 return symbol->nonzero_address ();
8232
8233 /* Function local objects are never NULL. */
8234 if (DECL_P (decl)
8235 && (DECL_CONTEXT (decl)
8236 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8237 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8238 return 1;
8239
8240 return -1;
8241 }
8242
8243 /* Subroutine of fold_binary. This routine performs all of the
8244 transformations that are common to the equality/inequality
8245 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8246 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8247 fold_binary should call fold_binary. Fold a comparison with
8248 tree code CODE and type TYPE with operands OP0 and OP1. Return
8249 the folded comparison or NULL_TREE. */
8250
8251 static tree
8252 fold_comparison (location_t loc, enum tree_code code, tree type,
8253 tree op0, tree op1)
8254 {
8255 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8256 tree arg0, arg1, tem;
8257
8258 arg0 = op0;
8259 arg1 = op1;
8260
8261 STRIP_SIGN_NOPS (arg0);
8262 STRIP_SIGN_NOPS (arg1);
8263
8264 /* For comparisons of pointers we can decompose it to a compile time
8265 comparison of the base objects and the offsets into the object.
8266 This requires at least one operand being an ADDR_EXPR or a
8267 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
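/* For example, &x[1] < &x[2] decomposes to the common base X with bit
   positions 32 and 64 (for 4-byte elements) and folds to true without
   comparing any addresses at run time.  */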
8268 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8269 && (TREE_CODE (arg0) == ADDR_EXPR
8270 || TREE_CODE (arg1) == ADDR_EXPR
8271 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8272 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8273 {
8274 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8275 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8276 machine_mode mode;
8277 int volatilep, reversep, unsignedp;
8278 bool indirect_base0 = false, indirect_base1 = false;
8279
8280 /* Get base and offset for the access. Strip ADDR_EXPR for
8281 get_inner_reference, but put it back by stripping INDIRECT_REF
8282 off the base object if possible. indirect_baseN will be true
8283 if baseN is not an address but refers to the object itself. */
8284 base0 = arg0;
8285 if (TREE_CODE (arg0) == ADDR_EXPR)
8286 {
8287 base0
8288 = get_inner_reference (TREE_OPERAND (arg0, 0),
8289 &bitsize, &bitpos0, &offset0, &mode,
8290 &unsignedp, &reversep, &volatilep);
8291 if (TREE_CODE (base0) == INDIRECT_REF)
8292 base0 = TREE_OPERAND (base0, 0);
8293 else
8294 indirect_base0 = true;
8295 }
8296 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8297 {
8298 base0 = TREE_OPERAND (arg0, 0);
8299 STRIP_SIGN_NOPS (base0);
8300 if (TREE_CODE (base0) == ADDR_EXPR)
8301 {
8302 base0
8303 = get_inner_reference (TREE_OPERAND (base0, 0),
8304 &bitsize, &bitpos0, &offset0, &mode,
8305 &unsignedp, &reversep, &volatilep);
8306 if (TREE_CODE (base0) == INDIRECT_REF)
8307 base0 = TREE_OPERAND (base0, 0);
8308 else
8309 indirect_base0 = true;
8310 }
8311 if (offset0 == NULL_TREE || integer_zerop (offset0))
8312 offset0 = TREE_OPERAND (arg0, 1);
8313 else
8314 offset0 = size_binop (PLUS_EXPR, offset0,
8315 TREE_OPERAND (arg0, 1));
8316 if (TREE_CODE (offset0) == INTEGER_CST)
8317 {
8318 offset_int tem = wi::sext (wi::to_offset (offset0),
8319 TYPE_PRECISION (sizetype));
8320 tem <<= LOG2_BITS_PER_UNIT;
8321 tem += bitpos0;
8322 if (wi::fits_shwi_p (tem))
8323 {
8324 bitpos0 = tem.to_shwi ();
8325 offset0 = NULL_TREE;
8326 }
8327 }
8328 }
8329
8330 base1 = arg1;
8331 if (TREE_CODE (arg1) == ADDR_EXPR)
8332 {
8333 base1
8334 = get_inner_reference (TREE_OPERAND (arg1, 0),
8335 &bitsize, &bitpos1, &offset1, &mode,
8336 &unsignedp, &reversep, &volatilep);
8337 if (TREE_CODE (base1) == INDIRECT_REF)
8338 base1 = TREE_OPERAND (base1, 0);
8339 else
8340 indirect_base1 = true;
8341 }
8342 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8343 {
8344 base1 = TREE_OPERAND (arg1, 0);
8345 STRIP_SIGN_NOPS (base1);
8346 if (TREE_CODE (base1) == ADDR_EXPR)
8347 {
8348 base1
8349 = get_inner_reference (TREE_OPERAND (base1, 0),
8350 &bitsize, &bitpos1, &offset1, &mode,
8351 &unsignedp, &reversep, &volatilep);
8352 if (TREE_CODE (base1) == INDIRECT_REF)
8353 base1 = TREE_OPERAND (base1, 0);
8354 else
8355 indirect_base1 = true;
8356 }
8357 if (offset1 == NULL_TREE || integer_zerop (offset1))
8358 offset1 = TREE_OPERAND (arg1, 1);
8359 else
8360 offset1 = size_binop (PLUS_EXPR, offset1,
8361 TREE_OPERAND (arg1, 1));
8362 if (TREE_CODE (offset1) == INTEGER_CST)
8363 {
8364 offset_int tem = wi::sext (wi::to_offset (offset1),
8365 TYPE_PRECISION (sizetype));
8366 tem <<= LOG2_BITS_PER_UNIT;
8367 tem += bitpos1;
8368 if (wi::fits_shwi_p (tem))
8369 {
8370 bitpos1 = tem.to_shwi ();
8371 offset1 = NULL_TREE;
8372 }
8373 }
8374 }
8375
8376 /* If we have equivalent bases we might be able to simplify. */
8377 if (indirect_base0 == indirect_base1
8378 && operand_equal_p (base0, base1,
8379 indirect_base0 ? OEP_ADDRESS_OF : 0))
8380 {
8381 /* We can fold this expression to a constant if the non-constant
8382 offset parts are equal. */
8383 if (offset0 == offset1
8384 || (offset0 && offset1
8385 && operand_equal_p (offset0, offset1, 0)))
8386 {
8387 if (!equality_code
8388 && bitpos0 != bitpos1
8389 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8390 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8391 fold_overflow_warning (("assuming pointer wraparound does not "
8392 "occur when comparing P +- C1 with "
8393 "P +- C2"),
8394 WARN_STRICT_OVERFLOW_CONDITIONAL);
8395
8396 switch (code)
8397 {
8398 case EQ_EXPR:
8399 return constant_boolean_node (bitpos0 == bitpos1, type);
8400 case NE_EXPR:
8401 return constant_boolean_node (bitpos0 != bitpos1, type);
8402 case LT_EXPR:
8403 return constant_boolean_node (bitpos0 < bitpos1, type);
8404 case LE_EXPR:
8405 return constant_boolean_node (bitpos0 <= bitpos1, type);
8406 case GE_EXPR:
8407 return constant_boolean_node (bitpos0 >= bitpos1, type);
8408 case GT_EXPR:
8409 return constant_boolean_node (bitpos0 > bitpos1, type);
8410 default:;
8411 }
8412 }
8413 /* We can simplify the comparison to a comparison of the variable
8414 offset parts if the constant offset parts are equal.
8415 Be careful to use signed sizetype here because otherwise we
8416 mess with array offsets in the wrong way. This is possible
8417 because pointer arithmetic is restricted to remain within an
8418 object and overflow on pointer differences is undefined as of
8419 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8420 else if (bitpos0 == bitpos1)
8421 {
8422 /* By converting to signed sizetype we cover middle-end pointer
8423 arithmetic which operates on unsigned pointer types of size
8424 type size and ARRAY_REF offsets which are properly sign or
8425 zero extended from their type in case it is narrower than
8426 sizetype. */
8427 if (offset0 == NULL_TREE)
8428 offset0 = build_int_cst (ssizetype, 0);
8429 else
8430 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8431 if (offset1 == NULL_TREE)
8432 offset1 = build_int_cst (ssizetype, 0);
8433 else
8434 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8435
8436 if (!equality_code
8437 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8438 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8439 fold_overflow_warning (("assuming pointer wraparound does not "
8440 "occur when comparing P +- C1 with "
8441 "P +- C2"),
8442 WARN_STRICT_OVERFLOW_COMPARISON);
8443
8444 return fold_build2_loc (loc, code, type, offset0, offset1);
8445 }
8446 }
8447 /* For equal offsets we can simplify to a comparison of the
8448 base addresses. */
8449 else if (bitpos0 == bitpos1
8450 && (indirect_base0
8451 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8452 && (indirect_base1
8453 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8454 && ((offset0 == offset1)
8455 || (offset0 && offset1
8456 && operand_equal_p (offset0, offset1, 0))))
8457 {
8458 if (indirect_base0)
8459 base0 = build_fold_addr_expr_loc (loc, base0);
8460 if (indirect_base1)
8461 base1 = build_fold_addr_expr_loc (loc, base1);
8462 return fold_build2_loc (loc, code, type, base0, base1);
8463 }
8464 /* Comparison between an ordinary (non-weak) symbol and a null
8465 pointer can be eliminated since such symbols must have a
8466 non-null address. In C, relational expressions between pointers
8467 to objects and null pointers are undefined. The results
8468 below follow the C++ rules with the additional property that
8469 every object pointer compares greater than a null pointer.
8470 */
8471 else if (((DECL_P (base0)
8472 && maybe_nonzero_address (base0) > 0
8473 /* Avoid folding references to struct members at offset 0 to
8474 prevent tests like '&ptr->firstmember == 0' from getting
8475 eliminated. When ptr is null, although the -> expression
8476 is strictly speaking invalid, GCC retains it as a matter
8477 of QoI. See PR c/44555. */
8478 && (offset0 == NULL_TREE && bitpos0 != 0))
8479 || CONSTANT_CLASS_P (base0))
8480 && indirect_base0
8481 /* The caller guarantees that when one of the arguments is
8482 constant (i.e., null in this case) it is second. */
8483 && integer_zerop (arg1))
8484 {
8485 switch (code)
8486 {
8487 case EQ_EXPR:
8488 case LE_EXPR:
8489 case LT_EXPR:
8490 return constant_boolean_node (false, type);
8491 case GE_EXPR:
8492 case GT_EXPR:
8493 case NE_EXPR:
8494 return constant_boolean_node (true, type);
8495 default:
8496 gcc_unreachable ();
8497 }
8498 }
8499 }
}
8500
8501 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8502 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8503 the resulting offset is smaller in absolute value than the
8504 original one and has the same sign. */
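/* For example, X + 2 < Y + 5 becomes X < Y + 3: the combined constant
   3 is smaller in magnitude than 5 and has the same sign, so the
   rewrite cannot introduce a new overflow.  */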
8505 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8506 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8507 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8508 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8509 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8510 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8511 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8512 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8513 {
8514 tree const1 = TREE_OPERAND (arg0, 1);
8515 tree const2 = TREE_OPERAND (arg1, 1);
8516 tree variable1 = TREE_OPERAND (arg0, 0);
8517 tree variable2 = TREE_OPERAND (arg1, 0);
8518 tree cst;
8519 const char * const warnmsg = G_("assuming signed overflow does not "
8520 "occur when combining constants around "
8521 "a comparison");
8522
8523 /* Put the constant on the side where it doesn't overflow and is
8524 of lower absolute value and of the same sign as before. */
8525 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8526 ? MINUS_EXPR : PLUS_EXPR,
8527 const2, const1);
8528 if (!TREE_OVERFLOW (cst)
8529 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8530 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8531 {
8532 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8533 return fold_build2_loc (loc, code, type,
8534 variable1,
8535 fold_build2_loc (loc, TREE_CODE (arg1),
8536 TREE_TYPE (arg1),
8537 variable2, cst));
8538 }
8539
8540 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8541 ? MINUS_EXPR : PLUS_EXPR,
8542 const1, const2);
8543 if (!TREE_OVERFLOW (cst)
8544 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8545 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8546 {
8547 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8548 return fold_build2_loc (loc, code, type,
8549 fold_build2_loc (loc, TREE_CODE (arg0),
8550 TREE_TYPE (arg0),
8551 variable1, cst),
8552 variable2);
8553 }
8554 }
8555
8556 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8557 if (tem)
8558 return tem;
8559
8560 /* If we are comparing an expression that just has comparisons
8561 of two integer values, arithmetic expressions of those comparisons,
8562 and constants, we can simplify it. There are only three cases
8563 to check: the two values can either be equal, the first can be
8564 greater, or the second can be greater. Fold the expression for
8565 those three values. Since each value must be 0 or 1, we have
8566 eight possibilities, each of which corresponds to the constant 0
8567 or 1 or one of the six possible comparisons.
8568
8569 This handles common cases like (a > b) == 0 but also handles
8570 expressions like ((x > y) - (y > x)) > 0, which supposedly
8571 occur in macroized code. */
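/* For example, (a > b) == 0 evaluates to 0, 0 and 1 for the three
   orderings, which selects LE_EXPR, i.e. the whole expression folds
   to a <= b.  */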
8572
8573 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8574 {
8575 tree cval1 = 0, cval2 = 0;
8576 int save_p = 0;
8577
8578 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8579 /* Don't handle degenerate cases here; they should already
8580 have been handled anyway. */
8581 && cval1 != 0 && cval2 != 0
8582 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8583 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8584 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8585 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8586 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8587 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8588 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8589 {
8590 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8591 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8592
8593 /* We can't just pass T to eval_subst in case cval1 or cval2
8594 was the same as ARG1. */
8595
8596 tree high_result
8597 = fold_build2_loc (loc, code, type,
8598 eval_subst (loc, arg0, cval1, maxval,
8599 cval2, minval),
8600 arg1);
8601 tree equal_result
8602 = fold_build2_loc (loc, code, type,
8603 eval_subst (loc, arg0, cval1, maxval,
8604 cval2, maxval),
8605 arg1);
8606 tree low_result
8607 = fold_build2_loc (loc, code, type,
8608 eval_subst (loc, arg0, cval1, minval,
8609 cval2, maxval),
8610 arg1);
8611
8612 /* All three of these results should be 0 or 1. Confirm they are.
8613 Then use those values to select the proper code to use. */
8614
8615 if (TREE_CODE (high_result) == INTEGER_CST
8616 && TREE_CODE (equal_result) == INTEGER_CST
8617 && TREE_CODE (low_result) == INTEGER_CST)
8618 {
8619 /* Make a 3-bit mask with the high-order bit being the
8620 value for `>', the next for '=', and the low for '<'. */
8621 switch ((integer_onep (high_result) * 4)
8622 + (integer_onep (equal_result) * 2)
8623 + integer_onep (low_result))
8624 {
8625 case 0:
8626 /* Always false. */
8627 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8628 case 1:
8629 code = LT_EXPR;
8630 break;
8631 case 2:
8632 code = EQ_EXPR;
8633 break;
8634 case 3:
8635 code = LE_EXPR;
8636 break;
8637 case 4:
8638 code = GT_EXPR;
8639 break;
8640 case 5:
8641 code = NE_EXPR;
8642 break;
8643 case 6:
8644 code = GE_EXPR;
8645 break;
8646 case 7:
8647 /* Always true. */
8648 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8649 }
8650
8651 if (save_p)
8652 {
8653 tem = save_expr (build2 (code, type, cval1, cval2));
8654 protected_set_expr_location (tem, loc);
8655 return tem;
8656 }
8657 return fold_build2_loc (loc, code, type, cval1, cval2);
8658 }
8659 }
8660 }
8661
8662 return NULL_TREE;
8663 }
8664
8665
8666 /* Subroutine of fold_binary. Optimize complex multiplications of the
8667 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8668 argument EXPR represents the expression "z" of type TYPE. */
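/* E.g. for z = a + b*i we have z * conj(z) = (a + b*i) * (a - b*i)
   = a*a + b*b, so the imaginary part of the result is zero. */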
8669
8670 static tree
8671 fold_mult_zconjz (location_t loc, tree type, tree expr)
8672 {
8673 tree itype = TREE_TYPE (type);
8674 tree rpart, ipart, tem;
8675
8676 if (TREE_CODE (expr) == COMPLEX_EXPR)
8677 {
8678 rpart = TREE_OPERAND (expr, 0);
8679 ipart = TREE_OPERAND (expr, 1);
8680 }
8681 else if (TREE_CODE (expr) == COMPLEX_CST)
8682 {
8683 rpart = TREE_REALPART (expr);
8684 ipart = TREE_IMAGPART (expr);
8685 }
8686 else
8687 {
8688 expr = save_expr (expr);
8689 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8690 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8691 }
8692
8693 rpart = save_expr (rpart);
8694 ipart = save_expr (ipart);
8695 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8696 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8697 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8698 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8699 build_zero_cst (itype));
8700 }
8701
8702
8703 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8704 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
8705 true if successful. */
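/* Trailing elements that ARG does not provide (a CONSTRUCTOR may list
   fewer than NELTS values) are filled with zero below. */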
8706
8707 static bool
8708 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
8709 {
8710 unsigned int i;
8711
8712 if (TREE_CODE (arg) == VECTOR_CST)
8713 {
8714 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8715 elts[i] = VECTOR_CST_ELT (arg, i);
8716 }
8717 else if (TREE_CODE (arg) == CONSTRUCTOR)
8718 {
8719 constructor_elt *elt;
8720
8721 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8722 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8723 return false;
8724 else
8725 elts[i] = elt->value;
8726 }
8727 else
8728 return false;
8729 for (; i < nelts; i++)
8730 elts[i]
8731 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8732 return true;
8733 }
8734
8735 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8736 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8737 NULL_TREE otherwise. */
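/* The selector indexes the concatenation of ARG0 and ARG1, so with
   four elements per vector, sel = {0, 5, 2, 7} picks elements 0 and 2
   of ARG0 and elements 1 and 3 of ARG1. */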
8738
8739 static tree
8740 fold_vec_perm (tree type, tree arg0, tree arg1, vec_perm_indices sel)
8741 {
8742 unsigned int i;
8743 bool need_ctor = false;
8744
8745 unsigned int nelts = sel.length ();
8746 gcc_assert (TYPE_VECTOR_SUBPARTS (type) == nelts
8747 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8748 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8749 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8750 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8751 return NULL_TREE;
8752
8753 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
8754 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
8755 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
8756 return NULL_TREE;
8757
8758 auto_vec<tree, 32> out_elts (nelts);
8759 for (i = 0; i < nelts; i++)
8760 {
8761 if (!CONSTANT_CLASS_P (in_elts[sel[i]]))
8762 need_ctor = true;
8763 out_elts.quick_push (unshare_expr (in_elts[sel[i]]));
8764 }
8765
8766 if (need_ctor)
8767 {
8768 vec<constructor_elt, va_gc> *v;
8769 vec_alloc (v, nelts);
8770 for (i = 0; i < nelts; i++)
8771 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
8772 return build_constructor (type, v);
8773 }
8774 else
8775 return build_vector (type, out_elts);
8776 }
8777
8778 /* Try to fold a pointer difference of type TYPE between two address
8779 expressions of array references AREF0 and AREF1 using location LOC.
8780 Return a simplified expression for the difference or NULL_TREE. */
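/* E.g. &a[i] - &a[j] becomes essentially (i - j) * sizeof (a[0]);
   the recursion on the bases handles nested cases such as
   &a[i][k] - &a[j][l]. */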
8781
8782 static tree
8783 fold_addr_of_array_ref_difference (location_t loc, tree type,
8784 tree aref0, tree aref1,
8785 bool use_pointer_diff)
8786 {
8787 tree base0 = TREE_OPERAND (aref0, 0);
8788 tree base1 = TREE_OPERAND (aref1, 0);
8789 tree base_offset = build_int_cst (type, 0);
8790
8791 /* If the bases are array references as well, recurse. If the bases
8792 are pointer indirections, compute the difference of the pointers.
8793 If the bases are equal, we are set. */
8794 if ((TREE_CODE (base0) == ARRAY_REF
8795 && TREE_CODE (base1) == ARRAY_REF
8796 && (base_offset
8797 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
8798 use_pointer_diff)))
8799 || (INDIRECT_REF_P (base0)
8800 && INDIRECT_REF_P (base1)
8801 && (base_offset
8802 = use_pointer_diff
8803 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
8804 TREE_OPERAND (base0, 0),
8805 TREE_OPERAND (base1, 0))
8806 : fold_binary_loc (loc, MINUS_EXPR, type,
8807 fold_convert (type,
8808 TREE_OPERAND (base0, 0)),
8809 fold_convert (type,
8810 TREE_OPERAND (base1, 0)))))
8811 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8812 {
8813 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8814 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8815 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8816 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
8817 return fold_build2_loc (loc, PLUS_EXPR, type,
8818 base_offset,
8819 fold_build2_loc (loc, MULT_EXPR, type,
8820 diff, esz));
8821 }
8822 return NULL_TREE;
8823 }
8824
8825 /* If the real or vector real constant CST of type TYPE has an exact
8826 inverse, return it; otherwise return NULL_TREE. */
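/* E.g. 4.0 has the exact inverse 0.25, whereas 3.0 has no exact
   inverse in a binary floating-point mode. */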
8827
8828 tree
8829 exact_inverse (tree type, tree cst)
8830 {
8831 REAL_VALUE_TYPE r;
8832 tree unit_type;
8833 machine_mode mode;
8834 unsigned vec_nelts, i;
8835
8836 switch (TREE_CODE (cst))
8837 {
8838 case REAL_CST:
8839 r = TREE_REAL_CST (cst);
8840
8841 if (exact_real_inverse (TYPE_MODE (type), &r))
8842 return build_real (type, r);
8843
8844 return NULL_TREE;
8845
8846 case VECTOR_CST:
8847 {
8848 vec_nelts = VECTOR_CST_NELTS (cst);
8849 unit_type = TREE_TYPE (type);
8850 mode = TYPE_MODE (unit_type);
8851
8852 auto_vec<tree, 32> elts (vec_nelts);
8853 for (i = 0; i < vec_nelts; i++)
8854 {
8855 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8856 if (!exact_real_inverse (mode, &r))
8857 return NULL_TREE;
8858 elts.quick_push (build_real (unit_type, r));
8859 }
8860
8861 return build_vector (type, elts);
8862 }
8863
8864 default:
8865 return NULL_TREE;
8866 }
8867 }
8868
8869 /* Mask out the tz least significant bits of X of type TYPE where
8870 tz is the number of trailing zeroes in Y. */
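/* E.g. if Y ends in three zero bits (tz == 3), the three least
   significant bits of X are cleared. */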
8871 static wide_int
8872 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8873 {
8874 int tz = wi::ctz (y);
8875 if (tz > 0)
8876 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8877 return x;
8878 }
8879
8880 /* Return true when T is an address and is known to be nonzero.
8881 For floating point we further ensure that T is not denormal.
8882 Similar logic is present in nonzero_address in rtlanal.c.
8883
8884 If the return value is based on the assumption that signed overflow
8885 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8886 change *STRICT_OVERFLOW_P. */
8887
8888 static bool
8889 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8890 {
8891 tree type = TREE_TYPE (t);
8892 enum tree_code code;
8893
8894 /* Doing something useful for floating point would need more work. */
8895 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8896 return false;
8897
8898 code = TREE_CODE (t);
8899 switch (TREE_CODE_CLASS (code))
8900 {
8901 case tcc_unary:
8902 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8903 strict_overflow_p);
8904 case tcc_binary:
8905 case tcc_comparison:
8906 return tree_binary_nonzero_warnv_p (code, type,
8907 TREE_OPERAND (t, 0),
8908 TREE_OPERAND (t, 1),
8909 strict_overflow_p);
8910 case tcc_constant:
8911 case tcc_declaration:
8912 case tcc_reference:
8913 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8914
8915 default:
8916 break;
8917 }
8918
8919 switch (code)
8920 {
8921 case TRUTH_NOT_EXPR:
8922 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8923 strict_overflow_p);
8924
8925 case TRUTH_AND_EXPR:
8926 case TRUTH_OR_EXPR:
8927 case TRUTH_XOR_EXPR:
8928 return tree_binary_nonzero_warnv_p (code, type,
8929 TREE_OPERAND (t, 0),
8930 TREE_OPERAND (t, 1),
8931 strict_overflow_p);
8932
8933 case COND_EXPR:
8934 case CONSTRUCTOR:
8935 case OBJ_TYPE_REF:
8936 case ASSERT_EXPR:
8937 case ADDR_EXPR:
8938 case WITH_SIZE_EXPR:
8939 case SSA_NAME:
8940 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8941
8942 case COMPOUND_EXPR:
8943 case MODIFY_EXPR:
8944 case BIND_EXPR:
8945 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
8946 strict_overflow_p);
8947
8948 case SAVE_EXPR:
8949 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
8950 strict_overflow_p);
8951
8952 case CALL_EXPR:
8953 {
8954 tree fndecl = get_callee_fndecl (t);
8955 if (!fndecl) return false;
8956 if (flag_delete_null_pointer_checks && !flag_check_new
8957 && DECL_IS_OPERATOR_NEW (fndecl)
8958 && !TREE_NOTHROW (fndecl))
8959 return true;
8960 if (flag_delete_null_pointer_checks
8961 && lookup_attribute ("returns_nonnull",
8962 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
8963 return true;
8964 return alloca_call_p (t);
8965 }
8966
8967 default:
8968 break;
8969 }
8970 return false;
8971 }
8972
8973 /* Return true when T is an address and is known to be nonzero.
8974 Handle warnings about undefined signed overflow. */
8975
8976 bool
8977 tree_expr_nonzero_p (tree t)
8978 {
8979 bool ret, strict_overflow_p;
8980
8981 strict_overflow_p = false;
8982 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
8983 if (strict_overflow_p)
8984 fold_overflow_warning (("assuming signed overflow does not occur when "
8985 "determining that expression is always "
8986 "non-zero"),
8987 WARN_STRICT_OVERFLOW_MISC);
8988 return ret;
8989 }
8990
8991 /* Return true if T is known not to be equal to an integer W. */
8992
8993 bool
8994 expr_not_equal_to (tree t, const wide_int &w)
8995 {
8996 wide_int min, max, nz;
8997 value_range_type rtype;
8998 switch (TREE_CODE (t))
8999 {
9000 case INTEGER_CST:
9001 return wi::to_wide (t) != w;
9002
9003 case SSA_NAME:
9004 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9005 return false;
9006 rtype = get_range_info (t, &min, &max);
9007 if (rtype == VR_RANGE)
9008 {
9009 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9010 return true;
9011 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9012 return true;
9013 }
9014 else if (rtype == VR_ANTI_RANGE
9015 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9016 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9017 return true;
9018 /* If T has some known zero bits and W has any of those bits set,
9019 then T is known not to be equal to W. */
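      /* E.g. if the low bit of T is known to be zero and W is odd,
	 T and W cannot be equal. */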
9020 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9021 TYPE_PRECISION (TREE_TYPE (t))), 0))
9022 return true;
9023 return false;
9024
9025 default:
9026 return false;
9027 }
9028 }
9029
9030 /* Fold a binary expression of code CODE and type TYPE with operands
9031 OP0 and OP1. LOC is the location of the resulting expression.
9032 Return the folded expression if folding is successful. Otherwise,
9033 return NULL_TREE. */
9034
9035 tree
9036 fold_binary_loc (location_t loc,
9037 enum tree_code code, tree type, tree op0, tree op1)
9038 {
9039 enum tree_code_class kind = TREE_CODE_CLASS (code);
9040 tree arg0, arg1, tem;
9041 tree t1 = NULL_TREE;
9042 bool strict_overflow_p;
9043 unsigned int prec;
9044
9045 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9046 && TREE_CODE_LENGTH (code) == 2
9047 && op0 != NULL_TREE
9048 && op1 != NULL_TREE);
9049
9050 arg0 = op0;
9051 arg1 = op1;
9052
9053 /* Strip any conversions that don't change the mode. This is
9054 safe for every expression, except for a comparison expression
9055 because its signedness is derived from its operands. So, in
9056 the latter case, only strip conversions that don't change the
9057 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9058 preserved.
9059
9060 Note that this is done as an internal manipulation within the
9061 constant folder, in order to find the simplest representation
9062 of the arguments so that their form can be studied. In any
9063 cases, the appropriate type conversions should be put back in
9064 the tree that will get out of the constant folder. */
9065
9066 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9067 {
9068 STRIP_SIGN_NOPS (arg0);
9069 STRIP_SIGN_NOPS (arg1);
9070 }
9071 else
9072 {
9073 STRIP_NOPS (arg0);
9074 STRIP_NOPS (arg1);
9075 }
9076
9077 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9078 constant but we can't do arithmetic on them. */
9079 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9080 {
9081 tem = const_binop (code, type, arg0, arg1);
9082 if (tem != NULL_TREE)
9083 {
9084 if (TREE_TYPE (tem) != type)
9085 tem = fold_convert_loc (loc, type, tem);
9086 return tem;
9087 }
9088 }
9089
9090 /* If this is a commutative operation, and ARG0 is a constant, move it
9091 to ARG1 to reduce the number of tests below. */
9092 if (commutative_tree_code (code)
9093 && tree_swap_operands_p (arg0, arg1))
9094 return fold_build2_loc (loc, code, type, op1, op0);
9095
9096 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9097 to ARG1 to reduce the number of tests below. */
9098 if (kind == tcc_comparison
9099 && tree_swap_operands_p (arg0, arg1))
9100 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9101
9102 tem = generic_simplify (loc, code, type, op0, op1);
9103 if (tem)
9104 return tem;
9105
9106 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9107
9108 First check for cases where an arithmetic operation is applied to a
9109 compound, conditional, or comparison operation. Push the arithmetic
9110 operation inside the compound or conditional to see if any folding
9111 can then be done. Convert comparison to conditional for this purpose.
9112 This also optimizes non-constant cases that used to be done in
9113 expand_expr.
9114
9115 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR in
9116 which one of the operands is a truth value and the other is a truth
9117 value or a BIT_AND_EXPR with the constant 1. In that case, the
9118 code below would make the expression more complex. Change it to a
9119 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9120 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
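  /* E.g. (a < b) & (c < d) becomes (a < b) && (c < d) (as a
     non-short-circuiting TRUTH_AND_EXPR), and (a < b) == (c < d)
     becomes !((a < b) ^ (c < d)). */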
9121
9122 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9123 || code == EQ_EXPR || code == NE_EXPR)
9124 && TREE_CODE (type) != VECTOR_TYPE
9125 && ((truth_value_p (TREE_CODE (arg0))
9126 && (truth_value_p (TREE_CODE (arg1))
9127 || (TREE_CODE (arg1) == BIT_AND_EXPR
9128 && integer_onep (TREE_OPERAND (arg1, 1)))))
9129 || (truth_value_p (TREE_CODE (arg1))
9130 && (truth_value_p (TREE_CODE (arg0))
9131 || (TREE_CODE (arg0) == BIT_AND_EXPR
9132 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9133 {
9134 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9135 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9136 : TRUTH_XOR_EXPR,
9137 boolean_type_node,
9138 fold_convert_loc (loc, boolean_type_node, arg0),
9139 fold_convert_loc (loc, boolean_type_node, arg1));
9140
9141 if (code == EQ_EXPR)
9142 tem = invert_truthvalue_loc (loc, tem);
9143
9144 return fold_convert_loc (loc, type, tem);
9145 }
9146
9147 if (TREE_CODE_CLASS (code) == tcc_binary
9148 || TREE_CODE_CLASS (code) == tcc_comparison)
9149 {
9150 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9151 {
9152 tem = fold_build2_loc (loc, code, type,
9153 fold_convert_loc (loc, TREE_TYPE (op0),
9154 TREE_OPERAND (arg0, 1)), op1);
9155 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9156 tem);
9157 }
9158 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9159 {
9160 tem = fold_build2_loc (loc, code, type, op0,
9161 fold_convert_loc (loc, TREE_TYPE (op1),
9162 TREE_OPERAND (arg1, 1)));
9163 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9164 tem);
9165 }
9166
9167 if (TREE_CODE (arg0) == COND_EXPR
9168 || TREE_CODE (arg0) == VEC_COND_EXPR
9169 || COMPARISON_CLASS_P (arg0))
9170 {
9171 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9172 arg0, arg1,
9173 /*cond_first_p=*/1);
9174 if (tem != NULL_TREE)
9175 return tem;
9176 }
9177
9178 if (TREE_CODE (arg1) == COND_EXPR
9179 || TREE_CODE (arg1) == VEC_COND_EXPR
9180 || COMPARISON_CLASS_P (arg1))
9181 {
9182 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9183 arg1, arg0,
9184 /*cond_first_p=*/0);
9185 if (tem != NULL_TREE)
9186 return tem;
9187 }
9188 }
9189
9190 switch (code)
9191 {
9192 case MEM_REF:
9193 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9194 if (TREE_CODE (arg0) == ADDR_EXPR
9195 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9196 {
9197 tree iref = TREE_OPERAND (arg0, 0);
9198 return fold_build2 (MEM_REF, type,
9199 TREE_OPERAND (iref, 0),
9200 int_const_binop (PLUS_EXPR, arg1,
9201 TREE_OPERAND (iref, 1)));
9202 }
9203
9204 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9205 if (TREE_CODE (arg0) == ADDR_EXPR
9206 && handled_component_p (TREE_OPERAND (arg0, 0)))
9207 {
9208 tree base;
9209 HOST_WIDE_INT coffset;
9210 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9211 &coffset);
9212 if (!base)
9213 return NULL_TREE;
9214 return fold_build2 (MEM_REF, type,
9215 build_fold_addr_expr (base),
9216 int_const_binop (PLUS_EXPR, arg1,
9217 size_int (coffset)));
9218 }
9219
9220 return NULL_TREE;
9221
9222 case POINTER_PLUS_EXPR:
9223 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9224 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9225 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9226 return fold_convert_loc (loc, type,
9227 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9228 fold_convert_loc (loc, sizetype,
9229 arg1),
9230 fold_convert_loc (loc, sizetype,
9231 arg0)));
9232
9233 return NULL_TREE;
9234
9235 case PLUS_EXPR:
9236 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9237 {
9238 /* X + (X / CST) * -CST is X % CST. */
9239 if (TREE_CODE (arg1) == MULT_EXPR
9240 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9241 && operand_equal_p (arg0,
9242 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9243 {
9244 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9245 tree cst1 = TREE_OPERAND (arg1, 1);
9246 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9247 cst1, cst0);
9248 if (sum && integer_zerop (sum))
9249 return fold_convert_loc (loc, type,
9250 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9251 TREE_TYPE (arg0), arg0,
9252 cst0));
9253 }
9254 }
9255
9256 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9257 one. Make sure the type is not saturating and has the signedness of
9258 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9259 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9260 if ((TREE_CODE (arg0) == MULT_EXPR
9261 || TREE_CODE (arg1) == MULT_EXPR)
9262 && !TYPE_SATURATING (type)
9263 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9264 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9265 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9266 {
9267 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9268 if (tem)
9269 return tem;
9270 }
9271
9272 if (! FLOAT_TYPE_P (type))
9273 {
9274 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9275 (plus (plus (mult) (mult)) (foo)) so that we can
9276 take advantage of the factoring cases below. */
9277 if (ANY_INTEGRAL_TYPE_P (type)
9278 && TYPE_OVERFLOW_WRAPS (type)
9279 && (((TREE_CODE (arg0) == PLUS_EXPR
9280 || TREE_CODE (arg0) == MINUS_EXPR)
9281 && TREE_CODE (arg1) == MULT_EXPR)
9282 || ((TREE_CODE (arg1) == PLUS_EXPR
9283 || TREE_CODE (arg1) == MINUS_EXPR)
9284 && TREE_CODE (arg0) == MULT_EXPR)))
9285 {
9286 tree parg0, parg1, parg, marg;
9287 enum tree_code pcode;
9288
9289 if (TREE_CODE (arg1) == MULT_EXPR)
9290 parg = arg0, marg = arg1;
9291 else
9292 parg = arg1, marg = arg0;
9293 pcode = TREE_CODE (parg);
9294 parg0 = TREE_OPERAND (parg, 0);
9295 parg1 = TREE_OPERAND (parg, 1);
9296 STRIP_NOPS (parg0);
9297 STRIP_NOPS (parg1);
9298
9299 if (TREE_CODE (parg0) == MULT_EXPR
9300 && TREE_CODE (parg1) != MULT_EXPR)
9301 return fold_build2_loc (loc, pcode, type,
9302 fold_build2_loc (loc, PLUS_EXPR, type,
9303 fold_convert_loc (loc, type,
9304 parg0),
9305 fold_convert_loc (loc, type,
9306 marg)),
9307 fold_convert_loc (loc, type, parg1));
9308 if (TREE_CODE (parg0) != MULT_EXPR
9309 && TREE_CODE (parg1) == MULT_EXPR)
9310 return
9311 fold_build2_loc (loc, PLUS_EXPR, type,
9312 fold_convert_loc (loc, type, parg0),
9313 fold_build2_loc (loc, pcode, type,
9314 fold_convert_loc (loc, type, marg),
9315 fold_convert_loc (loc, type,
9316 parg1)));
9317 }
9318 }
9319 else
9320 {
9321 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9322 to __complex__ ( x, y ). This is not the same for SNaNs or
9323 if signed zeros are involved. */
9324 if (!HONOR_SNANS (element_mode (arg0))
9325 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9326 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9327 {
9328 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9329 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9330 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9331 bool arg0rz = false, arg0iz = false;
9332 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9333 || (arg0i && (arg0iz = real_zerop (arg0i))))
9334 {
9335 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9336 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9337 if (arg0rz && arg1i && real_zerop (arg1i))
9338 {
9339 tree rp = arg1r ? arg1r
9340 : build1 (REALPART_EXPR, rtype, arg1);
9341 tree ip = arg0i ? arg0i
9342 : build1 (IMAGPART_EXPR, rtype, arg0);
9343 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9344 }
9345 else if (arg0iz && arg1r && real_zerop (arg1r))
9346 {
9347 tree rp = arg0r ? arg0r
9348 : build1 (REALPART_EXPR, rtype, arg0);
9349 tree ip = arg1i ? arg1i
9350 : build1 (IMAGPART_EXPR, rtype, arg1);
9351 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9352 }
9353 }
9354 }
9355
9356 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9357 We associate floats only if the user has specified
9358 -fassociative-math. */
9359 if (flag_associative_math
9360 && TREE_CODE (arg1) == PLUS_EXPR
9361 && TREE_CODE (arg0) != MULT_EXPR)
9362 {
9363 tree tree10 = TREE_OPERAND (arg1, 0);
9364 tree tree11 = TREE_OPERAND (arg1, 1);
9365 if (TREE_CODE (tree11) == MULT_EXPR
9366 && TREE_CODE (tree10) == MULT_EXPR)
9367 {
9368 tree tree0;
9369 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9370 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9371 }
9372 }
9373 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9374 We associate floats only if the user has specified
9375 -fassociative-math. */
9376 if (flag_associative_math
9377 && TREE_CODE (arg0) == PLUS_EXPR
9378 && TREE_CODE (arg1) != MULT_EXPR)
9379 {
9380 tree tree00 = TREE_OPERAND (arg0, 0);
9381 tree tree01 = TREE_OPERAND (arg0, 1);
9382 if (TREE_CODE (tree01) == MULT_EXPR
9383 && TREE_CODE (tree00) == MULT_EXPR)
9384 {
9385 tree tree0;
9386 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9387 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9388 }
9389 }
9390 }
9391
9392 bit_rotate:
9393 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9394 is a rotate of A by C1 bits. */
9395 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9396 is a rotate of A by B bits.
9397 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
9398 though in this case CODE must be | and not + or ^, otherwise
9399 it doesn't return A when B is 0. */
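  /* E.g. for a 32-bit unsigned A, (A << 3) + (A >> 29) is a left
     rotation of A by 3, and (A << B) | (A >> (-B & 31)) is a left
     rotation of A by B. */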
9400 {
9401 enum tree_code code0, code1;
9402 tree rtype;
9403 code0 = TREE_CODE (arg0);
9404 code1 = TREE_CODE (arg1);
9405 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9406 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9407 && operand_equal_p (TREE_OPERAND (arg0, 0),
9408 TREE_OPERAND (arg1, 0), 0)
9409 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9410 TYPE_UNSIGNED (rtype))
9411 /* Only create rotates in complete modes. Other cases are not
9412 expanded properly. */
9413 && (element_precision (rtype)
9414 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9415 {
9416 tree tree01, tree11;
9417 tree orig_tree01, orig_tree11;
9418 enum tree_code code01, code11;
9419
9420 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
9421 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
9422 STRIP_NOPS (tree01);
9423 STRIP_NOPS (tree11);
9424 code01 = TREE_CODE (tree01);
9425 code11 = TREE_CODE (tree11);
9426 if (code11 != MINUS_EXPR
9427 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
9428 {
9429 std::swap (code0, code1);
9430 std::swap (code01, code11);
9431 std::swap (tree01, tree11);
9432 std::swap (orig_tree01, orig_tree11);
9433 }
9434 if (code01 == INTEGER_CST
9435 && code11 == INTEGER_CST
9436 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9437 == element_precision (rtype)))
9438 {
9439 tem = build2_loc (loc, LROTATE_EXPR,
9440 rtype, TREE_OPERAND (arg0, 0),
9441 code0 == LSHIFT_EXPR
9442 ? orig_tree01 : orig_tree11);
9443 return fold_convert_loc (loc, type, tem);
9444 }
9445 else if (code11 == MINUS_EXPR)
9446 {
9447 tree tree110, tree111;
9448 tree110 = TREE_OPERAND (tree11, 0);
9449 tree111 = TREE_OPERAND (tree11, 1);
9450 STRIP_NOPS (tree110);
9451 STRIP_NOPS (tree111);
9452 if (TREE_CODE (tree110) == INTEGER_CST
9453 && 0 == compare_tree_int (tree110,
9454 element_precision (rtype))
9455 && operand_equal_p (tree01, tree111, 0))
9456 {
9457 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9458 ? LROTATE_EXPR : RROTATE_EXPR),
9459 rtype, TREE_OPERAND (arg0, 0),
9460 orig_tree01);
9461 return fold_convert_loc (loc, type, tem);
9462 }
9463 }
9464 else if (code == BIT_IOR_EXPR
9465 && code11 == BIT_AND_EXPR
9466 && pow2p_hwi (element_precision (rtype)))
9467 {
9468 tree tree110, tree111;
9469 tree110 = TREE_OPERAND (tree11, 0);
9470 tree111 = TREE_OPERAND (tree11, 1);
9471 STRIP_NOPS (tree110);
9472 STRIP_NOPS (tree111);
9473 if (TREE_CODE (tree110) == NEGATE_EXPR
9474 && TREE_CODE (tree111) == INTEGER_CST
9475 && 0 == compare_tree_int (tree111,
9476 element_precision (rtype) - 1)
9477 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9478 {
9479 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9480 ? LROTATE_EXPR : RROTATE_EXPR),
9481 rtype, TREE_OPERAND (arg0, 0),
9482 orig_tree01);
9483 return fold_convert_loc (loc, type, tem);
9484 }
9485 }
9486 }
9487 }
9488
9489 associate:
9490 /* In most languages, we can't associate operations on floats through
9491 parentheses. Rather than remember where the parentheses were, we
9492 don't associate floats at all, unless the user has specified
9493 -fassociative-math.
9494 And, we need to make sure type is not saturating. */
9495
9496 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9497 && !TYPE_SATURATING (type))
9498 {
9499 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9500 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9501 tree atype = type;
9502 bool ok = true;
9503
9504 /* Split both trees into variables, constants, and literals. Then
9505 associate each group together, the constants with literals,
9506 then the result with variables. This increases the chances of
9507 literals being recombined later and of generating relocatable
9508 expressions for the sum of a constant and literal. */
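      /* "Literals" here are constants we can compute with at compile time
	 (e.g. the 3 in x + 3), while "constants" are merely TREE_CONSTANT,
	 e.g. the address of a static variable. */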
9509 var0 = split_tree (arg0, type, code,
9510 &minus_var0, &con0, &minus_con0,
9511 &lit0, &minus_lit0, 0);
9512 var1 = split_tree (arg1, type, code,
9513 &minus_var1, &con1, &minus_con1,
9514 &lit1, &minus_lit1, code == MINUS_EXPR);
9515
9516 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9517 if (code == MINUS_EXPR)
9518 code = PLUS_EXPR;
9519
9520 /* With undefined overflow prefer doing association in a type
9521 which wraps on overflow, if that is one of the operand types. */
9522 if (POINTER_TYPE_P (type)
9523 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9524 {
9525 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9526 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9527 atype = TREE_TYPE (arg0);
9528 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9529 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9530 atype = TREE_TYPE (arg1);
9531 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9532 }
9533
9534 /* With undefined overflow we can only associate constants with one
9535 variable, and constants whose association doesn't overflow. */
9536 if (POINTER_TYPE_P (atype)
9537 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9538 {
9539 if ((var0 && var1) || (minus_var0 && minus_var1))
9540 {
9541 /* ??? If split_tree would handle NEGATE_EXPR we could
9542 simply reject these cases and the allowed cases would
9543 be the var0/minus_var1 ones. */
9544 tree tmp0 = var0 ? var0 : minus_var0;
9545 tree tmp1 = var1 ? var1 : minus_var1;
9546 bool one_neg = false;
9547
9548 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9549 {
9550 tmp0 = TREE_OPERAND (tmp0, 0);
9551 one_neg = !one_neg;
9552 }
9553 if (CONVERT_EXPR_P (tmp0)
9554 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9555 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9556 <= TYPE_PRECISION (atype)))
9557 tmp0 = TREE_OPERAND (tmp0, 0);
9558 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9559 {
9560 tmp1 = TREE_OPERAND (tmp1, 0);
9561 one_neg = !one_neg;
9562 }
9563 if (CONVERT_EXPR_P (tmp1)
9564 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9565 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9566 <= TYPE_PRECISION (atype)))
9567 tmp1 = TREE_OPERAND (tmp1, 0);
9568 /* The only case we can still associate with two variables
9569 is if they cancel out. */
9570 if (!one_neg
9571 || !operand_equal_p (tmp0, tmp1, 0))
9572 ok = false;
9573 }
9574 else if ((var0 && minus_var1
9575 && ! operand_equal_p (var0, minus_var1, 0))
9576 || (minus_var0 && var1
9577 && ! operand_equal_p (minus_var0, var1, 0)))
9578 ok = false;
9579 }
9580
9581 /* Only do something if we found more than two objects. Otherwise,
9582 nothing has changed and we risk infinite recursion. */
9583 if (ok
9584 && (2 < ((var0 != 0) + (var1 != 0)
9585 + (minus_var0 != 0) + (minus_var1 != 0)
9586 + (con0 != 0) + (con1 != 0)
9587 + (minus_con0 != 0) + (minus_con1 != 0)
9588 + (lit0 != 0) + (lit1 != 0)
9589 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9590 {
9591 var0 = associate_trees (loc, var0, var1, code, atype);
9592 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9593 code, atype);
9594 con0 = associate_trees (loc, con0, con1, code, atype);
9595 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9596 code, atype);
9597 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9598 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9599 code, atype);
9600
9601 if (minus_var0 && var0)
9602 {
9603 var0 = associate_trees (loc, var0, minus_var0,
9604 MINUS_EXPR, atype);
9605 minus_var0 = 0;
9606 }
9607 if (minus_con0 && con0)
9608 {
9609 con0 = associate_trees (loc, con0, minus_con0,
9610 MINUS_EXPR, atype);
9611 minus_con0 = 0;
9612 }
9613
9614 /* Preserve the MINUS_EXPR if the negative part of the literal is
9615 greater than the positive part. Otherwise, the multiplicative
9616 folding code (i.e. extract_muldiv) may be fooled when
9617 unsigned constants are subtracted, as in the following
9618 example: ((X*2 + 4) - 8U)/2. */
9619 if (minus_lit0 && lit0)
9620 {
9621 if (TREE_CODE (lit0) == INTEGER_CST
9622 && TREE_CODE (minus_lit0) == INTEGER_CST
9623 && tree_int_cst_lt (lit0, minus_lit0)
9624 /* But avoid ending up with only negated parts. */
9625 && (var0 || con0))
9626 {
9627 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9628 MINUS_EXPR, atype);
9629 lit0 = 0;
9630 }
9631 else
9632 {
9633 lit0 = associate_trees (loc, lit0, minus_lit0,
9634 MINUS_EXPR, atype);
9635 minus_lit0 = 0;
9636 }
9637 }
9638
9639 /* Don't introduce overflows through reassociation. */
9640 if ((lit0 && TREE_OVERFLOW_P (lit0))
9641 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9642 return NULL_TREE;
9643
9644 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9645 con0 = associate_trees (loc, con0, lit0, code, atype);
9646 lit0 = 0;
9647 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9648 code, atype);
9649 minus_lit0 = 0;
9650
9651 /* Eliminate minus_con0. */
9652 if (minus_con0)
9653 {
9654 if (con0)
9655 con0 = associate_trees (loc, con0, minus_con0,
9656 MINUS_EXPR, atype);
9657 else if (var0)
9658 var0 = associate_trees (loc, var0, minus_con0,
9659 MINUS_EXPR, atype);
9660 else
9661 gcc_unreachable ();
9662 minus_con0 = 0;
9663 }
9664
9665 /* Eliminate minus_var0. */
9666 if (minus_var0)
9667 {
9668 if (con0)
9669 con0 = associate_trees (loc, con0, minus_var0,
9670 MINUS_EXPR, atype);
9671 else
9672 gcc_unreachable ();
9673 minus_var0 = 0;
9674 }
9675
9676 return
9677 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9678 code, atype));
9679 }
9680 }
9681
9682 return NULL_TREE;
9683
9684 case POINTER_DIFF_EXPR:
9685 case MINUS_EXPR:
9686 /* Fold &a[i] - &a[j] to i-j. */
9687 if (TREE_CODE (arg0) == ADDR_EXPR
9688 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9689 && TREE_CODE (arg1) == ADDR_EXPR
9690 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9691 {
9692 tree tem = fold_addr_of_array_ref_difference (loc, type,
9693 TREE_OPERAND (arg0, 0),
9694 TREE_OPERAND (arg1, 0),
9695 code
9696 == POINTER_DIFF_EXPR);
9697 if (tem)
9698 return tem;
9699 }
9700
9701 /* Further transformations are not for pointers. */
9702 if (code == POINTER_DIFF_EXPR)
9703 return NULL_TREE;
9704
9705 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9706 if (TREE_CODE (arg0) == NEGATE_EXPR
9707 && negate_expr_p (op1))
9708 return fold_build2_loc (loc, MINUS_EXPR, type,
9709 negate_expr (op1),
9710 fold_convert_loc (loc, type,
9711 TREE_OPERAND (arg0, 0)));
9712
9713 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9714 __complex__ ( x, -y ). This is not the same for SNaNs or if
9715 signed zeros are involved. */
9716 if (!HONOR_SNANS (element_mode (arg0))
9717 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9718 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9719 {
9720 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9721 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9722 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9723 bool arg0rz = false, arg0iz = false;
9724 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9725 || (arg0i && (arg0iz = real_zerop (arg0i))))
9726 {
9727 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9728 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9729 if (arg0rz && arg1i && real_zerop (arg1i))
9730 {
9731 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9732 arg1r ? arg1r
9733 : build1 (REALPART_EXPR, rtype, arg1));
9734 tree ip = arg0i ? arg0i
9735 : build1 (IMAGPART_EXPR, rtype, arg0);
9736 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9737 }
9738 else if (arg0iz && arg1r && real_zerop (arg1r))
9739 {
9740 tree rp = arg0r ? arg0r
9741 : build1 (REALPART_EXPR, rtype, arg0);
9742 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9743 arg1i ? arg1i
9744 : build1 (IMAGPART_EXPR, rtype, arg1));
9745 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9746 }
9747 }
9748 }
9749
9750 /* A - B -> A + (-B) if B is easily negatable. */
9751 if (negate_expr_p (op1)
9752 && ! TYPE_OVERFLOW_SANITIZED (type)
9753 && ((FLOAT_TYPE_P (type)
9754 /* Avoid this transformation if B is a positive REAL_CST. */
9755 && (TREE_CODE (op1) != REAL_CST
9756 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9757 || INTEGRAL_TYPE_P (type)))
9758 return fold_build2_loc (loc, PLUS_EXPR, type,
9759 fold_convert_loc (loc, type, arg0),
9760 negate_expr (op1));
9761
9762 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9763 one. Make sure the type is not saturating and has the signedness of
9764 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9765 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9766 if ((TREE_CODE (arg0) == MULT_EXPR
9767 || TREE_CODE (arg1) == MULT_EXPR)
9768 && !TYPE_SATURATING (type)
9769 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9770 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9771 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9772 {
9773 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9774 if (tem)
9775 return tem;
9776 }
9777
9778 goto associate;
9779
9780 case MULT_EXPR:
9781 if (! FLOAT_TYPE_P (type))
9782 {
9783 /* Transform x * -C into -x * C if x is easily negatable. */
9784 if (TREE_CODE (op1) == INTEGER_CST
9785 && tree_int_cst_sgn (op1) == -1
9786 && negate_expr_p (op0)
9787 && negate_expr_p (op1)
9788 && (tem = negate_expr (op1)) != op1
9789 && ! TREE_OVERFLOW (tem))
9790 return fold_build2_loc (loc, MULT_EXPR, type,
9791 fold_convert_loc (loc, type,
9792 negate_expr (op0)), tem);
9793
9794 strict_overflow_p = false;
9795 if (TREE_CODE (arg1) == INTEGER_CST
9796 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9797 &strict_overflow_p)))
9798 {
9799 if (strict_overflow_p)
9800 fold_overflow_warning (("assuming signed overflow does not "
9801 "occur when simplifying "
9802 "multiplication"),
9803 WARN_STRICT_OVERFLOW_MISC);
9804 return fold_convert_loc (loc, type, tem);
9805 }
9806
9807 /* Optimize z * conj(z) for integer complex numbers. */
9808 if (TREE_CODE (arg0) == CONJ_EXPR
9809 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9810 return fold_mult_zconjz (loc, type, arg1);
9811 if (TREE_CODE (arg1) == CONJ_EXPR
9812 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9813 return fold_mult_zconjz (loc, type, arg0);
9814 }
9815 else
9816 {
9817 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9818 This is not the same for NaNs or if signed zeros are
9819 involved. */
9820 if (!HONOR_NANS (arg0)
9821 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9822 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9823 && TREE_CODE (arg1) == COMPLEX_CST
9824 && real_zerop (TREE_REALPART (arg1)))
9825 {
9826 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9827 if (real_onep (TREE_IMAGPART (arg1)))
9828 return
9829 fold_build2_loc (loc, COMPLEX_EXPR, type,
9830 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9831 rtype, arg0)),
9832 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9833 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9834 return
9835 fold_build2_loc (loc, COMPLEX_EXPR, type,
9836 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9837 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9838 rtype, arg0)));
9839 }
9840
9841 /* Optimize z * conj(z) for floating point complex numbers.
9842 Guarded by flag_unsafe_math_optimizations as non-finite
9843 imaginary components don't produce scalar results. */
9844 if (flag_unsafe_math_optimizations
9845 && TREE_CODE (arg0) == CONJ_EXPR
9846 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9847 return fold_mult_zconjz (loc, type, arg1);
9848 if (flag_unsafe_math_optimizations
9849 && TREE_CODE (arg1) == CONJ_EXPR
9850 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9851 return fold_mult_zconjz (loc, type, arg0);
9852 }
9853 goto associate;
9854
9855 case BIT_IOR_EXPR:
9856 /* Canonicalize (X & C1) | C2. */
9857 if (TREE_CODE (arg0) == BIT_AND_EXPR
9858 && TREE_CODE (arg1) == INTEGER_CST
9859 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9860 {
9861 int width = TYPE_PRECISION (type), w;
9862 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
9863 wide_int c2 = wi::to_wide (arg1);
9864
9865 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9866 if ((c1 & c2) == c1)
9867 return omit_one_operand_loc (loc, type, arg1,
9868 TREE_OPERAND (arg0, 0));
9869
9870 wide_int msk = wi::mask (width, false,
9871 TYPE_PRECISION (TREE_TYPE (arg1)));
9872
9873 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9874 if (wi::bit_and_not (msk, c1 | c2) == 0)
9875 {
9876 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9877 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9878 }
9879
9880 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9881 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9882 mode which allows further optimizations. */
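	  /* E.g. (X & 3) | 2 becomes (X & 1) | 2, but (X & 0xff) | 0xf
	     is left alone because 0xff is the mask of a whole mode
	     (QImode) and may enable other folds. */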
9883 c1 &= msk;
9884 c2 &= msk;
9885 wide_int c3 = wi::bit_and_not (c1, c2);
9886 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9887 {
9888 wide_int mask = wi::mask (w, false,
9889 TYPE_PRECISION (type));
9890 if (((c1 | c2) & mask) == mask
9891 && wi::bit_and_not (c1, mask) == 0)
9892 {
9893 c3 = mask;
9894 break;
9895 }
9896 }
9897
9898 if (c3 != c1)
9899 {
9900 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9901 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
9902 wide_int_to_tree (type, c3));
9903 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9904 }
9905 }
9906
9907 /* See if this can be simplified into a rotate first. If that
9908 is unsuccessful continue in the association code. */
9909 goto bit_rotate;
9910
9911 case BIT_XOR_EXPR:
9912 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9913 if (TREE_CODE (arg0) == BIT_AND_EXPR
9914 && INTEGRAL_TYPE_P (type)
9915 && integer_onep (TREE_OPERAND (arg0, 1))
9916 && integer_onep (arg1))
9917 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9918 build_zero_cst (TREE_TYPE (arg0)));
9919
9920 /* See if this can be simplified into a rotate first. If that
9921 is unsuccessful continue in the association code. */
9922 goto bit_rotate;
9923
9924 case BIT_AND_EXPR:
9925 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9926 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9927 && INTEGRAL_TYPE_P (type)
9928 && integer_onep (TREE_OPERAND (arg0, 1))
9929 && integer_onep (arg1))
9930 {
9931 tree tem2;
9932 tem = TREE_OPERAND (arg0, 0);
9933 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9934 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9935 tem, tem2);
9936 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9937 build_zero_cst (TREE_TYPE (tem)));
9938 }
9939 /* Fold ~X & 1 as (X & 1) == 0. */
9940 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9941 && INTEGRAL_TYPE_P (type)
9942 && integer_onep (arg1))
9943 {
9944 tree tem2;
9945 tem = TREE_OPERAND (arg0, 0);
9946 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9947 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9948 tem, tem2);
9949 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9950 build_zero_cst (TREE_TYPE (tem)));
9951 }
9952 /* Fold !X & 1 as X == 0. */
9953 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9954 && integer_onep (arg1))
9955 {
9956 tem = TREE_OPERAND (arg0, 0);
9957 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9958 build_zero_cst (TREE_TYPE (tem)));
9959 }
9960
9961 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
9962 multiple of 1 << CST. */
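      /* E.g. (X * 8) & -4 folds to X * 8: a multiple of 8 is already
	 a multiple of 4, so the mask has no effect. */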
9963 if (TREE_CODE (arg1) == INTEGER_CST)
9964 {
9965 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
9966 wide_int ncst1 = -cst1;
9967 if ((cst1 & ncst1) == ncst1
9968 && multiple_of_p (type, arg0,
9969 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
9970 return fold_convert_loc (loc, type, arg0);
9971 }
9972
9973 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
9974 bits from CST2. */
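      /* E.g. (X * 4) & 3 is 0, and (X * 4) & 7 becomes (X * 4) & 4,
	 since the two low bits of X * 4 are known to be zero. */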
9975 if (TREE_CODE (arg1) == INTEGER_CST
9976 && TREE_CODE (arg0) == MULT_EXPR
9977 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9978 {
9979 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
9980 wide_int masked
9981 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
9982
9983 if (masked == 0)
9984 return omit_two_operands_loc (loc, type, build_zero_cst (type),
9985 arg0, arg1);
9986 else if (masked != warg1)
9987 {
9988 /* Avoid the transform if arg1 is a mask of some
9989 mode which allows further optimizations. */
9990 int pop = wi::popcount (warg1);
9991 if (!(pop >= BITS_PER_UNIT
9992 && pow2p_hwi (pop)
9993 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
9994 return fold_build2_loc (loc, code, type, op0,
9995 wide_int_to_tree (type, masked));
9996 }
9997 }
9998
9999 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10000 ((A & N) + B) & M -> (A + B) & M
10001 Similarly if (N & M) == 0,
10002 ((A | N) + B) & M -> (A + B) & M
10003 and for - instead of + (or unary - instead of +)
10004 and/or ^ instead of |.
10005 If B is constant and (B & M) == 0, fold into A & M. */
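      /* E.g. with M == 0xff: ((A & 0x1ff) + B) & 0xff -> (A + B) & 0xff
	 and ((A | 0x100) + B) & 0xff -> (A + B) & 0xff. */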
10006 if (TREE_CODE (arg1) == INTEGER_CST)
10007 {
10008 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10009 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10010 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10011 && (TREE_CODE (arg0) == PLUS_EXPR
10012 || TREE_CODE (arg0) == MINUS_EXPR
10013 || TREE_CODE (arg0) == NEGATE_EXPR)
10014 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10015 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10016 {
10017 tree pmop[2];
10018 int which = 0;
10019 wide_int cst0;
10020
10021 /* Now we know that arg0 is (C + D) or (C - D) or
10022 -C and arg1 (M) == (1LL << cst) - 1.
10023 Store C into PMOP[0] and D into PMOP[1]. */
10024 pmop[0] = TREE_OPERAND (arg0, 0);
10025 pmop[1] = NULL;
10026 if (TREE_CODE (arg0) != NEGATE_EXPR)
10027 {
10028 pmop[1] = TREE_OPERAND (arg0, 1);
10029 which = 1;
10030 }
10031
10032 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10033 which = -1;
10034
10035 for (; which >= 0; which--)
10036 switch (TREE_CODE (pmop[which]))
10037 {
10038 case BIT_AND_EXPR:
10039 case BIT_IOR_EXPR:
10040 case BIT_XOR_EXPR:
10041 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10042 != INTEGER_CST)
10043 break;
10044 cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
10045 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10046 {
10047 if (cst0 != cst1)
10048 break;
10049 }
10050 else if (cst0 != 0)
10051 break;
10052 /* If C or D is of the form (A & N) where
10053 (N & M) == M, or of the form (A | N) or
10054 (A ^ N) where (N & M) == 0, replace it with A. */
10055 pmop[which] = TREE_OPERAND (pmop[which], 0);
10056 break;
10057 case INTEGER_CST:
10058 /* If C or D is an N where (N & M) == 0, it can be
10059 omitted (assumed 0). */
10060 if ((TREE_CODE (arg0) == PLUS_EXPR
10061 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10062 && (cst1 & wi::to_wide (pmop[which])) == 0)
10063 pmop[which] = NULL;
10064 break;
10065 default:
10066 break;
10067 }
10068
10069 /* Only build anything new if we optimized one or both arguments
10070 above. */
10071 if (pmop[0] != TREE_OPERAND (arg0, 0)
10072 || (TREE_CODE (arg0) != NEGATE_EXPR
10073 && pmop[1] != TREE_OPERAND (arg0, 1)))
10074 {
10075 tree utype = TREE_TYPE (arg0);
10076 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10077 {
10078 /* Perform the operations in a type that has defined
10079 overflow behavior. */
10080 utype = unsigned_type_for (TREE_TYPE (arg0));
10081 if (pmop[0] != NULL)
10082 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10083 if (pmop[1] != NULL)
10084 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10085 }
10086
10087 if (TREE_CODE (arg0) == NEGATE_EXPR)
10088 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10089 else if (TREE_CODE (arg0) == PLUS_EXPR)
10090 {
10091 if (pmop[0] != NULL && pmop[1] != NULL)
10092 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10093 pmop[0], pmop[1]);
10094 else if (pmop[0] != NULL)
10095 tem = pmop[0];
10096 else if (pmop[1] != NULL)
10097 tem = pmop[1];
10098 else
10099 return build_int_cst (type, 0);
10100 }
10101 else if (pmop[0] == NULL)
10102 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10103 else
10104 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10105 pmop[0], pmop[1]);
10106 /* TEM is now the new binary +, - or unary - replacement. */
10107 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10108 fold_convert_loc (loc, utype, arg1));
10109 return fold_convert_loc (loc, type, tem);
10110 }
10111 }
10112 }
10113
10114 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10115 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10116 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10117 {
10118 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10119
10120 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10121 if (mask == -1)
10122 return
10123 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10124 }
10125
10126 goto associate;
10127
10128 case RDIV_EXPR:
10129 /* Don't touch a floating-point divide by zero unless the mode
10130 of the constant can represent infinity. */
10131 if (TREE_CODE (arg1) == REAL_CST
10132 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10133 && real_zerop (arg1))
10134 return NULL_TREE;
10135
10136 /* (-A) / (-B) -> A / B */
10137 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10138 return fold_build2_loc (loc, RDIV_EXPR, type,
10139 TREE_OPERAND (arg0, 0),
10140 negate_expr (arg1));
10141 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10142 return fold_build2_loc (loc, RDIV_EXPR, type,
10143 negate_expr (arg0),
10144 TREE_OPERAND (arg1, 0));
10145 return NULL_TREE;
10146
10147 case TRUNC_DIV_EXPR:
10148 /* Fall through */
10149
10150 case FLOOR_DIV_EXPR:
10151 /* Simplify A / (B << N) where A and B are positive and B is
10152 a power of 2, to A >> (N + log2(B)). */
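      /* E.g. A / (4 << N) becomes A >> (N + 2) when A is unsigned or
	 known to be non-negative. */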
10153 strict_overflow_p = false;
10154 if (TREE_CODE (arg1) == LSHIFT_EXPR
10155 && (TYPE_UNSIGNED (type)
10156 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10157 {
10158 tree sval = TREE_OPERAND (arg1, 0);
10159 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10160 {
10161 tree sh_cnt = TREE_OPERAND (arg1, 1);
10162 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10163 wi::exact_log2 (wi::to_wide (sval)));
10164
10165 if (strict_overflow_p)
10166 fold_overflow_warning (("assuming signed overflow does not "
10167 "occur when simplifying A / (B << N)"),
10168 WARN_STRICT_OVERFLOW_MISC);
10169
10170 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10171 sh_cnt, pow2);
10172 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10173 fold_convert_loc (loc, type, arg0), sh_cnt);
10174 }
10175 }
10176
10177 /* Fall through */
10178
10179 case ROUND_DIV_EXPR:
10180 case CEIL_DIV_EXPR:
10181 case EXACT_DIV_EXPR:
10182 if (integer_zerop (arg1))
10183 return NULL_TREE;
10184
10185 /* Convert -A / -B to A / B when the type is signed and overflow is
10186 undefined. */
10187 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10188 && TREE_CODE (op0) == NEGATE_EXPR
10189 && negate_expr_p (op1))
10190 {
10191 if (INTEGRAL_TYPE_P (type))
10192 fold_overflow_warning (("assuming signed overflow does not occur "
10193 "when distributing negation across "
10194 "division"),
10195 WARN_STRICT_OVERFLOW_MISC);
10196 return fold_build2_loc (loc, code, type,
10197 fold_convert_loc (loc, type,
10198 TREE_OPERAND (arg0, 0)),
10199 negate_expr (op1));
10200 }
10201 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10202 && TREE_CODE (arg1) == NEGATE_EXPR
10203 && negate_expr_p (op0))
10204 {
10205 if (INTEGRAL_TYPE_P (type))
10206 fold_overflow_warning (("assuming signed overflow does not occur "
10207 "when distributing negation across "
10208 "division"),
10209 WARN_STRICT_OVERFLOW_MISC);
10210 return fold_build2_loc (loc, code, type,
10211 negate_expr (op0),
10212 fold_convert_loc (loc, type,
10213 TREE_OPERAND (arg1, 0)));
10214 }
10215
10216 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10217 operation, EXACT_DIV_EXPR.
10218
10219 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10220 At one time others generated faster code, but it's not clear whether
10221 they do after the last round of changes to the DIV code in expmed.c. */
10222 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10223 && multiple_of_p (type, arg0, arg1))
10224 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10225 fold_convert (type, arg0),
10226 fold_convert (type, arg1));
10227
10228 strict_overflow_p = false;
10229 if (TREE_CODE (arg1) == INTEGER_CST
10230 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10231 &strict_overflow_p)))
10232 {
10233 if (strict_overflow_p)
10234 fold_overflow_warning (("assuming signed overflow does not occur "
10235 "when simplifying division"),
10236 WARN_STRICT_OVERFLOW_MISC);
10237 return fold_convert_loc (loc, type, tem);
10238 }
10239
10240 return NULL_TREE;
10241
10242 case CEIL_MOD_EXPR:
10243 case FLOOR_MOD_EXPR:
10244 case ROUND_MOD_EXPR:
10245 case TRUNC_MOD_EXPR:
10246 strict_overflow_p = false;
10247 if (TREE_CODE (arg1) == INTEGER_CST
10248 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10249 &strict_overflow_p)))
10250 {
10251 if (strict_overflow_p)
10252 fold_overflow_warning (("assuming signed overflow does not occur "
10253 "when simplifying modulus"),
10254 WARN_STRICT_OVERFLOW_MISC);
10255 return fold_convert_loc (loc, type, tem);
10256 }
10257
10258 return NULL_TREE;
10259
10260 case LROTATE_EXPR:
10261 case RROTATE_EXPR:
10262 case RSHIFT_EXPR:
10263 case LSHIFT_EXPR:
10264 /* Since a negative shift count is not well-defined,
10265 don't try to compute it in the compiler. */
10266 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10267 return NULL_TREE;
10268
10269 prec = element_precision (type);
10270
10271 /* If we have a rotate of a bit operation with the rotate count and
10272 the second operand of the bit operation both constant,
10273 permute the two operations. */
10274 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10275 && (TREE_CODE (arg0) == BIT_AND_EXPR
10276 || TREE_CODE (arg0) == BIT_IOR_EXPR
10277 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10278 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10279 {
10280 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10281 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10282 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10283 fold_build2_loc (loc, code, type,
10284 arg00, arg1),
10285 fold_build2_loc (loc, code, type,
10286 arg01, arg1));
10287 }
10288
10289 /* Two consecutive rotates adding up to some integer
10290 multiple of the precision of the type can be ignored. */
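      /* E.g. rotating a 32-bit X right by 5 and then by 27 yields X
	 again, since 5 + 27 == 32. */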
10291 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10292 && TREE_CODE (arg0) == RROTATE_EXPR
10293 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10294 && wi::umod_trunc (wi::to_wide (arg1)
10295 + wi::to_wide (TREE_OPERAND (arg0, 1)),
10296 prec) == 0)
10297 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10298
10299 return NULL_TREE;
10300
10301 case MIN_EXPR:
10302 case MAX_EXPR:
10303 goto associate;
10304
10305 case TRUTH_ANDIF_EXPR:
10306 /* Note that the operands of this must be ints
10307 and their values must be 0 or 1.
10308 ("true" is a fixed value perhaps depending on the language.) */
10309 /* If first arg is constant zero, return it. */
10310 if (integer_zerop (arg0))
10311 return fold_convert_loc (loc, type, arg0);
10312 /* FALLTHRU */
10313 case TRUTH_AND_EXPR:
10314 /* If either arg is constant true, drop it. */
10315 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10316 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10317 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10318 /* Preserve sequence points. */
10319 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10320 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10321 /* If second arg is constant zero, result is zero, but first arg
10322 must be evaluated. */
10323 if (integer_zerop (arg1))
10324 return omit_one_operand_loc (loc, type, arg1, arg0);
10325 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10326 case will be handled here. */
10327 if (integer_zerop (arg0))
10328 return omit_one_operand_loc (loc, type, arg0, arg1);
10329
10330 /* !X && X is always false. */
10331 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10332 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10333 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10334 /* X && !X is always false. */
10335 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10336 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10337 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10338
10339 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10340 means A >= Y && A != MAX, but in this case we know that
10341 A < X <= MAX. */
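/* E.g. for unsigned A, A < X && A + 1 > Y becomes A < X && A >= Y:
the first conjunct keeps A below the maximum value, so A + 1 cannot
wrap around. */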
10342
10343 if (!TREE_SIDE_EFFECTS (arg0)
10344 && !TREE_SIDE_EFFECTS (arg1))
10345 {
10346 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10347 if (tem && !operand_equal_p (tem, arg0, 0))
10348 return fold_build2_loc (loc, code, type, tem, arg1);
10349
10350 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10351 if (tem && !operand_equal_p (tem, arg1, 0))
10352 return fold_build2_loc (loc, code, type, arg0, tem);
10353 }
10354
10355 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10356 != NULL_TREE)
10357 return tem;
10358
10359 return NULL_TREE;
10360
10361 case TRUTH_ORIF_EXPR:
10362 /* Note that the operands of this must be ints
10363 and their values must be 0 or true.
10364 ("true" is a fixed value perhaps depending on the language.) */
10365 /* If first arg is constant true, return it. */
10366 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10367 return fold_convert_loc (loc, type, arg0);
10368 /* FALLTHRU */
10369 case TRUTH_OR_EXPR:
10370 /* If either arg is constant zero, drop it. */
10371 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10372 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10373 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10374 /* Preserve sequence points. */
10375 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10376 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10377 /* If second arg is constant true, result is true, but we must
10378 evaluate first arg. */
10379 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10380 return omit_one_operand_loc (loc, type, arg1, arg0);
10381 /* Likewise for first arg, but note this only occurs here for
10382 TRUTH_OR_EXPR. */
10383 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10384 return omit_one_operand_loc (loc, type, arg0, arg1);
10385
10386 /* !X || X is always true. */
10387 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10388 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10389 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10390 /* X || !X is always true. */
10391 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10392 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10393 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10394
10395 /* (X && !Y) || (!X && Y) is X ^ Y */
10396 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10397 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10398 {
10399 tree a0, a1, l0, l1, n0, n1;
10400
10401 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10402 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10403
10404 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10405 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10406
10407 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10408 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10409
10410 if ((operand_equal_p (n0, a0, 0)
10411 && operand_equal_p (n1, a1, 0))
10412 || (operand_equal_p (n0, a1, 0)
10413 && operand_equal_p (n1, a0, 0)))
10414 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10415 }
10416
10417 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10418 != NULL_TREE)
10419 return tem;
10420
10421 return NULL_TREE;
10422
10423 case TRUTH_XOR_EXPR:
10424 /* If the second arg is constant zero, drop it. */
10425 if (integer_zerop (arg1))
10426 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10427 /* If the second arg is constant true, this is a logical inversion. */
10428 if (integer_onep (arg1))
10429 {
10430 tem = invert_truthvalue_loc (loc, arg0);
10431 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10432 }
10433 /* Identical arguments cancel to zero. */
10434 if (operand_equal_p (arg0, arg1, 0))
10435 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10436
10437 /* !X ^ X is always true. */
10438 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10439 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10440 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10441
10442 /* X ^ !X is always true. */
10443 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10444 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10445 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10446
10447 return NULL_TREE;
10448
10449 case EQ_EXPR:
10450 case NE_EXPR:
10451 STRIP_NOPS (arg0);
10452 STRIP_NOPS (arg1);
10453
10454 tem = fold_comparison (loc, code, type, op0, op1);
10455 if (tem != NULL_TREE)
10456 return tem;
10457
10458 /* bool_var != 1 becomes !bool_var. */
10459 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10460 && code == NE_EXPR)
10461 return fold_convert_loc (loc, type,
10462 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10463 TREE_TYPE (arg0), arg0));
10464
10465 /* bool_var == 0 becomes !bool_var. */
10466 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10467 && code == EQ_EXPR)
10468 return fold_convert_loc (loc, type,
10469 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10470 TREE_TYPE (arg0), arg0));
10471
10472 /* !exp != 0 becomes !exp */
10473 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10474 && code == NE_EXPR)
10475 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10476
10477 /* If this is an EQ or NE comparison with zero and ARG0 is
10478 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10479 two operations, but the latter can be done in one less insn
10480 on machines that have only two-operand insns or on which a
10481 constant cannot be the first operand. */
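/* For example, ((1 << foo) & bar) != 0, which tests bit FOO of BAR,
becomes ((bar >> foo) & 1) != 0. */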
10482 if (TREE_CODE (arg0) == BIT_AND_EXPR
10483 && integer_zerop (arg1))
10484 {
10485 tree arg00 = TREE_OPERAND (arg0, 0);
10486 tree arg01 = TREE_OPERAND (arg0, 1);
10487 if (TREE_CODE (arg00) == LSHIFT_EXPR
10488 && integer_onep (TREE_OPERAND (arg00, 0)))
10489 {
10490 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10491 arg01, TREE_OPERAND (arg00, 1));
10492 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10493 build_int_cst (TREE_TYPE (arg0), 1));
10494 return fold_build2_loc (loc, code, type,
10495 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10496 arg1);
10497 }
10498 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10499 && integer_onep (TREE_OPERAND (arg01, 0)))
10500 {
10501 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10502 arg00, TREE_OPERAND (arg01, 1));
10503 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10504 build_int_cst (TREE_TYPE (arg0), 1));
10505 return fold_build2_loc (loc, code, type,
10506 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10507 arg1);
10508 }
10509 }
10510
10511 /* If this is an NE or EQ comparison of zero against the result of a
10512 signed MOD operation whose second operand is a power of 2, make
10513 the MOD operation unsigned since it is simpler and equivalent. */
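/* E.g. for signed int X, X % 8 == 0 becomes (unsigned int) X % 8 == 0;
both sides are zero exactly when the low three bits of X are zero. */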
10514 if (integer_zerop (arg1)
10515 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10516 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10517 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10518 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10519 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10520 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10521 {
10522 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10523 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10524 fold_convert_loc (loc, newtype,
10525 TREE_OPERAND (arg0, 0)),
10526 fold_convert_loc (loc, newtype,
10527 TREE_OPERAND (arg0, 1)));
10528
10529 return fold_build2_loc (loc, code, type, newmod,
10530 fold_convert_loc (loc, newtype, arg1));
10531 }
10532
10533 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10534 C1 is a valid shift constant, and C2 is a power of two, i.e.
10535 a single bit. */
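/* For instance, ((X >> 3) & 4) != 0 tests bit 5 of X; since
4 << 3 == 32 does not overflow, it becomes (X & 32) != 0. */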
10536 if (TREE_CODE (arg0) == BIT_AND_EXPR
10537 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10538 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10539 == INTEGER_CST
10540 && integer_pow2p (TREE_OPERAND (arg0, 1))
10541 && integer_zerop (arg1))
10542 {
10543 tree itype = TREE_TYPE (arg0);
10544 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10545 prec = TYPE_PRECISION (itype);
10546
10547 /* Check for a valid shift count. */
10548 if (wi::ltu_p (wi::to_wide (arg001), prec))
10549 {
10550 tree arg01 = TREE_OPERAND (arg0, 1);
10551 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10552 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10553 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10554 can be rewritten as (X & (C2 << C1)) != 0. */
10555 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10556 {
10557 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10558 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10559 return fold_build2_loc (loc, code, type, tem,
10560 fold_convert_loc (loc, itype, arg1));
10561 }
10562 /* Otherwise, for signed (arithmetic) shifts,
10563 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10564 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10565 else if (!TYPE_UNSIGNED (itype))
10566 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10567 arg000, build_int_cst (itype, 0));
10568 /* Otherwise, for unsigned (logical) shifts,
10569 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10570 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10571 else
10572 return omit_one_operand_loc (loc, type,
10573 code == EQ_EXPR ? integer_one_node
10574 : integer_zero_node,
10575 arg000);
10576 }
10577 }
10578
10579 /* If this is a comparison of a field, we may be able to simplify it. */
10580 if ((TREE_CODE (arg0) == COMPONENT_REF
10581 || TREE_CODE (arg0) == BIT_FIELD_REF)
10582 /* Handle the constant case even without -O
10583 to make sure the warnings are given. */
10584 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10585 {
10586 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10587 if (t1)
10588 return t1;
10589 }
10590
10591 /* Optimize comparisons of strlen vs zero to a compare of the
10592 first character of the string vs zero. To wit,
10593 strlen(ptr) == 0 => *ptr == 0
10594 strlen(ptr) != 0 => *ptr != 0
10595 Other cases should reduce to one of these two (or a constant)
10596 due to the return value of strlen being unsigned. */
10597 if (TREE_CODE (arg0) == CALL_EXPR
10598 && integer_zerop (arg1))
10599 {
10600 tree fndecl = get_callee_fndecl (arg0);
10601
10602 if (fndecl
10603 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10604 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10605 && call_expr_nargs (arg0) == 1
10606 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10607 {
10608 tree iref = build_fold_indirect_ref_loc (loc,
10609 CALL_EXPR_ARG (arg0, 0));
10610 return fold_build2_loc (loc, code, type, iref,
10611 build_int_cst (TREE_TYPE (iref), 0));
10612 }
10613 }
10614
10615 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10616 of X. Similarly fold (X >> C) == 0 into X >= 0. */
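/* E.g. for a 32-bit signed X, (X >> 31) != 0 isolates the sign bit
and becomes X < 0, while (X >> 31) == 0 becomes X >= 0. */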
10617 if (TREE_CODE (arg0) == RSHIFT_EXPR
10618 && integer_zerop (arg1)
10619 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10620 {
10621 tree arg00 = TREE_OPERAND (arg0, 0);
10622 tree arg01 = TREE_OPERAND (arg0, 1);
10623 tree itype = TREE_TYPE (arg00);
10624 if (wi::to_wide (arg01) == element_precision (itype) - 1)
10625 {
10626 if (TYPE_UNSIGNED (itype))
10627 {
10628 itype = signed_type_for (itype);
10629 arg00 = fold_convert_loc (loc, itype, arg00);
10630 }
10631 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10632 type, arg00, build_zero_cst (itype));
10633 }
10634 }
10635
10636 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10637 (X & C) == 0 when C is a single bit. */
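/* E.g. (~X & 8) == 0 holds exactly when bit 3 of X is set, so it
becomes (X & 8) != 0. */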
10638 if (TREE_CODE (arg0) == BIT_AND_EXPR
10639 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10640 && integer_zerop (arg1)
10641 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10642 {
10643 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10644 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10645 TREE_OPERAND (arg0, 1));
10646 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10647 type, tem,
10648 fold_convert_loc (loc, TREE_TYPE (arg0),
10649 arg1));
10650 }
10651
10652 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10653 constant C is a power of two, i.e. a single bit. */
10654 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10655 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10656 && integer_zerop (arg1)
10657 && integer_pow2p (TREE_OPERAND (arg0, 1))
10658 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10659 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10660 {
10661 tree arg00 = TREE_OPERAND (arg0, 0);
10662 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10663 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10664 }
10665
10666 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10667 when C is a power of two, i.e. a single bit. */
10668 if (TREE_CODE (arg0) == BIT_AND_EXPR
10669 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10670 && integer_zerop (arg1)
10671 && integer_pow2p (TREE_OPERAND (arg0, 1))
10672 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10673 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10674 {
10675 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10676 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10677 arg000, TREE_OPERAND (arg0, 1));
10678 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10679 tem, build_int_cst (TREE_TYPE (tem), 0));
10680 }
10681
10682 if (integer_zerop (arg1)
10683 && tree_expr_nonzero_p (arg0))
10684 {
10685 tree res = constant_boolean_node (code == NE_EXPR, type);
10686 return omit_one_operand_loc (loc, type, res, arg0);
10687 }
10688
10689 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
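/* E.g. (X & 7) == (Y & 7) becomes ((X ^ Y) & 7) == 0, since the masked
values are equal exactly when X and Y agree in the low three bits. */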
10690 if (TREE_CODE (arg0) == BIT_AND_EXPR
10691 && TREE_CODE (arg1) == BIT_AND_EXPR)
10692 {
10693 tree arg00 = TREE_OPERAND (arg0, 0);
10694 tree arg01 = TREE_OPERAND (arg0, 1);
10695 tree arg10 = TREE_OPERAND (arg1, 0);
10696 tree arg11 = TREE_OPERAND (arg1, 1);
10697 tree itype = TREE_TYPE (arg0);
10698
10699 if (operand_equal_p (arg01, arg11, 0))
10700 {
10701 tem = fold_convert_loc (loc, itype, arg10);
10702 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10703 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10704 return fold_build2_loc (loc, code, type, tem,
10705 build_zero_cst (itype));
10706 }
10707 if (operand_equal_p (arg01, arg10, 0))
10708 {
10709 tem = fold_convert_loc (loc, itype, arg11);
10710 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10711 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10712 return fold_build2_loc (loc, code, type, tem,
10713 build_zero_cst (itype));
10714 }
10715 if (operand_equal_p (arg00, arg11, 0))
10716 {
10717 tem = fold_convert_loc (loc, itype, arg10);
10718 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10719 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10720 return fold_build2_loc (loc, code, type, tem,
10721 build_zero_cst (itype));
10722 }
10723 if (operand_equal_p (arg00, arg10, 0))
10724 {
10725 tem = fold_convert_loc (loc, itype, arg11);
10726 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10727 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10728 return fold_build2_loc (loc, code, type, tem,
10729 build_zero_cst (itype));
10730 }
10731 }
10732
10733 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10734 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10735 {
10736 tree arg00 = TREE_OPERAND (arg0, 0);
10737 tree arg01 = TREE_OPERAND (arg0, 1);
10738 tree arg10 = TREE_OPERAND (arg1, 0);
10739 tree arg11 = TREE_OPERAND (arg1, 1);
10740 tree itype = TREE_TYPE (arg0);
10741
10742 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10743 operand_equal_p guarantees no side-effects so we don't need
10744 to use omit_one_operand on Z. */
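/* E.g. (X ^ Z) == (Y ^ Z) becomes X == Y, because XORing both sides
with the same Z cancels. */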
10745 if (operand_equal_p (arg01, arg11, 0))
10746 return fold_build2_loc (loc, code, type, arg00,
10747 fold_convert_loc (loc, TREE_TYPE (arg00),
10748 arg10));
10749 if (operand_equal_p (arg01, arg10, 0))
10750 return fold_build2_loc (loc, code, type, arg00,
10751 fold_convert_loc (loc, TREE_TYPE (arg00),
10752 arg11));
10753 if (operand_equal_p (arg00, arg11, 0))
10754 return fold_build2_loc (loc, code, type, arg01,
10755 fold_convert_loc (loc, TREE_TYPE (arg01),
10756 arg10));
10757 if (operand_equal_p (arg00, arg10, 0))
10758 return fold_build2_loc (loc, code, type, arg01,
10759 fold_convert_loc (loc, TREE_TYPE (arg01),
10760 arg11));
10761
10762 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
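/* E.g. (X ^ 5) == (Y ^ 3) becomes (X ^ (5 ^ 3)) == Y,
i.e. (X ^ 6) == Y. */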
10763 if (TREE_CODE (arg01) == INTEGER_CST
10764 && TREE_CODE (arg11) == INTEGER_CST)
10765 {
10766 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10767 fold_convert_loc (loc, itype, arg11));
10768 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10769 return fold_build2_loc (loc, code, type, tem,
10770 fold_convert_loc (loc, itype, arg10));
10771 }
10772 }
10773
10774 /* Attempt to simplify equality/inequality comparisons of complex
10775 values. Only lower the comparison if the result is known or
10776 can be simplified to a single scalar comparison. */
10777 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10778 || TREE_CODE (arg0) == COMPLEX_CST)
10779 && (TREE_CODE (arg1) == COMPLEX_EXPR
10780 || TREE_CODE (arg1) == COMPLEX_CST))
10781 {
10782 tree real0, imag0, real1, imag1;
10783 tree rcond, icond;
10784
10785 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10786 {
10787 real0 = TREE_OPERAND (arg0, 0);
10788 imag0 = TREE_OPERAND (arg0, 1);
10789 }
10790 else
10791 {
10792 real0 = TREE_REALPART (arg0);
10793 imag0 = TREE_IMAGPART (arg0);
10794 }
10795
10796 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10797 {
10798 real1 = TREE_OPERAND (arg1, 0);
10799 imag1 = TREE_OPERAND (arg1, 1);
10800 }
10801 else
10802 {
10803 real1 = TREE_REALPART (arg1);
10804 imag1 = TREE_IMAGPART (arg1);
10805 }
10806
10807 rcond = fold_binary_loc (loc, code, type, real0, real1);
10808 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10809 {
10810 if (integer_zerop (rcond))
10811 {
10812 if (code == EQ_EXPR)
10813 return omit_two_operands_loc (loc, type, boolean_false_node,
10814 imag0, imag1);
10815 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10816 }
10817 else
10818 {
10819 if (code == NE_EXPR)
10820 return omit_two_operands_loc (loc, type, boolean_true_node,
10821 imag0, imag1);
10822 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10823 }
10824 }
10825
10826 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10827 if (icond && TREE_CODE (icond) == INTEGER_CST)
10828 {
10829 if (integer_zerop (icond))
10830 {
10831 if (code == EQ_EXPR)
10832 return omit_two_operands_loc (loc, type, boolean_false_node,
10833 real0, real1);
10834 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10835 }
10836 else
10837 {
10838 if (code == NE_EXPR)
10839 return omit_two_operands_loc (loc, type, boolean_true_node,
10840 real0, real1);
10841 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10842 }
10843 }
10844 }
10845
10846 return NULL_TREE;
10847
10848 case LT_EXPR:
10849 case GT_EXPR:
10850 case LE_EXPR:
10851 case GE_EXPR:
10852 tem = fold_comparison (loc, code, type, op0, op1);
10853 if (tem != NULL_TREE)
10854 return tem;
10855
10856 /* Transform comparisons of the form X +- C CMP X. */
10857 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10858 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10859 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10860 && !HONOR_SNANS (arg0))
10861 {
10862 tree arg01 = TREE_OPERAND (arg0, 1);
10863 enum tree_code code0 = TREE_CODE (arg0);
10864 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10865
10866 /* (X - c) > X becomes false. */
10867 if (code == GT_EXPR
10868 && ((code0 == MINUS_EXPR && is_positive >= 0)
10869 || (code0 == PLUS_EXPR && is_positive <= 0)))
10870 return constant_boolean_node (0, type);
10871
10872 /* Likewise (X + c) < X becomes false. */
10873 if (code == LT_EXPR
10874 && ((code0 == PLUS_EXPR && is_positive >= 0)
10875 || (code0 == MINUS_EXPR && is_positive <= 0)))
10876 return constant_boolean_node (0, type);
10877
10878 /* Convert (X - c) <= X to true. */
10879 if (!HONOR_NANS (arg1)
10880 && code == LE_EXPR
10881 && ((code0 == MINUS_EXPR && is_positive >= 0)
10882 || (code0 == PLUS_EXPR && is_positive <= 0)))
10883 return constant_boolean_node (1, type);
10884
10885 /* Convert (X + c) >= X to true. */
10886 if (!HONOR_NANS (arg1)
10887 && code == GE_EXPR
10888 && ((code0 == PLUS_EXPR && is_positive >= 0)
10889 || (code0 == MINUS_EXPR && is_positive <= 0)))
10890 return constant_boolean_node (1, type);
10891 }
10892
10893 /* If we are comparing an ABS_EXPR with a constant, we can
10894 convert all the cases into explicit comparisons, but they may
10895 well not be faster than doing the ABS and one comparison.
10896 But ABS (X) <= C is a range comparison, which becomes a subtraction
10897 and a comparison, and is probably faster. */
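/* E.g. ABS (X) <= 5 becomes X >= -5 && X <= 5. */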
10898 if (code == LE_EXPR
10899 && TREE_CODE (arg1) == INTEGER_CST
10900 && TREE_CODE (arg0) == ABS_EXPR
10901 && ! TREE_SIDE_EFFECTS (arg0)
10902 && (0 != (tem = negate_expr (arg1)))
10903 && TREE_CODE (tem) == INTEGER_CST
10904 && !TREE_OVERFLOW (tem))
10905 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
10906 build2 (GE_EXPR, type,
10907 TREE_OPERAND (arg0, 0), tem),
10908 build2 (LE_EXPR, type,
10909 TREE_OPERAND (arg0, 0), arg1));
10910
10911 /* Convert ABS_EXPR<x> >= 0 to true. */
10912 strict_overflow_p = false;
10913 if (code == GE_EXPR
10914 && (integer_zerop (arg1)
10915 || (! HONOR_NANS (arg0)
10916 && real_zerop (arg1)))
10917 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
10918 {
10919 if (strict_overflow_p)
10920 fold_overflow_warning (("assuming signed overflow does not occur "
10921 "when simplifying comparison of "
10922 "absolute value and zero"),
10923 WARN_STRICT_OVERFLOW_CONDITIONAL);
10924 return omit_one_operand_loc (loc, type,
10925 constant_boolean_node (true, type),
10926 arg0);
10927 }
10928
10929 /* Convert ABS_EXPR<x> < 0 to false. */
10930 strict_overflow_p = false;
10931 if (code == LT_EXPR
10932 && (integer_zerop (arg1) || real_zerop (arg1))
10933 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
10934 {
10935 if (strict_overflow_p)
10936 fold_overflow_warning (("assuming signed overflow does not occur "
10937 "when simplifying comparison of "
10938 "absolute value and zero"),
10939 WARN_STRICT_OVERFLOW_CONDITIONAL);
10940 return omit_one_operand_loc (loc, type,
10941 constant_boolean_node (false, type),
10942 arg0);
10943 }
10944
10945 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10946 and similarly for >= into !=. */
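/* E.g. for unsigned X, X < (1 << Y) becomes (X >> Y) == 0: X is below
1 << Y exactly when no bit at position Y or above is set. */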
10947 if ((code == LT_EXPR || code == GE_EXPR)
10948 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10949 && TREE_CODE (arg1) == LSHIFT_EXPR
10950 && integer_onep (TREE_OPERAND (arg1, 0)))
10951 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10952 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10953 TREE_OPERAND (arg1, 1)),
10954 build_zero_cst (TREE_TYPE (arg0)));
10955
10956 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
10957 otherwise Y might be >= # of bits in X's type and thus e.g.
10958 (unsigned char) (1 << Y) for Y == 15 might be 0.
10959 If the cast is widening, then 1 << Y should have unsigned type,
10960 otherwise if Y is the number of bits in the signed shift type minus 1,
10961 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
10962 Y == 31 might be 0xffffffff80000000. */
10963 if ((code == LT_EXPR || code == GE_EXPR)
10964 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10965 && CONVERT_EXPR_P (arg1)
10966 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10967 && (element_precision (TREE_TYPE (arg1))
10968 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
10969 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
10970 || (element_precision (TREE_TYPE (arg1))
10971 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
10972 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
10973 {
10974 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10975 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
10976 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10977 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
10978 build_zero_cst (TREE_TYPE (arg0)));
10979 }
10980
10981 return NULL_TREE;
10982
10983 case UNORDERED_EXPR:
10984 case ORDERED_EXPR:
10985 case UNLT_EXPR:
10986 case UNLE_EXPR:
10987 case UNGT_EXPR:
10988 case UNGE_EXPR:
10989 case UNEQ_EXPR:
10990 case LTGT_EXPR:
10991 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
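/* The widening conversion is exact, so e.g. an UNLT comparison of two
floats promoted to double gives the same result as comparing the
floats directly in the narrower type. */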
10992 {
10993 tree targ0 = strip_float_extensions (arg0);
10994 tree targ1 = strip_float_extensions (arg1);
10995 tree newtype = TREE_TYPE (targ0);
10996
10997 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10998 newtype = TREE_TYPE (targ1);
10999
11000 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11001 return fold_build2_loc (loc, code, type,
11002 fold_convert_loc (loc, newtype, targ0),
11003 fold_convert_loc (loc, newtype, targ1));
11004 }
11005
11006 return NULL_TREE;
11007
11008 case COMPOUND_EXPR:
11009 /* When pedantic, a compound expression can be neither an lvalue
11010 nor an integer constant expression. */
11011 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11012 return NULL_TREE;
11013 /* Don't let (0, 0) be a null pointer constant. */
11014 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11015 : fold_convert_loc (loc, type, arg1);
11016 return pedantic_non_lvalue_loc (loc, tem);
11017
11018 case ASSERT_EXPR:
11019 /* An ASSERT_EXPR should never be passed to fold_binary. */
11020 gcc_unreachable ();
11021
11022 default:
11023 return NULL_TREE;
11024 } /* switch (code) */
11025 }
11026
11027 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11028 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11029 of GOTO_EXPR. */
11030
11031 static tree
11032 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11033 {
11034 switch (TREE_CODE (*tp))
11035 {
11036 case LABEL_EXPR:
11037 return *tp;
11038
11039 case GOTO_EXPR:
11040 *walk_subtrees = 0;
11041
11042 /* fall through */
11043
11044 default:
11045 return NULL_TREE;
11046 }
11047 }
11048
11049 /* Return whether the sub-tree ST contains a label which is accessible from
11050 outside the sub-tree. */
11051
11052 static bool
11053 contains_label_p (tree st)
11054 {
11055 return
11056 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11057 }
11058
11059 /* Fold a ternary expression of code CODE and type TYPE with operands
11060 OP0, OP1, and OP2. Return the folded expression if folding is
11061 successful. Otherwise, return NULL_TREE. */
11062
11063 tree
11064 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11065 tree op0, tree op1, tree op2)
11066 {
11067 tree tem;
11068 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11069 enum tree_code_class kind = TREE_CODE_CLASS (code);
11070
11071 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11072 && TREE_CODE_LENGTH (code) == 3);
11073
11074 /* If this is a commutative operation, and OP0 is a constant, move it
11075 to OP1 to reduce the number of tests below. */
11076 if (commutative_ternary_tree_code (code)
11077 && tree_swap_operands_p (op0, op1))
11078 return fold_build3_loc (loc, code, type, op1, op0, op2);
11079
11080 tem = generic_simplify (loc, code, type, op0, op1, op2);
11081 if (tem)
11082 return tem;
11083
11084 /* Strip any conversions that don't change the mode. This is safe
11085 for every expression, except for a comparison expression because
11086 its signedness is derived from its operands. So, in the latter
11087 case, only strip conversions that don't change the signedness.
11088
11089 Note that this is done as an internal manipulation within the
11090 constant folder, in order to find the simplest representation of
11091 the arguments so that their form can be studied. In any case,
11092 the appropriate type conversions should be put back in the tree
11093 that will get out of the constant folder. */
11094 if (op0)
11095 {
11096 arg0 = op0;
11097 STRIP_NOPS (arg0);
11098 }
11099
11100 if (op1)
11101 {
11102 arg1 = op1;
11103 STRIP_NOPS (arg1);
11104 }
11105
11106 if (op2)
11107 {
11108 arg2 = op2;
11109 STRIP_NOPS (arg2);
11110 }
11111
11112 switch (code)
11113 {
11114 case COMPONENT_REF:
11115 if (TREE_CODE (arg0) == CONSTRUCTOR
11116 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11117 {
11118 unsigned HOST_WIDE_INT idx;
11119 tree field, value;
11120 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11121 if (field == arg1)
11122 return value;
11123 }
11124 return NULL_TREE;
11125
11126 case COND_EXPR:
11127 case VEC_COND_EXPR:
11128 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11129 so all simple results must be passed through pedantic_non_lvalue. */
11130 if (TREE_CODE (arg0) == INTEGER_CST)
11131 {
11132 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11133 tem = integer_zerop (arg0) ? op2 : op1;
11134 /* Only optimize constant conditions when the selected branch
11135 has the same type as the COND_EXPR. This avoids optimizing
11136 away "c ? x : throw", where the throw has a void type.
11137 Avoid throwing away an operand that contains a label. */
11138 if ((!TREE_SIDE_EFFECTS (unused_op)
11139 || !contains_label_p (unused_op))
11140 && (! VOID_TYPE_P (TREE_TYPE (tem))
11141 || VOID_TYPE_P (type)))
11142 return pedantic_non_lvalue_loc (loc, tem);
11143 return NULL_TREE;
11144 }
11145 else if (TREE_CODE (arg0) == VECTOR_CST)
11146 {
11147 if ((TREE_CODE (arg1) == VECTOR_CST
11148 || TREE_CODE (arg1) == CONSTRUCTOR)
11149 && (TREE_CODE (arg2) == VECTOR_CST
11150 || TREE_CODE (arg2) == CONSTRUCTOR))
11151 {
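/* A constant mask selects elementwise: an all-ones element picks
element I of ARG1, a zero element picks element I of ARG2, so the
whole VEC_COND_EXPR is a VEC_PERM with indices into the
concatenation of ARG1 and ARG2. */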
11152 unsigned int nelts = VECTOR_CST_NELTS (arg0), i;
11153 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11154 auto_vec_perm_indices sel (nelts);
11155 for (i = 0; i < nelts; i++)
11156 {
11157 tree val = VECTOR_CST_ELT (arg0, i);
11158 if (integer_all_onesp (val))
11159 sel.quick_push (i);
11160 else if (integer_zerop (val))
11161 sel.quick_push (nelts + i);
11162 else /* Currently unreachable. */
11163 return NULL_TREE;
11164 }
11165 tree t = fold_vec_perm (type, arg1, arg2, sel);
11166 if (t != NULL_TREE)
11167 return t;
11168 }
11169 }
11170
11171 /* If we have A op B ? A : C, we may be able to convert this to a
11172 simpler expression, depending on the operation and the values
11173 of B and C. Signed zeros prevent all of these transformations,
11174 for reasons given above each one.
11175
11176 Also try swapping the arguments and inverting the conditional. */
11177 if (COMPARISON_CLASS_P (arg0)
11178 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11179 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11180 {
11181 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11182 if (tem)
11183 return tem;
11184 }
11185
11186 if (COMPARISON_CLASS_P (arg0)
11187 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11188 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11189 {
11190 location_t loc0 = expr_location_or (arg0, loc);
11191 tem = fold_invert_truthvalue (loc0, arg0);
11192 if (tem && COMPARISON_CLASS_P (tem))
11193 {
11194 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11195 if (tem)
11196 return tem;
11197 }
11198 }
11199
11200 /* If the second operand is simpler than the third, swap them
11201 since that produces better jump optimization results. */
11202 if (truth_value_p (TREE_CODE (arg0))
11203 && tree_swap_operands_p (op1, op2))
11204 {
11205 location_t loc0 = expr_location_or (arg0, loc);
11206 /* See if this can be inverted. If it can't, possibly because
11207 it was a floating-point inequality comparison, don't do
11208 anything. */
11209 tem = fold_invert_truthvalue (loc0, arg0);
11210 if (tem)
11211 return fold_build3_loc (loc, code, type, tem, op2, op1);
11212 }
11213
11214 /* Convert A ? 1 : 0 to simply A. */
11215 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11216 : (integer_onep (op1)
11217 && !VECTOR_TYPE_P (type)))
11218 && integer_zerop (op2)
11219 /* If we try to convert OP0 to our type, the
11220 call to fold will try to move the conversion inside
11221 a COND, which will recurse. In that case, the COND_EXPR
11222 is probably the best choice, so leave it alone. */
11223 && type == TREE_TYPE (arg0))
11224 return pedantic_non_lvalue_loc (loc, arg0);
11225
11226 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11227 over COND_EXPR in cases such as floating point comparisons. */
11228 if (integer_zerop (op1)
11229 && code == COND_EXPR
11230 && integer_onep (op2)
11231 && !VECTOR_TYPE_P (type)
11232 && truth_value_p (TREE_CODE (arg0)))
11233 return pedantic_non_lvalue_loc (loc,
11234 fold_convert_loc (loc, type,
11235 invert_truthvalue_loc (loc,
11236 arg0)));
11237
11238 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
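/* E.g. assuming a 32-bit int A, A < 0 ? 0x80000000 : 0 folds to
A & 0x80000000, the constant being exactly A's sign bit. */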
11239 if (TREE_CODE (arg0) == LT_EXPR
11240 && integer_zerop (TREE_OPERAND (arg0, 1))
11241 && integer_zerop (op2)
11242 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11243 {
11244 /* sign_bit_p looks through both zero and sign extensions,
11245 but for this optimization only sign extensions are
11246 usable. */
11247 tree tem2 = TREE_OPERAND (arg0, 0);
11248 while (tem != tem2)
11249 {
11250 if (TREE_CODE (tem2) != NOP_EXPR
11251 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11252 {
11253 tem = NULL_TREE;
11254 break;
11255 }
11256 tem2 = TREE_OPERAND (tem2, 0);
11257 }
11258 /* sign_bit_p only checks ARG1 bits within A's precision.
11259 If <sign bit of A> has wider type than A, bits outside
11260 of A's precision in <sign bit of A> need to be checked.
11261 If they are all 0, this optimization needs to be done
11262 in unsigned A's type; if they are all 1, in signed A's type;
11263 otherwise this can't be done. */
11264 if (tem
11265 && TYPE_PRECISION (TREE_TYPE (tem))
11266 < TYPE_PRECISION (TREE_TYPE (arg1))
11267 && TYPE_PRECISION (TREE_TYPE (tem))
11268 < TYPE_PRECISION (type))
11269 {
11270 int inner_width, outer_width;
11271 tree tem_type;
11272
11273 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11274 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11275 if (outer_width > TYPE_PRECISION (type))
11276 outer_width = TYPE_PRECISION (type);
11277
11278 wide_int mask = wi::shifted_mask
11279 (inner_width, outer_width - inner_width, false,
11280 TYPE_PRECISION (TREE_TYPE (arg1)));
11281
11282 wide_int common = mask & wi::to_wide (arg1);
11283 if (common == mask)
11284 {
11285 tem_type = signed_type_for (TREE_TYPE (tem));
11286 tem = fold_convert_loc (loc, tem_type, tem);
11287 }
11288 else if (common == 0)
11289 {
11290 tem_type = unsigned_type_for (TREE_TYPE (tem));
11291 tem = fold_convert_loc (loc, tem_type, tem);
11292 }
11293 else
11294 tem = NULL;
11295 }
11296
11297 if (tem)
11298 return
11299 fold_convert_loc (loc, type,
11300 fold_build2_loc (loc, BIT_AND_EXPR,
11301 TREE_TYPE (tem), tem,
11302 fold_convert_loc (loc,
11303 TREE_TYPE (tem),
11304 arg1)));
11305 }
11306
11307 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11308 already handled above. */
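/* E.g. (A >> 4) & 1 ? 16 : 0 tests bit 4 of A and selects 1 << 4,
so it is simply A & 16. */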
11309 if (TREE_CODE (arg0) == BIT_AND_EXPR
11310 && integer_onep (TREE_OPERAND (arg0, 1))
11311 && integer_zerop (op2)
11312 && integer_pow2p (arg1))
11313 {
11314 tree tem = TREE_OPERAND (arg0, 0);
11315 STRIP_NOPS (tem);
11316 if (TREE_CODE (tem) == RSHIFT_EXPR
11317 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11318 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11319 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11320 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11321 fold_convert_loc (loc, type,
11322 TREE_OPERAND (tem, 0)),
11323 op1);
11324 }
11325
11326 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11327 is probably obsolete because the first operand should be a
11328 truth value (that's why we have the two cases above), but let's
11329 leave it in until we can confirm this for all front-ends. */
11330 if (integer_zerop (op2)
11331 && TREE_CODE (arg0) == NE_EXPR
11332 && integer_zerop (TREE_OPERAND (arg0, 1))
11333 && integer_pow2p (arg1)
11334 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11335 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11336 arg1, OEP_ONLY_CONST))
11337 return pedantic_non_lvalue_loc (loc,
11338 fold_convert_loc (loc, type,
11339 TREE_OPERAND (arg0, 0)));
11340
11341 /* Disable the transformations below for vectors, since
11342 fold_binary_op_with_conditional_arg may undo them immediately,
11343 yielding an infinite loop. */
11344 if (code == VEC_COND_EXPR)
11345 return NULL_TREE;
11346
11347 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11348 if (integer_zerop (op2)
11349 && truth_value_p (TREE_CODE (arg0))
11350 && truth_value_p (TREE_CODE (arg1))
11351 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11352 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11353 : TRUTH_ANDIF_EXPR,
11354 type, fold_convert_loc (loc, type, arg0), op1);
11355
11356 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11357 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11358 && truth_value_p (TREE_CODE (arg0))
11359 && truth_value_p (TREE_CODE (arg1))
11360 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11361 {
11362 location_t loc0 = expr_location_or (arg0, loc);
11363 /* Only perform transformation if ARG0 is easily inverted. */
11364 tem = fold_invert_truthvalue (loc0, arg0);
11365 if (tem)
11366 return fold_build2_loc (loc, code == VEC_COND_EXPR
11367 ? BIT_IOR_EXPR
11368 : TRUTH_ORIF_EXPR,
11369 type, fold_convert_loc (loc, type, tem),
11370 op1);
11371 }
11372
11373 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11374 if (integer_zerop (arg1)
11375 && truth_value_p (TREE_CODE (arg0))
11376 && truth_value_p (TREE_CODE (op2))
11377 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11378 {
11379 location_t loc0 = expr_location_or (arg0, loc);
11380 /* Only perform transformation if ARG0 is easily inverted. */
11381 tem = fold_invert_truthvalue (loc0, arg0);
11382 if (tem)
11383 return fold_build2_loc (loc, code == VEC_COND_EXPR
11384 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11385 type, fold_convert_loc (loc, type, tem),
11386 op2);
11387 }
11388
11389 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11390 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11391 && truth_value_p (TREE_CODE (arg0))
11392 && truth_value_p (TREE_CODE (op2))
11393 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11394 return fold_build2_loc (loc, code == VEC_COND_EXPR
11395 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11396 type, fold_convert_loc (loc, type, arg0), op2);
11397
11398 return NULL_TREE;
11399
11400 case CALL_EXPR:
11401 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11402 of fold_ternary on them. */
11403 gcc_unreachable ();
11404
11405 case BIT_FIELD_REF:
11406 if (TREE_CODE (arg0) == VECTOR_CST
11407 && (type == TREE_TYPE (TREE_TYPE (arg0))
11408 || (TREE_CODE (type) == VECTOR_TYPE
11409 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11410 {
11411 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11412 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11413 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11414 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11415
11416 if (n != 0
11417 && (idx % width) == 0
11418 && (n % width) == 0
11419 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11420 {
11421 idx = idx / width;
11422 n = n / width;
11423
11424 if (TREE_CODE (arg0) == VECTOR_CST)
11425 {
11426 if (n == 1)
11427 return VECTOR_CST_ELT (arg0, idx);
11428
11429 auto_vec<tree, 32> vals (n);
11430 for (unsigned i = 0; i < n; ++i)
11431 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11432 return build_vector (type, vals);
11433 }
11434 }
11435 }
11436
11437 /* On constants we can use native encode/interpret to constant
11438 fold (nearly) all BIT_FIELD_REFs. */
11439 if (CONSTANT_CLASS_P (arg0)
11440 && can_native_interpret_type_p (type)
11441 && BITS_PER_UNIT == 8)
11442 {
11443 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11444 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11445 /* Limit ourselves to a reasonable amount of work. To relax the
11446 other limitations we need bit-shifting of the buffer
11447 and rounding up the size. */
11448 if (bitpos % BITS_PER_UNIT == 0
11449 && bitsize % BITS_PER_UNIT == 0
11450 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11451 {
11452 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11453 unsigned HOST_WIDE_INT len
11454 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11455 bitpos / BITS_PER_UNIT);
11456 if (len > 0
11457 && len * BITS_PER_UNIT >= bitsize)
11458 {
11459 tree v = native_interpret_expr (type, b,
11460 bitsize / BITS_PER_UNIT);
11461 if (v)
11462 return v;
11463 }
11464 }
11465 }
11466
11467 return NULL_TREE;
11468
11469 case FMA_EXPR:
11470 /* For integers we can decompose the FMA if possible. */
11471 if (TREE_CODE (arg0) == INTEGER_CST
11472 && TREE_CODE (arg1) == INTEGER_CST)
11473 return fold_build2_loc (loc, PLUS_EXPR, type,
11474 const_binop (MULT_EXPR, arg0, arg1), arg2);
11475 if (integer_zerop (arg2))
11476 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11477
11478 return fold_fma (loc, type, arg0, arg1, arg2);
11479
11480 case VEC_PERM_EXPR:
11481 if (TREE_CODE (arg2) == VECTOR_CST)
11482 {
11483 unsigned int nelts = VECTOR_CST_NELTS (arg2), i, mask, mask2;
11484 bool need_mask_canon = false;
11485 bool need_mask_canon2 = false;
11486 bool all_in_vec0 = true;
11487 bool all_in_vec1 = true;
11488 bool maybe_identity = true;
11489 bool single_arg = (op0 == op1);
11490 bool changed = false;
11491
11492 mask2 = 2 * nelts - 1;
11493 mask = single_arg ? (nelts - 1) : mask2;
11494 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11495 auto_vec_perm_indices sel (nelts);
11496 auto_vec_perm_indices sel2 (nelts);
11497 for (i = 0; i < nelts; i++)
11498 {
11499 tree val = VECTOR_CST_ELT (arg2, i);
11500 if (TREE_CODE (val) != INTEGER_CST)
11501 return NULL_TREE;
11502
11503 /* Make sure that the perm value is in an acceptable
11504 range. */
11505 wi::tree_to_wide_ref t = wi::to_wide (val);
11506 need_mask_canon |= wi::gtu_p (t, mask);
11507 need_mask_canon2 |= wi::gtu_p (t, mask2);
11508 unsigned int elt = t.to_uhwi () & mask;
11509 unsigned int elt2 = t.to_uhwi () & mask2;
11510
11511 if (elt < nelts)
11512 all_in_vec1 = false;
11513 else
11514 all_in_vec0 = false;
11515
11516 if ((elt & (nelts - 1)) != i)
11517 maybe_identity = false;
11518
11519 sel.quick_push (elt);
11520 sel2.quick_push (elt2);
11521 }
11522
11523 if (maybe_identity)
11524 {
11525 if (all_in_vec0)
11526 return op0;
11527 if (all_in_vec1)
11528 return op1;
11529 }
11530
11531 if (all_in_vec0)
11532 op1 = op0;
11533 else if (all_in_vec1)
11534 {
11535 op0 = op1;
11536 for (i = 0; i < nelts; i++)
11537 sel[i] -= nelts;
11538 need_mask_canon = true;
11539 }
11540
11541 if ((TREE_CODE (op0) == VECTOR_CST
11542 || TREE_CODE (op0) == CONSTRUCTOR)
11543 && (TREE_CODE (op1) == VECTOR_CST
11544 || TREE_CODE (op1) == CONSTRUCTOR))
11545 {
11546 tree t = fold_vec_perm (type, op0, op1, sel);
11547 if (t != NULL_TREE)
11548 return t;
11549 }
11550
11551 if (op0 == op1 && !single_arg)
11552 changed = true;
11553
11554 /* Some targets are deficient and fail to expand a single
11555 argument permutation while still allowing an equivalent
11556 2-argument version. */
11557 if (need_mask_canon && arg2 == op2
11558 && !can_vec_perm_p (TYPE_MODE (type), false, &sel)
11559 && can_vec_perm_p (TYPE_MODE (type), false, &sel2))
11560 {
11561 need_mask_canon = need_mask_canon2;
11562 sel = sel2;
11563 }
11564
11565 if (need_mask_canon && arg2 == op2)
11566 {
11567 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11568 auto_vec<tree, 32> tsel (nelts);
11569 for (i = 0; i < nelts; i++)
11570 tsel.quick_push (build_int_cst (eltype, sel[i]));
11571 op2 = build_vector (TREE_TYPE (arg2), tsel);
11572 changed = true;
11573 }
11574
11575 if (changed)
11576 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11577 }
11578 return NULL_TREE;
11579
11580 case BIT_INSERT_EXPR:
11581 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
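/* E.g. inserting an 8-bit ARG1 at bit position 8 of a 32-bit ARG0
computes (ARG0 & ~(0xFF << 8)) | ((unsigned) ARG1 << 8): mask out
the inserted field, then OR in ARG1 zero-extended and shifted into
place. */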
11582 if (TREE_CODE (arg0) == INTEGER_CST
11583 && TREE_CODE (arg1) == INTEGER_CST)
11584 {
11585 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11586 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11587 wide_int tem = (wi::to_wide (arg0)
11588 & wi::shifted_mask (bitpos, bitsize, true,
11589 TYPE_PRECISION (type)));
11590 wide_int tem2
11591 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11592 bitsize), bitpos);
11593 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11594 }
11595 else if (TREE_CODE (arg0) == VECTOR_CST
11596 && CONSTANT_CLASS_P (arg1)
11597 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11598 TREE_TYPE (arg1)))
11599 {
11600 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11601 unsigned HOST_WIDE_INT elsize
11602 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11603 if (bitpos % elsize == 0)
11604 {
11605 unsigned k = bitpos / elsize;
11606 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11607 return arg0;
11608 else
11609 {
11610 unsigned int nelts = VECTOR_CST_NELTS (arg0);
11611 auto_vec<tree, 32> elts (nelts);
11612 elts.quick_grow (nelts);
11613 memcpy (&elts[0], VECTOR_CST_ELTS (arg0),
11614 sizeof (tree) * nelts);
11615 elts[k] = arg1;
11616 return build_vector (type, elts);
11617 }
11618 }
11619 }
11620 return NULL_TREE;
11621
11622 default:
11623 return NULL_TREE;
11624 } /* switch (code) */
11625 }
11626
11627 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11628 of an array (or vector). */
11629
11630 tree
11631 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11632 {
11633 tree index_type = NULL_TREE;
11634 offset_int low_bound = 0;
11635
11636 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11637 {
11638 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11639 if (domain_type && TYPE_MIN_VALUE (domain_type))
11640 {
11641 /* Static constructors for variably sized objects make no sense. */
11642 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11643 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11644 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11645 }
11646 }
11647
11648 if (index_type)
11649 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11650 TYPE_SIGN (index_type));
11651
11652 offset_int index = low_bound - 1;
11653 if (index_type)
11654 index = wi::ext (index, TYPE_PRECISION (index_type),
11655 TYPE_SIGN (index_type));
11656
11657 offset_int max_index;
11658 unsigned HOST_WIDE_INT cnt;
11659 tree cfield, cval;
11660
11661 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11662 {
11663 /* An array constructor might explicitly set the index, specify a
11664 range, or leave the index NULL, meaning that it is the next index
11665 after the previous one. */
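/* E.g. in { [2] = a, b, [4 ... 6] = c }, element B implicitly gets
index 3 and C covers indices 4 through 6. */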
11666 if (cfield)
11667 {
11668 if (TREE_CODE (cfield) == INTEGER_CST)
11669 max_index = index = wi::to_offset (cfield);
11670 else
11671 {
11672 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11673 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11674 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11675 }
11676 }
11677 else
11678 {
11679 index += 1;
11680 if (index_type)
11681 index = wi::ext (index, TYPE_PRECISION (index_type),
11682 TYPE_SIGN (index_type));
11683 max_index = index;
11684 }
11685
11686 /* Do we have a match? */
11687 if (wi::cmpu (access_index, index) >= 0
11688 && wi::cmpu (access_index, max_index) <= 0)
11689 return cval;
11690 }
11691 return NULL_TREE;
11692 }
11693
11694 /* Perform constant folding and related simplification of EXPR.
11695 The related simplifications include x*1 => x, x*0 => 0, etc.,
11696 and application of the associative law.
11697 NOP_EXPR conversions may be removed freely (as long as we
11698 are careful not to change the type of the overall expression).
11699 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11700 but we can constant-fold them if they have constant operands. */
11701
11702 #ifdef ENABLE_FOLD_CHECKING
11703 # define fold(x) fold_1 (x)
11704 static tree fold_1 (tree);
11705 static
11706 #endif
11707 tree
11708 fold (tree expr)
11709 {
11710 const tree t = expr;
11711 enum tree_code code = TREE_CODE (t);
11712 enum tree_code_class kind = TREE_CODE_CLASS (code);
11713 tree tem;
11714 location_t loc = EXPR_LOCATION (expr);
11715
11716 /* Return right away if a constant. */
11717 if (kind == tcc_constant)
11718 return t;
11719
11720 /* CALL_EXPR-like objects with variable numbers of operands are
11721 treated specially. */
11722 if (kind == tcc_vl_exp)
11723 {
11724 if (code == CALL_EXPR)
11725 {
11726 tem = fold_call_expr (loc, expr, false);
11727 return tem ? tem : expr;
11728 }
11729 return expr;
11730 }
11731
11732 if (IS_EXPR_CODE_CLASS (kind))
11733 {
11734 tree type = TREE_TYPE (t);
11735 tree op0, op1, op2;
11736
11737 switch (TREE_CODE_LENGTH (code))
11738 {
11739 case 1:
11740 op0 = TREE_OPERAND (t, 0);
11741 tem = fold_unary_loc (loc, code, type, op0);
11742 return tem ? tem : expr;
11743 case 2:
11744 op0 = TREE_OPERAND (t, 0);
11745 op1 = TREE_OPERAND (t, 1);
11746 tem = fold_binary_loc (loc, code, type, op0, op1);
11747 return tem ? tem : expr;
11748 case 3:
11749 op0 = TREE_OPERAND (t, 0);
11750 op1 = TREE_OPERAND (t, 1);
11751 op2 = TREE_OPERAND (t, 2);
11752 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11753 return tem ? tem : expr;
11754 default:
11755 break;
11756 }
11757 }
11758
11759 switch (code)
11760 {
11761 case ARRAY_REF:
11762 {
11763 tree op0 = TREE_OPERAND (t, 0);
11764 tree op1 = TREE_OPERAND (t, 1);
11765
11766 if (TREE_CODE (op1) == INTEGER_CST
11767 && TREE_CODE (op0) == CONSTRUCTOR
11768 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11769 {
11770 tree val = get_array_ctor_element_at_index (op0,
11771 wi::to_offset (op1));
11772 if (val)
11773 return val;
11774 }
11775
11776 return t;
11777 }
11778
11779 /* Return a VECTOR_CST if possible. */
11780 case CONSTRUCTOR:
11781 {
11782 tree type = TREE_TYPE (t);
11783 if (TREE_CODE (type) != VECTOR_TYPE)
11784 return t;
11785
11786 unsigned i;
11787 tree val;
11788 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11789 if (! CONSTANT_CLASS_P (val))
11790 return t;
11791
11792 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11793 }
11794
11795 case CONST_DECL:
11796 return fold (DECL_INITIAL (t));
11797
11798 default:
11799 return t;
11800 } /* switch (code) */
11801 }
11802
11803 #ifdef ENABLE_FOLD_CHECKING
11804 #undef fold
11805
11806 static void fold_checksum_tree (const_tree, struct md5_ctx *,
11807 hash_table<nofree_ptr_hash<const tree_node> > *);
11808 static void fold_check_failed (const_tree, const_tree);
11809 void print_fold_checksum (const_tree);
11810
11811 /* When --enable-checking=fold, compute a digest of EXPR before
11812 and after the actual fold call to verify that fold did not
11813 accidentally change the original expr. */
11814
11815 tree
11816 fold (tree expr)
11817 {
11818 tree ret;
11819 struct md5_ctx ctx;
11820 unsigned char checksum_before[16], checksum_after[16];
11821 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11822
11823 md5_init_ctx (&ctx);
11824 fold_checksum_tree (expr, &ctx, &ht);
11825 md5_finish_ctx (&ctx, checksum_before);
11826 ht.empty ();
11827
11828 ret = fold_1 (expr);
11829
11830 md5_init_ctx (&ctx);
11831 fold_checksum_tree (expr, &ctx, &ht);
11832 md5_finish_ctx (&ctx, checksum_after);
11833
11834 if (memcmp (checksum_before, checksum_after, 16))
11835 fold_check_failed (expr, ret);
11836
11837 return ret;
11838 }
11839
11840 void
11841 print_fold_checksum (const_tree expr)
11842 {
11843 struct md5_ctx ctx;
11844 unsigned char checksum[16], cnt;
11845 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11846
11847 md5_init_ctx (&ctx);
11848 fold_checksum_tree (expr, &ctx, &ht);
11849 md5_finish_ctx (&ctx, checksum);
11850 for (cnt = 0; cnt < 16; ++cnt)
11851 fprintf (stderr, "%02x", checksum[cnt]);
11852 putc ('\n', stderr);
11853 }
11854
11855 static void
11856 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
11857 {
11858 internal_error ("fold check: original tree changed by fold");
11859 }
11860
11861 static void
11862 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
11863 hash_table<nofree_ptr_hash <const tree_node> > *ht)
11864 {
11865 const tree_node **slot;
11866 enum tree_code code;
11867 union tree_node buf;
11868 int i, len;
11869
11870 recursive_label:
11871 if (expr == NULL)
11872 return;
11873 slot = ht->find_slot (expr, INSERT);
11874 if (*slot != NULL)
11875 return;
11876 *slot = expr;
11877 code = TREE_CODE (expr);
11878 if (TREE_CODE_CLASS (code) == tcc_declaration
11879 && HAS_DECL_ASSEMBLER_NAME_P (expr))
11880 {
11881 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
11882 memcpy ((char *) &buf, expr, tree_size (expr));
11883 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
11884 buf.decl_with_vis.symtab_node = NULL;
11885 expr = (tree) &buf;
11886 }
11887 else if (TREE_CODE_CLASS (code) == tcc_type
11888 && (TYPE_POINTER_TO (expr)
11889 || TYPE_REFERENCE_TO (expr)
11890 || TYPE_CACHED_VALUES_P (expr)
11891 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
11892 || TYPE_NEXT_VARIANT (expr)
11893 || TYPE_ALIAS_SET_KNOWN_P (expr)))
11894 {
11895 /* Allow these fields to be modified. */
11896 tree tmp;
11897 memcpy ((char *) &buf, expr, tree_size (expr));
11898 expr = tmp = (tree) &buf;
11899 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
11900 TYPE_POINTER_TO (tmp) = NULL;
11901 TYPE_REFERENCE_TO (tmp) = NULL;
11902 TYPE_NEXT_VARIANT (tmp) = NULL;
11903 TYPE_ALIAS_SET (tmp) = -1;
11904 if (TYPE_CACHED_VALUES_P (tmp))
11905 {
11906 TYPE_CACHED_VALUES_P (tmp) = 0;
11907 TYPE_CACHED_VALUES (tmp) = NULL;
11908 }
11909 }
11910 md5_process_bytes (expr, tree_size (expr), ctx);
11911 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
11912 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11913 if (TREE_CODE_CLASS (code) != tcc_type
11914 && TREE_CODE_CLASS (code) != tcc_declaration
11915 && code != TREE_LIST
11916 && code != SSA_NAME
11917 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
11918 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11919 switch (TREE_CODE_CLASS (code))
11920 {
11921 case tcc_constant:
11922 switch (code)
11923 {
11924 case STRING_CST:
11925 md5_process_bytes (TREE_STRING_POINTER (expr),
11926 TREE_STRING_LENGTH (expr), ctx);
11927 break;
11928 case COMPLEX_CST:
11929 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11930 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11931 break;
11932 case VECTOR_CST:
11933 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
11934 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
11935 break;
11936 default:
11937 break;
11938 }
11939 break;
11940 case tcc_exceptional:
11941 switch (code)
11942 {
11943 case TREE_LIST:
11944 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11945 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11946 expr = TREE_CHAIN (expr);
11947 goto recursive_label;
11948 break;
11949 case TREE_VEC:
11950 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11951 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11952 break;
11953 default:
11954 break;
11955 }
11956 break;
11957 case tcc_expression:
11958 case tcc_reference:
11959 case tcc_comparison:
11960 case tcc_unary:
11961 case tcc_binary:
11962 case tcc_statement:
11963 case tcc_vl_exp:
11964 len = TREE_OPERAND_LENGTH (expr);
11965 for (i = 0; i < len; ++i)
11966 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11967 break;
11968 case tcc_declaration:
11969 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11970 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11971 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11972 {
11973 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11974 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11975 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11976 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11977 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11978 }
11979
11980 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11981 {
11982 if (TREE_CODE (expr) == FUNCTION_DECL)
11983 {
11984 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11985 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
11986 }
11987 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11988 }
11989 break;
11990 case tcc_type:
11991 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11992 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11993 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11994 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11995 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11996 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11997 if (INTEGRAL_TYPE_P (expr)
11998 || SCALAR_FLOAT_TYPE_P (expr))
11999 {
12000 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12001 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12002 }
12003 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12004 if (TREE_CODE (expr) == RECORD_TYPE
12005 || TREE_CODE (expr) == UNION_TYPE
12006 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12007 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12008 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12009 break;
12010 default:
12011 break;
12012 }
12013 }
12014
12015 /* Helper function for outputting the checksum of a tree T. When
12016 debugging with gdb, you can "define mynext" to be "next" followed
12017 by "call debug_fold_checksum (op0)", then just trace down till the
12018 outputs differ. */
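/* A minimal gdb sketch of that recipe (illustrative only; "op0" stands
   for whatever local you want to watch in your session):

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end

   then step with "mynext" until the printed checksums change.  */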
12019
12020 DEBUG_FUNCTION void
12021 debug_fold_checksum (const_tree t)
12022 {
12023 int i;
12024 unsigned char checksum[16];
12025 struct md5_ctx ctx;
12026 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12027
12028 md5_init_ctx (&ctx);
12029 fold_checksum_tree (t, &ctx, &ht);
12030 md5_finish_ctx (&ctx, checksum);
12031 ht.empty ();
12032
12033 for (i = 0; i < 16; i++)
12034 fprintf (stderr, "%d ", checksum[i]);
12035
12036 fprintf (stderr, "\n");
12037 }
12038
12039 #endif
12040
12041 /* Fold a unary tree expression with code CODE of type TYPE with an
12042 operand OP0. LOC is the location of the resulting expression.
12043 Return a folded expression if successful. Otherwise, return a tree
12044 expression with code CODE of type TYPE with an operand OP0. */
12045
12046 tree
12047 fold_build1_loc (location_t loc,
12048 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12049 {
12050 tree tem;
12051 #ifdef ENABLE_FOLD_CHECKING
12052 unsigned char checksum_before[16], checksum_after[16];
12053 struct md5_ctx ctx;
12054 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12055
12056 md5_init_ctx (&ctx);
12057 fold_checksum_tree (op0, &ctx, &ht);
12058 md5_finish_ctx (&ctx, checksum_before);
12059 ht.empty ();
12060 #endif
12061
12062 tem = fold_unary_loc (loc, code, type, op0);
12063 if (!tem)
12064 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12065
12066 #ifdef ENABLE_FOLD_CHECKING
12067 md5_init_ctx (&ctx);
12068 fold_checksum_tree (op0, &ctx, &ht);
12069 md5_finish_ctx (&ctx, checksum_after);
12070
12071 if (memcmp (checksum_before, checksum_after, 16))
12072 fold_check_failed (op0, tem);
12073 #endif
12074 return tem;
12075 }
12076
12077 /* Fold a binary tree expression with code CODE of type TYPE with
12078 operands OP0 and OP1. LOC is the location of the resulting
12079 expression. Return a folded expression if successful. Otherwise,
12080 return a tree expression with code CODE of type TYPE with operands
12081 OP0 and OP1. */
12082
12083 tree
12084 fold_build2_loc (location_t loc,
12085 enum tree_code code, tree type, tree op0, tree op1
12086 MEM_STAT_DECL)
12087 {
12088 tree tem;
12089 #ifdef ENABLE_FOLD_CHECKING
12090 unsigned char checksum_before_op0[16],
12091 checksum_before_op1[16],
12092 checksum_after_op0[16],
12093 checksum_after_op1[16];
12094 struct md5_ctx ctx;
12095 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12096
12097 md5_init_ctx (&ctx);
12098 fold_checksum_tree (op0, &ctx, &ht);
12099 md5_finish_ctx (&ctx, checksum_before_op0);
12100 ht.empty ();
12101
12102 md5_init_ctx (&ctx);
12103 fold_checksum_tree (op1, &ctx, &ht);
12104 md5_finish_ctx (&ctx, checksum_before_op1);
12105 ht.empty ();
12106 #endif
12107
12108 tem = fold_binary_loc (loc, code, type, op0, op1);
12109 if (!tem)
12110 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12111
12112 #ifdef ENABLE_FOLD_CHECKING
12113 md5_init_ctx (&ctx);
12114 fold_checksum_tree (op0, &ctx, &ht);
12115 md5_finish_ctx (&ctx, checksum_after_op0);
12116 ht.empty ();
12117
12118 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12119 fold_check_failed (op0, tem);
12120
12121 md5_init_ctx (&ctx);
12122 fold_checksum_tree (op1, &ctx, &ht);
12123 md5_finish_ctx (&ctx, checksum_after_op1);
12124
12125 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12126 fold_check_failed (op1, tem);
12127 #endif
12128 return tem;
12129 }
12130
12131 /* Fold a ternary tree expression with code CODE of type TYPE with
12132 operands OP0, OP1, and OP2. Return a folded expression if
12133 successful. Otherwise, return a tree expression with code CODE of
12134 type TYPE with operands OP0, OP1, and OP2. */
12135
12136 tree
12137 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12138 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12139 {
12140 tree tem;
12141 #ifdef ENABLE_FOLD_CHECKING
12142 unsigned char checksum_before_op0[16],
12143 checksum_before_op1[16],
12144 checksum_before_op2[16],
12145 checksum_after_op0[16],
12146 checksum_after_op1[16],
12147 checksum_after_op2[16];
12148 struct md5_ctx ctx;
12149 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12150
12151 md5_init_ctx (&ctx);
12152 fold_checksum_tree (op0, &ctx, &ht);
12153 md5_finish_ctx (&ctx, checksum_before_op0);
12154 ht.empty ();
12155
12156 md5_init_ctx (&ctx);
12157 fold_checksum_tree (op1, &ctx, &ht);
12158 md5_finish_ctx (&ctx, checksum_before_op1);
12159 ht.empty ();
12160
12161 md5_init_ctx (&ctx);
12162 fold_checksum_tree (op2, &ctx, &ht);
12163 md5_finish_ctx (&ctx, checksum_before_op2);
12164 ht.empty ();
12165 #endif
12166
12167 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12168 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12169 if (!tem)
12170 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12171
12172 #ifdef ENABLE_FOLD_CHECKING
12173 md5_init_ctx (&ctx);
12174 fold_checksum_tree (op0, &ctx, &ht);
12175 md5_finish_ctx (&ctx, checksum_after_op0);
12176 ht.empty ();
12177
12178 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12179 fold_check_failed (op0, tem);
12180
12181 md5_init_ctx (&ctx);
12182 fold_checksum_tree (op1, &ctx, &ht);
12183 md5_finish_ctx (&ctx, checksum_after_op1);
12184 ht.empty ();
12185
12186 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12187 fold_check_failed (op1, tem);
12188
12189 md5_init_ctx (&ctx);
12190 fold_checksum_tree (op2, &ctx, &ht);
12191 md5_finish_ctx (&ctx, checksum_after_op2);
12192
12193 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12194 fold_check_failed (op2, tem);
12195 #endif
12196 return tem;
12197 }
12198
12199 /* Fold a CALL_EXPR expression of type TYPE with function operand FN,
12200 NARGS arguments in ARGARRAY, and a null static chain.
12201 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12202 of type TYPE from the given operands as constructed by build_call_array. */
12203
12204 tree
12205 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12206 int nargs, tree *argarray)
12207 {
12208 tree tem;
12209 #ifdef ENABLE_FOLD_CHECKING
12210 unsigned char checksum_before_fn[16],
12211 checksum_before_arglist[16],
12212 checksum_after_fn[16],
12213 checksum_after_arglist[16];
12214 struct md5_ctx ctx;
12215 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12216 int i;
12217
12218 md5_init_ctx (&ctx);
12219 fold_checksum_tree (fn, &ctx, &ht);
12220 md5_finish_ctx (&ctx, checksum_before_fn);
12221 ht.empty ();
12222
12223 md5_init_ctx (&ctx);
12224 for (i = 0; i < nargs; i++)
12225 fold_checksum_tree (argarray[i], &ctx, &ht);
12226 md5_finish_ctx (&ctx, checksum_before_arglist);
12227 ht.empty ();
12228 #endif
12229
12230 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12231 if (!tem)
12232 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12233
12234 #ifdef ENABLE_FOLD_CHECKING
12235 md5_init_ctx (&ctx);
12236 fold_checksum_tree (fn, &ctx, &ht);
12237 md5_finish_ctx (&ctx, checksum_after_fn);
12238 ht.empty ();
12239
12240 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12241 fold_check_failed (fn, tem);
12242
12243 md5_init_ctx (&ctx);
12244 for (i = 0; i < nargs; i++)
12245 fold_checksum_tree (argarray[i], &ctx, &ht);
12246 md5_finish_ctx (&ctx, checksum_after_arglist);
12247
12248 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12249 fold_check_failed (NULL_TREE, tem);
12250 #endif
12251 return tem;
12252 }
12253
12254 /* Perform constant folding and related simplification of initializer
12255 expression EXPR. These behave identically to "fold_buildN" but ignore
12256 potential run-time traps and exceptions that fold must preserve. */
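/* For instance (an illustrative example, not from the sources), a
   static initializer such as

     static double d = 1.0 / 3.0;

   must be folded to a constant at compile time even under
   -frounding-math or -ftrapping-math, so these macros clear the
   relevant flags around the fold and restore them afterwards.  */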
12257
12258 #define START_FOLD_INIT \
12259 int saved_signaling_nans = flag_signaling_nans;\
12260 int saved_trapping_math = flag_trapping_math;\
12261 int saved_rounding_math = flag_rounding_math;\
12262 int saved_trapv = flag_trapv;\
12263 int saved_folding_initializer = folding_initializer;\
12264 flag_signaling_nans = 0;\
12265 flag_trapping_math = 0;\
12266 flag_rounding_math = 0;\
12267 flag_trapv = 0;\
12268 folding_initializer = 1;
12269
12270 #define END_FOLD_INIT \
12271 flag_signaling_nans = saved_signaling_nans;\
12272 flag_trapping_math = saved_trapping_math;\
12273 flag_rounding_math = saved_rounding_math;\
12274 flag_trapv = saved_trapv;\
12275 folding_initializer = saved_folding_initializer;
12276
12277 tree
12278 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12279 tree type, tree op)
12280 {
12281 tree result;
12282 START_FOLD_INIT;
12283
12284 result = fold_build1_loc (loc, code, type, op);
12285
12286 END_FOLD_INIT;
12287 return result;
12288 }
12289
12290 tree
12291 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12292 tree type, tree op0, tree op1)
12293 {
12294 tree result;
12295 START_FOLD_INIT;
12296
12297 result = fold_build2_loc (loc, code, type, op0, op1);
12298
12299 END_FOLD_INIT;
12300 return result;
12301 }
12302
12303 tree
12304 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12305 int nargs, tree *argarray)
12306 {
12307 tree result;
12308 START_FOLD_INIT;
12309
12310 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12311
12312 END_FOLD_INIT;
12313 return result;
12314 }
12315
12316 #undef START_FOLD_INIT
12317 #undef END_FOLD_INIT
12318
12319 /* Determine whether the first argument is a multiple of the second argument.
12320 Return 0 if it is not, or if we cannot easily determine it to be.
12321
12322 An example of the sort of thing we care about (at this point; this routine
12323 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12324 fold cases do now) is discovering that
12325
12326 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12327
12328 is a multiple of
12329
12330 SAVE_EXPR (J * 8)
12331
12332 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12333
12334 This code also handles discovering that
12335
12336 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12337
12338 is a multiple of 8 so we don't have to worry about dealing with a
12339 possible remainder.
12340
12341 Note that we *look* inside a SAVE_EXPR only to determine how it was
12342 calculated; it is not safe for fold to do much of anything else with the
12343 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12344 at run time. For example, the latter example above *cannot* be implemented
12345 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12346 evaluation time of the original SAVE_EXPR is not necessarily the same at
12347 the time the new expression is evaluated. The only optimization of this
12348 sort that would be valid is changing
12349
12350 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12351
12352 divided by 8 to
12353
12354 SAVE_EXPR (I) * SAVE_EXPR (J)
12355
12356 (where the same SAVE_EXPR (J) is used in the original and the
12357 transformed version). */
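/* An illustrative call (values made up): with TYPE sizetype,
   TOP = J * 8 and BOTTOM = 4, the MULT_EXPR case below returns 1
   because the constant operand 8 is a multiple of 4.  */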
12358
12359 int
12360 multiple_of_p (tree type, const_tree top, const_tree bottom)
12361 {
12362 gimple *stmt;
12363 tree t1, op1, op2;
12364
12365 if (operand_equal_p (top, bottom, 0))
12366 return 1;
12367
12368 if (TREE_CODE (type) != INTEGER_TYPE)
12369 return 0;
12370
12371 switch (TREE_CODE (top))
12372 {
12373 case BIT_AND_EXPR:
12374 /* Bitwise and provides a power of two multiple. If the mask is
12375 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12376 if (!integer_pow2p (bottom))
12377 return 0;
12378 /* FALLTHRU */
12379
12380 case MULT_EXPR:
12381 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12382 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12383
12384 case MINUS_EXPR:
12385 /* It is impossible to prove precisely whether op0 - op1 is a multiple
12386 of bottom, so be conservative and check whether both op0 and op1
12387 are multiples of bottom.  Note that we check the second operand first
12388 since it's usually simpler.  */
12389 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12390 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12391
12392 case PLUS_EXPR:
12393 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12394 as op0 - 3 if the expression has unsigned type.  For example,
12395 (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not.  */
12396 op1 = TREE_OPERAND (top, 1);
12397 if (TYPE_UNSIGNED (type)
12398 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12399 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12400 return (multiple_of_p (type, op1, bottom)
12401 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12402
12403 case LSHIFT_EXPR:
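/* TOP = X << N equals X * (1 << N), so it suffices to ask whether
   1 << N is a multiple of BOTTOM; e.g. X << 4 is a multiple of 8
   because 16 is.  */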
12404 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12405 {
12406 op1 = TREE_OPERAND (top, 1);
12407 /* const_binop may not detect overflow correctly,
12408 so check for it explicitly here. */
12409 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12410 wi::to_wide (op1))
12411 && 0 != (t1 = fold_convert (type,
12412 const_binop (LSHIFT_EXPR,
12413 size_one_node,
12414 op1)))
12415 && !TREE_OVERFLOW (t1))
12416 return multiple_of_p (type, t1, bottom);
12417 }
12418 return 0;
12419
12420 case NOP_EXPR:
12421 /* Can't handle conversions from non-integral or wider integral type. */
12422 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12423 || (TYPE_PRECISION (type)
12424 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12425 return 0;
12426
12427 /* fall through */
12428
12429 case SAVE_EXPR:
12430 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12431
12432 case COND_EXPR:
12433 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12434 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12435
12436 case INTEGER_CST:
12437 if (TREE_CODE (bottom) != INTEGER_CST
12438 || integer_zerop (bottom)
12439 || (TYPE_UNSIGNED (type)
12440 && (tree_int_cst_sgn (top) < 0
12441 || tree_int_cst_sgn (bottom) < 0)))
12442 return 0;
12443 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12444 SIGNED);
12445
12446 case SSA_NAME:
12447 if (TREE_CODE (bottom) == INTEGER_CST
12448 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12449 && gimple_code (stmt) == GIMPLE_ASSIGN)
12450 {
12451 enum tree_code code = gimple_assign_rhs_code (stmt);
12452
12453 /* Check for special cases to see if top is defined as a multiple
12454 of bottom:
12455
12456 top = X & ~(bottom - 1); bottom is a power of 2
12457
12458 or
12459
12460 Y = X % bottom
12461 top = X - Y. */
12462 if (code == BIT_AND_EXPR
12463 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12464 && TREE_CODE (op2) == INTEGER_CST
12465 && integer_pow2p (bottom)
12466 && wi::multiple_of_p (wi::to_widest (op2),
12467 wi::to_widest (bottom), UNSIGNED))
12468 return 1;
12469
12470 op1 = gimple_assign_rhs1 (stmt);
12471 if (code == MINUS_EXPR
12472 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12473 && TREE_CODE (op2) == SSA_NAME
12474 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12475 && gimple_code (stmt) == GIMPLE_ASSIGN
12476 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12477 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12478 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12479 return 1;
12480 }
12481
12482 /* fall through */
12483
12484 default:
12485 return 0;
12486 }
12487 }
12488
12489 #define tree_expr_nonnegative_warnv_p(X, Y) \
12490 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12491
12492 #define RECURSE(X) \
12493 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12494
12495 /* Return true if an expression with code CODE and type TYPE is known to be non-negative.  */
12496
12497 static bool
12498 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12499 {
12500 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12501 && truth_value_p (code))
12502 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12503 have a signed:1 type (where the values are -1 and 0).  */
12504 return true;
12505 return false;
12506 }
12507
12508 /* Return true if (CODE OP0) is known to be non-negative. If the return
12509 value is based on the assumption that signed overflow is undefined,
12510 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12511 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12512
12513 bool
12514 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12515 bool *strict_overflow_p, int depth)
12516 {
12517 if (TYPE_UNSIGNED (type))
12518 return true;
12519
12520 switch (code)
12521 {
12522 case ABS_EXPR:
12523 /* We can't return 1 if flag_wrapv is set because
12524 ABS_EXPR<INT_MIN> = INT_MIN. */
12525 if (!ANY_INTEGRAL_TYPE_P (type))
12526 return true;
12527 if (TYPE_OVERFLOW_UNDEFINED (type))
12528 {
12529 *strict_overflow_p = true;
12530 return true;
12531 }
12532 break;
12533
12534 case NON_LVALUE_EXPR:
12535 case FLOAT_EXPR:
12536 case FIX_TRUNC_EXPR:
12537 return RECURSE (op0);
12538
12539 CASE_CONVERT:
12540 {
12541 tree inner_type = TREE_TYPE (op0);
12542 tree outer_type = type;
12543
12544 if (TREE_CODE (outer_type) == REAL_TYPE)
12545 {
12546 if (TREE_CODE (inner_type) == REAL_TYPE)
12547 return RECURSE (op0);
12548 if (INTEGRAL_TYPE_P (inner_type))
12549 {
12550 if (TYPE_UNSIGNED (inner_type))
12551 return true;
12552 return RECURSE (op0);
12553 }
12554 }
12555 else if (INTEGRAL_TYPE_P (outer_type))
12556 {
12557 if (TREE_CODE (inner_type) == REAL_TYPE)
12558 return RECURSE (op0);
12559 if (INTEGRAL_TYPE_P (inner_type))
12560 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12561 && TYPE_UNSIGNED (inner_type);
12562 }
12563 }
12564 break;
12565
12566 default:
12567 return tree_simple_nonnegative_warnv_p (code, type);
12568 }
12569
12570 /* We don't know sign of `t', so be conservative and return false. */
12571 return false;
12572 }
12573
12574 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12575 value is based on the assumption that signed overflow is undefined,
12576 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12577 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12578
12579 bool
12580 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12581 tree op1, bool *strict_overflow_p,
12582 int depth)
12583 {
12584 if (TYPE_UNSIGNED (type))
12585 return true;
12586
12587 switch (code)
12588 {
12589 case POINTER_PLUS_EXPR:
12590 case PLUS_EXPR:
12591 if (FLOAT_TYPE_P (type))
12592 return RECURSE (op0) && RECURSE (op1);
12593
12594 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12595 both unsigned and at least 2 bits shorter than the result. */
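/* E.g. (illustrative, assuming 32-bit int and 8-bit unsigned char)
   (int) (unsigned char) x + (int) (unsigned char) y is at most
   255 + 255 = 510, which always fits in a non-negative int, since
   the sum needs only 9 bits.  */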
12596 if (TREE_CODE (type) == INTEGER_TYPE
12597 && TREE_CODE (op0) == NOP_EXPR
12598 && TREE_CODE (op1) == NOP_EXPR)
12599 {
12600 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12601 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12602 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12603 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12604 {
12605 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12606 TYPE_PRECISION (inner2)) + 1;
12607 return prec < TYPE_PRECISION (type);
12608 }
12609 }
12610 break;
12611
12612 case MULT_EXPR:
12613 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12614 {
12615 /* x * x is always non-negative for floating point x, and for
12616 integer x when signed overflow is undefined.  */
12617 if (operand_equal_p (op0, op1, 0)
12618 || (RECURSE (op0) && RECURSE (op1)))
12619 {
12620 if (ANY_INTEGRAL_TYPE_P (type)
12621 && TYPE_OVERFLOW_UNDEFINED (type))
12622 *strict_overflow_p = true;
12623 return true;
12624 }
12625 }
12626
12627 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12628 both unsigned and their combined precision is less than that of the result.  */
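/* E.g. (illustrative, assuming 32-bit int and 8-bit unsigned char)
   (int) (unsigned char) x * (int) (unsigned char) y is at most
   255 * 255 = 65025, which needs only 16 bits, so the product can
   never wrap to a negative int.  */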
12629 if (TREE_CODE (type) == INTEGER_TYPE
12630 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12631 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12632 {
12633 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12634 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12635 : TREE_TYPE (op0);
12636 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12637 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12638 : TREE_TYPE (op1);
12639
12640 bool unsigned0 = TYPE_UNSIGNED (inner0);
12641 bool unsigned1 = TYPE_UNSIGNED (inner1);
12642
12643 if (TREE_CODE (op0) == INTEGER_CST)
12644 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12645
12646 if (TREE_CODE (op1) == INTEGER_CST)
12647 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12648
12649 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12650 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12651 {
12652 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12653 ? tree_int_cst_min_precision (op0, UNSIGNED)
12654 : TYPE_PRECISION (inner0);
12655
12656 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12657 ? tree_int_cst_min_precision (op1, UNSIGNED)
12658 : TYPE_PRECISION (inner1);
12659
12660 return precision0 + precision1 < TYPE_PRECISION (type);
12661 }
12662 }
12663 return false;
12664
12665 case BIT_AND_EXPR:
12666 case MAX_EXPR:
12667 return RECURSE (op0) || RECURSE (op1);
12668
12669 case BIT_IOR_EXPR:
12670 case BIT_XOR_EXPR:
12671 case MIN_EXPR:
12672 case RDIV_EXPR:
12673 case TRUNC_DIV_EXPR:
12674 case CEIL_DIV_EXPR:
12675 case FLOOR_DIV_EXPR:
12676 case ROUND_DIV_EXPR:
12677 return RECURSE (op0) && RECURSE (op1);
12678
12679 case TRUNC_MOD_EXPR:
12680 return RECURSE (op0);
12681
12682 case FLOOR_MOD_EXPR:
12683 return RECURSE (op1);
12684
12685 case CEIL_MOD_EXPR:
12686 case ROUND_MOD_EXPR:
12687 default:
12688 return tree_simple_nonnegative_warnv_p (code, type);
12689 }
12690
12691 /* We don't know sign of `t', so be conservative and return false. */
12692 return false;
12693 }
12694
12695 /* Return true if T is known to be non-negative. If the return
12696 value is based on the assumption that signed overflow is undefined,
12697 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12698 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12699
12700 bool
12701 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12702 {
12703 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12704 return true;
12705
12706 switch (TREE_CODE (t))
12707 {
12708 case INTEGER_CST:
12709 return tree_int_cst_sgn (t) >= 0;
12710
12711 case REAL_CST:
12712 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12713
12714 case FIXED_CST:
12715 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12716
12717 case COND_EXPR:
12718 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12719
12720 case SSA_NAME:
12721 /* Limit the depth of recursion to avoid quadratic behavior.
12722 This is expected to catch almost all occurrences in practice.
12723 If this code misses important cases that unbounded recursion
12724 would not, passes that need this information could be revised
12725 to provide it through dataflow propagation. */
12726 return (!name_registered_for_update_p (t)
12727 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12728 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12729 strict_overflow_p, depth));
12730
12731 default:
12732 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12733 }
12734 }
12735
12736 /* Return true if T is known to be non-negative. If the return
12737 value is based on the assumption that signed overflow is undefined,
12738 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12739 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12740
12741 bool
12742 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12743 bool *strict_overflow_p, int depth)
12744 {
12745 switch (fn)
12746 {
12747 CASE_CFN_ACOS:
12748 CASE_CFN_ACOSH:
12749 CASE_CFN_CABS:
12750 CASE_CFN_COSH:
12751 CASE_CFN_ERFC:
12752 CASE_CFN_EXP:
12753 CASE_CFN_EXP10:
12754 CASE_CFN_EXP2:
12755 CASE_CFN_FABS:
12756 CASE_CFN_FDIM:
12757 CASE_CFN_HYPOT:
12758 CASE_CFN_POW10:
12759 CASE_CFN_FFS:
12760 CASE_CFN_PARITY:
12761 CASE_CFN_POPCOUNT:
12762 CASE_CFN_CLZ:
12763 CASE_CFN_CLRSB:
12764 case CFN_BUILT_IN_BSWAP32:
12765 case CFN_BUILT_IN_BSWAP64:
12766 /* Always true. */
12767 return true;
12768
12769 CASE_CFN_SQRT:
12770 CASE_CFN_SQRT_FN:
12771 /* sqrt(-0.0) is -0.0. */
12772 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12773 return true;
12774 return RECURSE (arg0);
12775
12776 CASE_CFN_ASINH:
12777 CASE_CFN_ATAN:
12778 CASE_CFN_ATANH:
12779 CASE_CFN_CBRT:
12780 CASE_CFN_CEIL:
12781 CASE_CFN_ERF:
12782 CASE_CFN_EXPM1:
12783 CASE_CFN_FLOOR:
12784 CASE_CFN_FMOD:
12785 CASE_CFN_FREXP:
12786 CASE_CFN_ICEIL:
12787 CASE_CFN_IFLOOR:
12788 CASE_CFN_IRINT:
12789 CASE_CFN_IROUND:
12790 CASE_CFN_LCEIL:
12791 CASE_CFN_LDEXP:
12792 CASE_CFN_LFLOOR:
12793 CASE_CFN_LLCEIL:
12794 CASE_CFN_LLFLOOR:
12795 CASE_CFN_LLRINT:
12796 CASE_CFN_LLROUND:
12797 CASE_CFN_LRINT:
12798 CASE_CFN_LROUND:
12799 CASE_CFN_MODF:
12800 CASE_CFN_NEARBYINT:
12801 CASE_CFN_RINT:
12802 CASE_CFN_ROUND:
12803 CASE_CFN_SCALB:
12804 CASE_CFN_SCALBLN:
12805 CASE_CFN_SCALBN:
12806 CASE_CFN_SIGNBIT:
12807 CASE_CFN_SIGNIFICAND:
12808 CASE_CFN_SINH:
12809 CASE_CFN_TANH:
12810 CASE_CFN_TRUNC:
12811 /* True if the 1st argument is nonnegative. */
12812 return RECURSE (arg0);
12813
12814 CASE_CFN_FMAX:
12815 CASE_CFN_FMAX_FN:
12816 /* True if the 1st OR 2nd argument is nonnegative.  */
12817 return RECURSE (arg0) || RECURSE (arg1);
12818
12819 CASE_CFN_FMIN:
12820 CASE_CFN_FMIN_FN:
12821 /* True if the 1st AND 2nd arguments are nonnegative. */
12822 return RECURSE (arg0) && RECURSE (arg1);
12823
12824 CASE_CFN_COPYSIGN:
12825 CASE_CFN_COPYSIGN_FN:
12826 /* True if the 2nd argument is nonnegative. */
12827 return RECURSE (arg1);
12828
12829 CASE_CFN_POWI:
12830 /* True if the 1st argument is nonnegative or the second
12831 argument is an even integer. */
12832 if (TREE_CODE (arg1) == INTEGER_CST
12833 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
12834 return true;
12835 return RECURSE (arg0);
12836
12837 CASE_CFN_POW:
12838 /* True if the 1st argument is nonnegative or the second
12839 argument is an even integer valued real. */
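/* E.g. pow (x, 2.0) is treated as non-negative for any x, since an
   even integral exponent yields a product with an even number of
   equal factors.  */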
12840 if (TREE_CODE (arg1) == REAL_CST)
12841 {
12842 REAL_VALUE_TYPE c;
12843 HOST_WIDE_INT n;
12844
12845 c = TREE_REAL_CST (arg1);
12846 n = real_to_integer (&c);
12847 if ((n & 1) == 0)
12848 {
12849 REAL_VALUE_TYPE cint;
12850 real_from_integer (&cint, VOIDmode, n, SIGNED);
12851 if (real_identical (&c, &cint))
12852 return true;
12853 }
12854 }
12855 return RECURSE (arg0);
12856
12857 default:
12858 break;
12859 }
12860 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
12861 }
12862
12863 /* Return true if T is known to be non-negative. If the return
12864 value is based on the assumption that signed overflow is undefined,
12865 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12866 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12867
12868 static bool
12869 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12870 {
12871 enum tree_code code = TREE_CODE (t);
12872 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12873 return true;
12874
12875 switch (code)
12876 {
12877 case TARGET_EXPR:
12878 {
12879 tree temp = TARGET_EXPR_SLOT (t);
12880 t = TARGET_EXPR_INITIAL (t);
12881
12882 /* If the initializer is non-void, then it's a normal expression
12883 that will be assigned to the slot. */
12884 if (!VOID_TYPE_P (t))
12885 return RECURSE (t);
12886
12887 /* Otherwise, the initializer sets the slot in some way. One common
12888 way is an assignment statement at the end of the initializer. */
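/* E.g. (a sketch of one possible GENERIC shape; D.1234 is a made-up
   slot name):

     TARGET_EXPR <D.1234, { ...; D.1234 = <value>; }>

   the loop below walks to the last statement of the initializer and
   the assigned value is then tested.  */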
12889 while (1)
12890 {
12891 if (TREE_CODE (t) == BIND_EXPR)
12892 t = expr_last (BIND_EXPR_BODY (t));
12893 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12894 || TREE_CODE (t) == TRY_CATCH_EXPR)
12895 t = expr_last (TREE_OPERAND (t, 0));
12896 else if (TREE_CODE (t) == STATEMENT_LIST)
12897 t = expr_last (t);
12898 else
12899 break;
12900 }
12901 if (TREE_CODE (t) == MODIFY_EXPR
12902 && TREE_OPERAND (t, 0) == temp)
12903 return RECURSE (TREE_OPERAND (t, 1));
12904
12905 return false;
12906 }
12907
12908 case CALL_EXPR:
12909 {
12910 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
12911 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
12912
12913 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
12914 get_call_combined_fn (t),
12915 arg0,
12916 arg1,
12917 strict_overflow_p, depth);
12918 }
12919 case COMPOUND_EXPR:
12920 case MODIFY_EXPR:
12921 return RECURSE (TREE_OPERAND (t, 1));
12922
12923 case BIND_EXPR:
12924 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
12925
12926 case SAVE_EXPR:
12927 return RECURSE (TREE_OPERAND (t, 0));
12928
12929 default:
12930 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12931 }
12932 }
12933
12934 #undef RECURSE
12935 #undef tree_expr_nonnegative_warnv_p
12936
12937 /* Return true if T is known to be non-negative. If the return
12938 value is based on the assumption that signed overflow is undefined,
12939 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12940 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12941
12942 bool
12943 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12944 {
12945 enum tree_code code;
12946 if (t == error_mark_node)
12947 return false;
12948
12949 code = TREE_CODE (t);
12950 switch (TREE_CODE_CLASS (code))
12951 {
12952 case tcc_binary:
12953 case tcc_comparison:
12954 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
12955 TREE_TYPE (t),
12956 TREE_OPERAND (t, 0),
12957 TREE_OPERAND (t, 1),
12958 strict_overflow_p, depth);
12959
12960 case tcc_unary:
12961 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
12962 TREE_TYPE (t),
12963 TREE_OPERAND (t, 0),
12964 strict_overflow_p, depth);
12965
12966 case tcc_constant:
12967 case tcc_declaration:
12968 case tcc_reference:
12969 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
12970
12971 default:
12972 break;
12973 }
12974
12975 switch (code)
12976 {
12977 case TRUTH_AND_EXPR:
12978 case TRUTH_OR_EXPR:
12979 case TRUTH_XOR_EXPR:
12980 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
12981 TREE_TYPE (t),
12982 TREE_OPERAND (t, 0),
12983 TREE_OPERAND (t, 1),
12984 strict_overflow_p, depth);
12985 case TRUTH_NOT_EXPR:
12986 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
12987 TREE_TYPE (t),
12988 TREE_OPERAND (t, 0),
12989 strict_overflow_p, depth);
12990
12991 case COND_EXPR:
12992 case CONSTRUCTOR:
12993 case OBJ_TYPE_REF:
12994 case ASSERT_EXPR:
12995 case ADDR_EXPR:
12996 case WITH_SIZE_EXPR:
12997 case SSA_NAME:
12998 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
12999
13000 default:
13001 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13002 }
13003 }
13004
13005 /* Return true if `t' is known to be non-negative. Handle warnings
13006 about undefined signed overflow. */
13007
13008 bool
13009 tree_expr_nonnegative_p (tree t)
13010 {
13011 bool ret, strict_overflow_p;
13012
13013 strict_overflow_p = false;
13014 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13015 if (strict_overflow_p)
13016 fold_overflow_warning (("assuming signed overflow does not occur when "
13017 "determining that expression is always "
13018 "non-negative"),
13019 WARN_STRICT_OVERFLOW_MISC);
13020 return ret;
13021 }
13022
13023
13024 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13025 For floating point we further ensure that T is not denormal.
13026 Similar logic is present in nonzero_address in rtlanal.h.
13027
13028 If the return value is based on the assumption that signed overflow
13029 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13030 change *STRICT_OVERFLOW_P. */
13031
13032 bool
13033 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13034 bool *strict_overflow_p)
13035 {
13036 switch (code)
13037 {
13038 case ABS_EXPR:
13039 return tree_expr_nonzero_warnv_p (op0,
13040 strict_overflow_p);
13041
13042 case NOP_EXPR:
13043 {
13044 tree inner_type = TREE_TYPE (op0);
13045 tree outer_type = type;
13046
13047 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13048 && tree_expr_nonzero_warnv_p (op0,
13049 strict_overflow_p));
13050 }
13051 break;
13052
13053 case NON_LVALUE_EXPR:
13054 return tree_expr_nonzero_warnv_p (op0,
13055 strict_overflow_p);
13056
13057 default:
13058 break;
13059 }
13060
13061 return false;
13062 }
13063
13064 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13065 For floating point we further ensure that T is not denormal.
13066 Similar logic is present in nonzero_address in rtlanal.h.
13067
13068 If the return value is based on the assumption that signed overflow
13069 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13070 change *STRICT_OVERFLOW_P. */
13071
13072 bool
13073 tree_binary_nonzero_warnv_p (enum tree_code code,
13074 tree type,
13075 tree op0,
13076 tree op1, bool *strict_overflow_p)
13077 {
13078 bool sub_strict_overflow_p;
13079 switch (code)
13080 {
13081 case POINTER_PLUS_EXPR:
13082 case PLUS_EXPR:
13083 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13084 {
13085 /* In the presence of negative values it is hard
13086 to say anything definite.  */
13087 sub_strict_overflow_p = false;
13088 if (!tree_expr_nonnegative_warnv_p (op0,
13089 &sub_strict_overflow_p)
13090 || !tree_expr_nonnegative_warnv_p (op1,
13091 &sub_strict_overflow_p))
13092 return false;
13093 /* One of the operands must be positive and the other non-negative.  */
13094 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13095 overflows, on a twos-complement machine the sum of two
13096 nonnegative numbers can never be zero. */
13097 return (tree_expr_nonzero_warnv_p (op0,
13098 strict_overflow_p)
13099 || tree_expr_nonzero_warnv_p (op1,
13100 strict_overflow_p));
13101 }
13102 break;
13103
13104 case MULT_EXPR:
13105 if (TYPE_OVERFLOW_UNDEFINED (type))
13106 {
13107 if (tree_expr_nonzero_warnv_p (op0,
13108 strict_overflow_p)
13109 && tree_expr_nonzero_warnv_p (op1,
13110 strict_overflow_p))
13111 {
13112 *strict_overflow_p = true;
13113 return true;
13114 }
13115 }
13116 break;
13117
13118 case MIN_EXPR:
13119 sub_strict_overflow_p = false;
13120 if (tree_expr_nonzero_warnv_p (op0,
13121 &sub_strict_overflow_p)
13122 && tree_expr_nonzero_warnv_p (op1,
13123 &sub_strict_overflow_p))
13124 {
13125 if (sub_strict_overflow_p)
13126 *strict_overflow_p = true;
13127 }
13128 break;
13129
13130 case MAX_EXPR:
13131 sub_strict_overflow_p = false;
13132 if (tree_expr_nonzero_warnv_p (op0,
13133 &sub_strict_overflow_p))
13134 {
13135 if (sub_strict_overflow_p)
13136 *strict_overflow_p = true;
13137
13138 /* When both operands are nonzero, MAX must be too.  */
13139 if (tree_expr_nonzero_warnv_p (op1,
13140 strict_overflow_p))
13141 return true;
13142
13143 /* MAX where operand 0 is positive is positive. */
13144 return tree_expr_nonnegative_warnv_p (op0,
13145 strict_overflow_p);
13146 }
13147 /* MAX where operand 1 is positive is positive. */
13148 else if (tree_expr_nonzero_warnv_p (op1,
13149 &sub_strict_overflow_p)
13150 && tree_expr_nonnegative_warnv_p (op1,
13151 &sub_strict_overflow_p))
13152 {
13153 if (sub_strict_overflow_p)
13154 *strict_overflow_p = true;
13155 return true;
13156 }
13157 break;
13158
13159 case BIT_IOR_EXPR:
13160 return (tree_expr_nonzero_warnv_p (op1,
13161 strict_overflow_p)
13162 || tree_expr_nonzero_warnv_p (op0,
13163 strict_overflow_p));
13164
13165 default:
13166 break;
13167 }
13168
13169 return false;
13170 }
13171
13172 /* Return true when T is an address and is known to be nonzero.
13173 For floating point we further ensure that T is not denormal.
13174 Similar logic is present in nonzero_address in rtlanal.h.
13175
13176 If the return value is based on the assumption that signed overflow
13177 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13178 change *STRICT_OVERFLOW_P. */
13179
13180 bool
13181 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13182 {
13183 bool sub_strict_overflow_p;
13184 switch (TREE_CODE (t))
13185 {
13186 case INTEGER_CST:
13187 return !integer_zerop (t);
13188
13189 case ADDR_EXPR:
13190 {
13191 tree base = TREE_OPERAND (t, 0);
13192
13193 if (!DECL_P (base))
13194 base = get_base_address (base);
13195
13196 if (base && TREE_CODE (base) == TARGET_EXPR)
13197 base = TARGET_EXPR_SLOT (base);
13198
13199 if (!base)
13200 return false;
13201
13202 /* For objects in the symbol table, check whether we know they are non-zero.
13203 Don't do anything for variables and functions before the symtab is built;
13204 it is quite possible that they will be declared weak later.  */
13205 int nonzero_addr = maybe_nonzero_address (base);
13206 if (nonzero_addr >= 0)
13207 return nonzero_addr;
13208
13209 /* Constants are never weak. */
13210 if (CONSTANT_CLASS_P (base))
13211 return true;
13212
13213 return false;
13214 }
13215
13216 case COND_EXPR:
13217 sub_strict_overflow_p = false;
13218 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13219 &sub_strict_overflow_p)
13220 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13221 &sub_strict_overflow_p))
13222 {
13223 if (sub_strict_overflow_p)
13224 *strict_overflow_p = true;
13225 return true;
13226 }
13227 break;
13228
13229 case SSA_NAME:
13230 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13231 break;
13232 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13233
13234 default:
13235 break;
13236 }
13237 return false;
13238 }
13239
13240 #define integer_valued_real_p(X) \
13241 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13242
13243 #define RECURSE(X) \
13244 ((integer_valued_real_p) (X, depth + 1))
13245
13246 /* Return true if the floating point result of (CODE OP0) has an
13247 integer value. We also allow +Inf, -Inf and NaN to be considered
13248 integer values. Return false for signaling NaN.
13249
13250 DEPTH is the current nesting depth of the query. */
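/* E.g. (float) i is integer valued for any integer i, and fabs (x)
   (an ABS_EXPR) is integer valued whenever x is.  */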
13251
13252 bool
13253 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13254 {
13255 switch (code)
13256 {
13257 case FLOAT_EXPR:
13258 return true;
13259
13260 case ABS_EXPR:
13261 return RECURSE (op0);
13262
13263 CASE_CONVERT:
13264 {
13265 tree type = TREE_TYPE (op0);
13266 if (TREE_CODE (type) == INTEGER_TYPE)
13267 return true;
13268 if (TREE_CODE (type) == REAL_TYPE)
13269 return RECURSE (op0);
13270 break;
13271 }
13272
13273 default:
13274 break;
13275 }
13276 return false;
13277 }
13278
13279 /* Return true if the floating point result of (CODE OP0 OP1) has an
13280 integer value. We also allow +Inf, -Inf and NaN to be considered
13281 integer values. Return false for signaling NaN.
13282
13283 DEPTH is the current nesting depth of the query. */
13284
13285 bool
13286 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13287 {
13288 switch (code)
13289 {
13290 case PLUS_EXPR:
13291 case MINUS_EXPR:
13292 case MULT_EXPR:
13293 case MIN_EXPR:
13294 case MAX_EXPR:
13295 return RECURSE (op0) && RECURSE (op1);
13296
13297 default:
13298 break;
13299 }
13300 return false;
13301 }
13302
13303 /* Return true if the floating point result of calling FN with arguments
13304 ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
13305 considered integer values.  Return false for signaling NaN.  If FN
13306 takes fewer than 2 arguments, the remaining ARGn are null.
13307
13308 DEPTH is the current nesting depth of the query. */
13309
13310 bool
13311 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13312 {
13313 switch (fn)
13314 {
13315 CASE_CFN_CEIL:
13316 CASE_CFN_FLOOR:
13317 CASE_CFN_NEARBYINT:
13318 CASE_CFN_RINT:
13319 CASE_CFN_ROUND:
13320 CASE_CFN_TRUNC:
13321 return true;
13322
13323 CASE_CFN_FMIN:
13324 CASE_CFN_FMIN_FN:
13325 CASE_CFN_FMAX:
13326 CASE_CFN_FMAX_FN:
13327 return RECURSE (arg0) && RECURSE (arg1);
13328
13329 default:
13330 break;
13331 }
13332 return false;
13333 }
13334
13335 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13336 has an integer value. We also allow +Inf, -Inf and NaN to be
13337 considered integer values. Return false for signaling NaN.
13338
13339 DEPTH is the current nesting depth of the query. */
13340
13341 bool
13342 integer_valued_real_single_p (tree t, int depth)
13343 {
13344 switch (TREE_CODE (t))
13345 {
13346 case REAL_CST:
13347 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13348
13349 case COND_EXPR:
13350 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13351
13352 case SSA_NAME:
13353 /* Limit the depth of recursion to avoid quadratic behavior.
13354 This is expected to catch almost all occurrences in practice.
13355 If this code misses important cases that unbounded recursion
13356 would not, passes that need this information could be revised
13357 to provide it through dataflow propagation. */
13358 return (!name_registered_for_update_p (t)
13359 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13360 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13361 depth));
13362
13363 default:
13364 break;
13365 }
13366 return false;
13367 }
13368
13369 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13370 has an integer value. We also allow +Inf, -Inf and NaN to be
13371 considered integer values. Return false for signaling NaN.
13372
13373 DEPTH is the current nesting depth of the query. */
13374
13375 static bool
13376 integer_valued_real_invalid_p (tree t, int depth)
13377 {
13378 switch (TREE_CODE (t))
13379 {
13380 case COMPOUND_EXPR:
13381 case MODIFY_EXPR:
13382 case BIND_EXPR:
13383 return RECURSE (TREE_OPERAND (t, 1));
13384
13385 case SAVE_EXPR:
13386 return RECURSE (TREE_OPERAND (t, 0));
13387
13388 default:
13389 break;
13390 }
13391 return false;
13392 }
13393
13394 #undef RECURSE
13395 #undef integer_valued_real_p
13396
13397 /* Return true if the floating point expression T has an integer value.
13398 We also allow +Inf, -Inf and NaN to be considered integer values.
13399 Return false for signaling NaN.
13400
13401 DEPTH is the current nesting depth of the query. */
13402
13403 bool
13404 integer_valued_real_p (tree t, int depth)
13405 {
13406 if (t == error_mark_node)
13407 return false;
13408
13409 tree_code code = TREE_CODE (t);
13410 switch (TREE_CODE_CLASS (code))
13411 {
13412 case tcc_binary:
13413 case tcc_comparison:
13414 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13415 TREE_OPERAND (t, 1), depth);
13416
13417 case tcc_unary:
13418 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13419
13420 case tcc_constant:
13421 case tcc_declaration:
13422 case tcc_reference:
13423 return integer_valued_real_single_p (t, depth);
13424
13425 default:
13426 break;
13427 }
13428
13429 switch (code)
13430 {
13431 case COND_EXPR:
13432 case SSA_NAME:
13433 return integer_valued_real_single_p (t, depth);
13434
13435 case CALL_EXPR:
13436 {
13437 tree arg0 = (call_expr_nargs (t) > 0
13438 ? CALL_EXPR_ARG (t, 0)
13439 : NULL_TREE);
13440 tree arg1 = (call_expr_nargs (t) > 1
13441 ? CALL_EXPR_ARG (t, 1)
13442 : NULL_TREE);
13443 return integer_valued_real_call_p (get_call_combined_fn (t),
13444 arg0, arg1, depth);
13445 }
13446
13447 default:
13448 return integer_valued_real_invalid_p (t, depth);
13449 }
13450 }
13451
13452 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13453 attempt to fold the expression to a constant without modifying TYPE,
13454 OP0 or OP1.
13455
13456 If the expression could be simplified to a constant, then return
13457 the constant. If the expression would not be simplified to a
13458 constant, then return NULL_TREE. */
13459
13460 tree
13461 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13462 {
13463 tree tem = fold_binary (code, type, op0, op1);
13464 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13465 }
13466
13467 /* Given the components of a unary expression CODE, TYPE and OP0,
13468 attempt to fold the expression to a constant without modifying
13469 TYPE or OP0.
13470
13471 If the expression could be simplified to a constant, then return
13472 the constant. If the expression would not be simplified to a
13473 constant, then return NULL_TREE. */
13474
13475 tree
13476 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13477 {
13478 tree tem = fold_unary (code, type, op0);
13479 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13480 }
13481
13482 /* If EXP represents referencing an element in a constant string
13483 (either via pointer arithmetic or array indexing), return the
13484 tree representing the value accessed, otherwise return NULL. */
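/* For example, both "abc"[1] and *("abc" + 1) fold to the character
   constant 'b'.  */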
13485
13486 tree
13487 fold_read_from_constant_string (tree exp)
13488 {
13489 if ((TREE_CODE (exp) == INDIRECT_REF
13490 || TREE_CODE (exp) == ARRAY_REF)
13491 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13492 {
13493 tree exp1 = TREE_OPERAND (exp, 0);
13494 tree index;
13495 tree string;
13496 location_t loc = EXPR_LOCATION (exp);
13497
13498 if (TREE_CODE (exp) == INDIRECT_REF)
13499 string = string_constant (exp1, &index);
13500 else
13501 {
13502 tree low_bound = array_ref_low_bound (exp);
13503 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13504
13505 /* Optimize the special case of a zero lower bound.
13506
13507 We convert the low_bound to sizetype to avoid some problems
13508 with constant folding.  (E.g. suppose the lower bound is 1,
13509 and its mode is QI.  Without the conversion, (ARRAY
13510 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13511 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13512 if (! integer_zerop (low_bound))
13513 index = size_diffop_loc (loc, index,
13514 fold_convert_loc (loc, sizetype, low_bound));
13515
13516 string = exp1;
13517 }
13518
13519 scalar_int_mode char_mode;
13520 if (string
13521 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13522 && TREE_CODE (string) == STRING_CST
13523 && TREE_CODE (index) == INTEGER_CST
13524 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13525 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13526 &char_mode)
13527 && GET_MODE_SIZE (char_mode) == 1)
13528 return build_int_cst_type (TREE_TYPE (exp),
13529 (TREE_STRING_POINTER (string)
13530 [TREE_INT_CST_LOW (index)]));
13531 }
13532 return NULL;
13533 }
13534
13535 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13536 an integer constant, real, or fixed-point constant.
13537
13538 TYPE is the type of the result. */
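/* E.g. negating INT_MIN in a signed 32-bit type wraps back to INT_MIN;
   the INTEGER_CST case below reports this by setting TREE_OVERFLOW on
   the folded result.  */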
13539
13540 static tree
13541 fold_negate_const (tree arg0, tree type)
13542 {
13543 tree t = NULL_TREE;
13544
13545 switch (TREE_CODE (arg0))
13546 {
13547 case INTEGER_CST:
13548 {
13549 bool overflow;
13550 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13551 t = force_fit_type (type, val, 1,
13552 (overflow && ! TYPE_UNSIGNED (type))
13553 || TREE_OVERFLOW (arg0));
13554 break;
13555 }
13556
13557 case REAL_CST:
13558 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13559 break;
13560
13561 case FIXED_CST:
13562 {
13563 FIXED_VALUE_TYPE f;
13564 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13565 &(TREE_FIXED_CST (arg0)), NULL,
13566 TYPE_SATURATING (type));
13567 t = build_fixed (type, f);
13568 /* Propagate overflow flags. */
13569 if (overflow_p | TREE_OVERFLOW (arg0))
13570 TREE_OVERFLOW (t) = 1;
13571 break;
13572 }
13573
13574 default:
13575 gcc_unreachable ();
13576 }
13577
13578 return t;
13579 }
13580
13581 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13582 an integer constant or real constant.
13583
13584 TYPE is the type of the result. */
13585
13586 tree
13587 fold_abs_const (tree arg0, tree type)
13588 {
13589 tree t = NULL_TREE;
13590
13591 switch (TREE_CODE (arg0))
13592 {
13593 case INTEGER_CST:
13594 {
13595 /* If the value is unsigned or non-negative, then the absolute value
13596 is the same as the ordinary value. */
13597 if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
13598 t = arg0;
13599
13600 /* If the value is negative, then the absolute value is
13601 its negation. */
13602 else
13603 {
13604 bool overflow;
13605 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13606 t = force_fit_type (type, val, -1,
13607 overflow | TREE_OVERFLOW (arg0));
13608 }
13609 }
13610 break;
13611
13612 case REAL_CST:
13613 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13614 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13615 else
13616 t = arg0;
13617 break;
13618
13619 default:
13620 gcc_unreachable ();
13621 }
13622
13623 return t;
13624 }
13625
13626 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13627 constant. TYPE is the type of the result. */
13628
13629 static tree
13630 fold_not_const (const_tree arg0, tree type)
13631 {
13632 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13633
13634 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13635 }
13636
13637 /* Given CODE, a relational operator, the target type, TYPE and two
13638 constant operands OP0 and OP1, return the result of the
13639 relational operation. If the result is not a compile time
13640 constant, then return NULL_TREE. */
13641
13642 static tree
13643 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13644 {
13645 int result, invert;
13646
13647 /* From here on, the only cases we handle are when the result is
13648 known to be a constant. */
13649
13650 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13651 {
13652 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13653 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13654
13655 /* Handle the cases where either operand is a NaN. */
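/* E.g. NaN == x folds to false and NaN != x folds to true, while
   NaN < x is left unfolded under -ftrapping-math, because the
   ordered comparison may raise an invalid operation exception.  */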
13656 if (real_isnan (c0) || real_isnan (c1))
13657 {
13658 switch (code)
13659 {
13660 case EQ_EXPR:
13661 case ORDERED_EXPR:
13662 result = 0;
13663 break;
13664
13665 case NE_EXPR:
13666 case UNORDERED_EXPR:
13667 case UNLT_EXPR:
13668 case UNLE_EXPR:
13669 case UNGT_EXPR:
13670 case UNGE_EXPR:
13671 case UNEQ_EXPR:
13672 result = 1;
13673 break;
13674
13675 case LT_EXPR:
13676 case LE_EXPR:
13677 case GT_EXPR:
13678 case GE_EXPR:
13679 case LTGT_EXPR:
13680 if (flag_trapping_math)
13681 return NULL_TREE;
13682 result = 0;
13683 break;
13684
13685 default:
13686 gcc_unreachable ();
13687 }
13688
13689 return constant_boolean_node (result, type);
13690 }
13691
13692 return constant_boolean_node (real_compare (code, c0, c1), type);
13693 }
13694
13695 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13696 {
13697 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13698 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13699 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13700 }
13701
13702 /* Handle equality/inequality of complex constants. */
13703 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13704 {
13705 tree rcond = fold_relational_const (code, type,
13706 TREE_REALPART (op0),
13707 TREE_REALPART (op1));
13708 tree icond = fold_relational_const (code, type,
13709 TREE_IMAGPART (op0),
13710 TREE_IMAGPART (op1));
13711 if (code == EQ_EXPR)
13712 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13713 else if (code == NE_EXPR)
13714 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13715 else
13716 return NULL_TREE;
13717 }
13718
13719 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13720 {
13721 if (!VECTOR_TYPE_P (type))
13722 {
13723 /* We have a vector comparison with a scalar boolean result.  */
13724 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13725 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13726 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13727 {
13728 tree elem0 = VECTOR_CST_ELT (op0, i);
13729 tree elem1 = VECTOR_CST_ELT (op1, i);
13730 tree tmp = fold_relational_const (code, type, elem0, elem1);
13731 if (tmp == NULL_TREE)
13732 return NULL_TREE;
13733 if (integer_zerop (tmp))
13734 return constant_boolean_node (false, type);
13735 }
13736 return constant_boolean_node (true, type);
13737 }
13738 unsigned count = VECTOR_CST_NELTS (op0);
13739 gcc_assert (VECTOR_CST_NELTS (op1) == count
13740 && TYPE_VECTOR_SUBPARTS (type) == count);
13741
13742 auto_vec<tree, 32> elts (count);
13743 for (unsigned i = 0; i < count; i++)
13744 {
13745 tree elem_type = TREE_TYPE (type);
13746 tree elem0 = VECTOR_CST_ELT (op0, i);
13747 tree elem1 = VECTOR_CST_ELT (op1, i);
13748
13749 tree tem = fold_relational_const (code, elem_type,
13750 elem0, elem1);
13751
13752 if (tem == NULL_TREE)
13753 return NULL_TREE;
13754
13755 elts.quick_push (build_int_cst (elem_type,
13756 integer_zerop (tem) ? 0 : -1));
13757 }
13758
13759 return build_vector (type, elts);
13760 }
13761
13762 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13763
13764 To compute GT, swap the arguments and do LT.
13765 To compute GE, do LT and invert the result.
13766 To compute LE, swap the arguments, do LT and invert the result.
13767 To compute NE, do EQ and invert the result.
13768
13769 Therefore, the code below must handle only EQ and LT. */
13770
13771 if (code == LE_EXPR || code == GT_EXPR)
13772 {
13773 std::swap (op0, op1);
13774 code = swap_tree_comparison (code);
13775 }
13776
13777 /* Note that it is safe to invert for real values here because we
13778 have already handled the one case where it matters.  */
13779
13780 invert = 0;
13781 if (code == NE_EXPR || code == GE_EXPR)
13782 {
13783 invert = 1;
13784 code = invert_tree_comparison (code, false);
13785 }
13786
13787 /* Compute a result for LT or EQ if args permit;
13788 otherwise return NULL_TREE.  */
13789 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13790 {
13791 if (code == EQ_EXPR)
13792 result = tree_int_cst_equal (op0, op1);
13793 else
13794 result = tree_int_cst_lt (op0, op1);
13795 }
13796 else
13797 return NULL_TREE;
13798
13799 if (invert)
13800 result ^= 1;
13801 return constant_boolean_node (result, type);
13802 }
13803
13804 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13805 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13806 itself. */
13807
13808 tree
13809 fold_build_cleanup_point_expr (tree type, tree expr)
13810 {
13811 /* If the expression does not have side effects then we don't have to wrap
13812 it with a cleanup point expression. */
13813 if (!TREE_SIDE_EFFECTS (expr))
13814 return expr;
13815
13816 /* If the expression is a return, check to see if the expression inside the
13817 return has no side effects or the right hand side of the modify expression
13818 inside the return. If either don't have side effects set we don't need to
13819 wrap the expression in a cleanup point expression. Note we don't check the
13820 left hand side of the modify because it should always be a return decl. */
13821 if (TREE_CODE (expr) == RETURN_EXPR)
13822 {
13823 tree op = TREE_OPERAND (expr, 0);
13824 if (!op || !TREE_SIDE_EFFECTS (op))
13825 return expr;
13826 op = TREE_OPERAND (op, 1);
13827 if (!TREE_SIDE_EFFECTS (op))
13828 return expr;
13829 }
13830
13831 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
13832 }
13833
13834 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13835 of an indirection through OP0, or NULL_TREE if no simplification is
13836 possible. */
13837
13838 tree
13839 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13840 {
13841 tree sub = op0;
13842 tree subtype;
13843
13844 STRIP_NOPS (sub);
13845 subtype = TREE_TYPE (sub);
13846 if (!POINTER_TYPE_P (subtype)
13847 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
13848 return NULL_TREE;
13849
13850 if (TREE_CODE (sub) == ADDR_EXPR)
13851 {
13852 tree op = TREE_OPERAND (sub, 0);
13853 tree optype = TREE_TYPE (op);
13854       /* *&CONST_DECL -> the value of the const decl.  */
13855 if (TREE_CODE (op) == CONST_DECL)
13856 return DECL_INITIAL (op);
13857 /* *&p => p; make sure to handle *&"str"[cst] here. */
13858 if (type == optype)
13859 {
13860 tree fop = fold_read_from_constant_string (op);
13861 if (fop)
13862 return fop;
13863 else
13864 return op;
13865 }
13866 /* *(foo *)&fooarray => fooarray[0] */
13867 else if (TREE_CODE (optype) == ARRAY_TYPE
13868 && type == TREE_TYPE (optype)
13869 && (!in_gimple_form
13870 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13871 {
13872 tree type_domain = TYPE_DOMAIN (optype);
13873 tree min_val = size_zero_node;
13874 if (type_domain && TYPE_MIN_VALUE (type_domain))
13875 min_val = TYPE_MIN_VALUE (type_domain);
13876 if (in_gimple_form
13877 && TREE_CODE (min_val) != INTEGER_CST)
13878 return NULL_TREE;
13879 return build4_loc (loc, ARRAY_REF, type, op, min_val,
13880 NULL_TREE, NULL_TREE);
13881 }
13882 /* *(foo *)&complexfoo => __real__ complexfoo */
13883 else if (TREE_CODE (optype) == COMPLEX_TYPE
13884 && type == TREE_TYPE (optype))
13885 return fold_build1_loc (loc, REALPART_EXPR, type, op);
13886 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13887 else if (TREE_CODE (optype) == VECTOR_TYPE
13888 && type == TREE_TYPE (optype))
13889 {
13890 tree part_width = TYPE_SIZE (type);
13891 tree index = bitsize_int (0);
13892 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
13893 }
13894 }
13895
13896 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
13897 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13898 {
13899 tree op00 = TREE_OPERAND (sub, 0);
13900 tree op01 = TREE_OPERAND (sub, 1);
13901
13902 STRIP_NOPS (op00);
13903 if (TREE_CODE (op00) == ADDR_EXPR)
13904 {
13905 tree op00type;
13906 op00 = TREE_OPERAND (op00, 0);
13907 op00type = TREE_TYPE (op00);
13908
13909 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
13910 if (TREE_CODE (op00type) == VECTOR_TYPE
13911 && type == TREE_TYPE (op00type))
13912 {
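	      /* The constant byte offset must be non-negative and lie within
		 the vector; the selected piece is then extracted as a
		 BIT_FIELD_REF at the corresponding bit position.  */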
13913 tree part_width = TYPE_SIZE (type);
13914 unsigned HOST_WIDE_INT max_offset
13915 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
13916 * TYPE_VECTOR_SUBPARTS (op00type));
13917 if (tree_int_cst_sign_bit (op01) == 0
13918 && compare_tree_int (op01, max_offset) == -1)
13919 {
13920 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
13921 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
13922 tree index = bitsize_int (indexi);
13923 return fold_build3_loc (loc,
13924 BIT_FIELD_REF, type, op00,
13925 part_width, index);
13926 }
13927 }
13928 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13929 else if (TREE_CODE (op00type) == COMPLEX_TYPE
13930 && type == TREE_TYPE (op00type))
13931 {
13932 tree size = TYPE_SIZE_UNIT (type);
13933 if (tree_int_cst_equal (size, op01))
13934 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
13935 }
13936 /* ((foo *)&fooarray)[1] => fooarray[1] */
13937 else if (TREE_CODE (op00type) == ARRAY_TYPE
13938 && type == TREE_TYPE (op00type))
13939 {
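	      /* The constant byte offset must be an exact multiple of the
		 element size; the quotient, plus the array's lower bound,
		 becomes the index, e.g. offset 8 with 4-byte elements
		 yields fooarray[2].  */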
13940 tree type_domain = TYPE_DOMAIN (op00type);
13941 tree min = size_zero_node;
13942 if (type_domain && TYPE_MIN_VALUE (type_domain))
13943 min = TYPE_MIN_VALUE (type_domain);
13944 offset_int off = wi::to_offset (op01);
13945 offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
13946 offset_int remainder;
13947 off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
13948 if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
13949 {
13950 off = off + wi::to_offset (min);
13951 op01 = wide_int_to_tree (sizetype, off);
13952 return build4_loc (loc, ARRAY_REF, type, op00, op01,
13953 NULL_TREE, NULL_TREE);
13954 }
13955 }
13956 }
13957 }
13958
13959 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13960 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13961 && type == TREE_TYPE (TREE_TYPE (subtype))
13962 && (!in_gimple_form
13963 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13964 {
13965 tree type_domain;
13966 tree min_val = size_zero_node;
13967 sub = build_fold_indirect_ref_loc (loc, sub);
13968 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13969 if (type_domain && TYPE_MIN_VALUE (type_domain))
13970 min_val = TYPE_MIN_VALUE (type_domain);
13971 if (in_gimple_form
13972 && TREE_CODE (min_val) != INTEGER_CST)
13973 return NULL_TREE;
13974 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
13975 NULL_TREE);
13976 }
13977
13978 return NULL_TREE;
13979 }
13980
13981 /* Builds an expression for an indirection through T, simplifying some
13982 cases. */
13983
13984 tree
13985 build_fold_indirect_ref_loc (location_t loc, tree t)
13986 {
13987 tree type = TREE_TYPE (TREE_TYPE (t));
13988 tree sub = fold_indirect_ref_1 (loc, type, t);
13989
13990 if (sub)
13991 return sub;
13992
13993 return build1_loc (loc, INDIRECT_REF, type, t);
13994 }
13995
13996 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13997
13998 tree
13999 fold_indirect_ref_loc (location_t loc, tree t)
14000 {
14001 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14002
14003 if (sub)
14004 return sub;
14005 else
14006 return t;
14007 }
14008
14009 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14010 whose result is ignored. The type of the returned tree need not be
14011 the same as the original expression. */
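/* For example, when its value is ignored, (f (), x + 1) reduces to
   f (), and an expression with no side effects at all reduces to
   integer_zero_node.  */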
14012
14013 tree
14014 fold_ignored_result (tree t)
14015 {
14016 if (!TREE_SIDE_EFFECTS (t))
14017 return integer_zero_node;
14018
14019 for (;;)
14020 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14021 {
14022 case tcc_unary:
14023 t = TREE_OPERAND (t, 0);
14024 break;
14025
14026 case tcc_binary:
14027 case tcc_comparison:
14028 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14029 t = TREE_OPERAND (t, 0);
14030 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14031 t = TREE_OPERAND (t, 1);
14032 else
14033 return t;
14034 break;
14035
14036 case tcc_expression:
14037 switch (TREE_CODE (t))
14038 {
14039 case COMPOUND_EXPR:
14040 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14041 return t;
14042 t = TREE_OPERAND (t, 0);
14043 break;
14044
14045 case COND_EXPR:
14046 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14047 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14048 return t;
14049 t = TREE_OPERAND (t, 0);
14050 break;
14051
14052 default:
14053 return t;
14054 }
14055 break;
14056
14057 default:
14058 return t;
14059 }
14060 }
14061
14062 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14063
14064 tree
14065 round_up_loc (location_t loc, tree value, unsigned int divisor)
14066 {
14067 tree div = NULL_TREE;
14068
14069 if (divisor == 1)
14070 return value;
14071
14072 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14073 have to do anything. Only do this when we are not given a const,
14074 because in that case, this check is more expensive than just
14075      doing the rounding.  */
14076 if (TREE_CODE (value) != INTEGER_CST)
14077 {
14078 div = build_int_cst (TREE_TYPE (value), divisor);
14079
14080 if (multiple_of_p (TREE_TYPE (value), value, div))
14081 return value;
14082 }
14083
14084 /* If divisor is a power of two, simplify this to bit manipulation. */
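      /* E.g. rounding 13 up to a multiple of 8: (13 + 7) & -8 == 16.  */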
14085 if (pow2_or_zerop (divisor))
14086 {
14087 if (TREE_CODE (value) == INTEGER_CST)
14088 {
14089 wide_int val = wi::to_wide (value);
14090 bool overflow_p;
14091
14092 if ((val & (divisor - 1)) == 0)
14093 return value;
14094
14095 overflow_p = TREE_OVERFLOW (value);
14096 val += divisor - 1;
14097 val &= (int) -divisor;
14098 if (val == 0)
14099 overflow_p = true;
14100
14101 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14102 }
14103 else
14104 {
14105 tree t;
14106
14107 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14108 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14109 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14110 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14111 }
14112 }
14113 else
14114 {
14115 if (!div)
14116 div = build_int_cst (TREE_TYPE (value), divisor);
14117 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14118 value = size_binop_loc (loc, MULT_EXPR, value, div);
14119 }
14120
14121 return value;
14122 }
14123
14124 /* Likewise, but round down. */
14125
14126 tree
14127 round_down_loc (location_t loc, tree value, int divisor)
14128 {
14129 tree div = NULL_TREE;
14130
14131 gcc_assert (divisor > 0);
14132 if (divisor == 1)
14133 return value;
14134
14135 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14136 have to do anything. Only do this when we are not given a const,
14137 because in that case, this check is more expensive than just
14138      doing the rounding.  */
14139 if (TREE_CODE (value) != INTEGER_CST)
14140 {
14141 div = build_int_cst (TREE_TYPE (value), divisor);
14142
14143 if (multiple_of_p (TREE_TYPE (value), value, div))
14144 return value;
14145 }
14146
14147 /* If divisor is a power of two, simplify this to bit manipulation. */
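      /* E.g. rounding 13 down to a multiple of 8: 13 & -8 == 8.  */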
14148 if (pow2_or_zerop (divisor))
14149 {
14150 tree t;
14151
14152 t = build_int_cst (TREE_TYPE (value), -divisor);
14153 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14154 }
14155 else
14156 {
14157 if (!div)
14158 div = build_int_cst (TREE_TYPE (value), divisor);
14159 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14160 value = size_binop_loc (loc, MULT_EXPR, value, div);
14161 }
14162
14163 return value;
14164 }
14165
14166 /* Returns a pointer to the base of the object addressed by EXP and
14167    extracts information about the offset of the access, storing it
14168    in *PBITPOS and *POFFSET.  */
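/* For example, for EXP == &a[2] with 4-byte array elements the core is
   &a, *PBITPOS is 64 and *POFFSET is NULL_TREE; for p + 4 the core is
   p, *PBITPOS is 32 and *POFFSET is NULL_TREE.  */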
14169
14170 static tree
14171 split_address_to_core_and_offset (tree exp,
14172 HOST_WIDE_INT *pbitpos, tree *poffset)
14173 {
14174 tree core;
14175 machine_mode mode;
14176 int unsignedp, reversep, volatilep;
14177 HOST_WIDE_INT bitsize;
14178 location_t loc = EXPR_LOCATION (exp);
14179
14180 if (TREE_CODE (exp) == ADDR_EXPR)
14181 {
14182 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14183 poffset, &mode, &unsignedp, &reversep,
14184 &volatilep);
14185 core = build_fold_addr_expr_loc (loc, core);
14186 }
14187 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14188 {
14189 core = TREE_OPERAND (exp, 0);
14190 STRIP_NOPS (core);
14191 *pbitpos = 0;
14192 *poffset = TREE_OPERAND (exp, 1);
14193 if (TREE_CODE (*poffset) == INTEGER_CST)
14194 {
14195 offset_int tem = wi::sext (wi::to_offset (*poffset),
14196 TYPE_PRECISION (TREE_TYPE (*poffset)));
14197 tem <<= LOG2_BITS_PER_UNIT;
14198 if (wi::fits_shwi_p (tem))
14199 {
14200 *pbitpos = tem.to_shwi ();
14201 *poffset = NULL_TREE;
14202 }
14203 }
14204 }
14205 else
14206 {
14207 core = exp;
14208 *pbitpos = 0;
14209 *poffset = NULL_TREE;
14210 }
14211
14212 return core;
14213 }
14214
14215 /* Returns true if the addresses of E1 and E2 differ by a constant,
14216    false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
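/* For example, for &a[3] and &a[1] with 4-byte elements *DIFF is set
   to 8.  */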
14217
14218 bool
14219 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14220 {
14221 tree core1, core2;
14222 HOST_WIDE_INT bitpos1, bitpos2;
14223 tree toffset1, toffset2, tdiff, type;
14224
14225 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14226 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14227
14228 if (bitpos1 % BITS_PER_UNIT != 0
14229 || bitpos2 % BITS_PER_UNIT != 0
14230 || !operand_equal_p (core1, core2, 0))
14231 return false;
14232
14233 if (toffset1 && toffset2)
14234 {
14235 type = TREE_TYPE (toffset1);
14236 if (type != TREE_TYPE (toffset2))
14237 toffset2 = fold_convert (type, toffset2);
14238
14239 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14240 if (!cst_and_fits_in_hwi (tdiff))
14241 return false;
14242
14243 *diff = int_cst_value (tdiff);
14244 }
14245 else if (toffset1 || toffset2)
14246 {
14247 /* If only one of the offsets is non-constant, the difference cannot
14248 be a constant. */
14249 return false;
14250 }
14251 else
14252 *diff = 0;
14253
14254 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14255 return true;
14256 }
14257
14258 /* Return OFF converted to a pointer offset type suitable as offset for
14259 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14260 tree
14261 convert_to_ptrofftype_loc (location_t loc, tree off)
14262 {
14263 return fold_convert_loc (loc, sizetype, off);
14264 }
14265
14266 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14267 tree
14268 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14269 {
14270 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14271 ptr, convert_to_ptrofftype_loc (loc, off));
14272 }
14273
14274 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14275 tree
14276 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14277 {
14278 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14279 ptr, size_int (off));
14280 }
14281
14282 /* Return a char pointer for a C string if it is a string constant
14283    or a sum of a string constant and an integer constant.  We only
14284    support string constants properly terminated with a '\0' character.
14285    If STRLEN is a valid pointer, the length (including the terminating
14286    character) of the returned string is stored in *STRLEN.  */
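/* For example, for the tree representing "hello" + 2, c_getstr returns
   "llo" and sets *STRLEN to 4 (three characters plus the NUL).  */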
14287
14288 const char *
14289 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14290 {
14291 tree offset_node;
14292
14293 if (strlen)
14294 *strlen = 0;
14295
14296 src = string_constant (src, &offset_node);
14297 if (src == 0)
14298 return NULL;
14299
14300 unsigned HOST_WIDE_INT offset = 0;
14301 if (offset_node != NULL_TREE)
14302 {
14303 if (!tree_fits_uhwi_p (offset_node))
14304 return NULL;
14305 else
14306 offset = tree_to_uhwi (offset_node);
14307 }
14308
14309 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14310 const char *string = TREE_STRING_POINTER (src);
14311
14312 /* Support only properly null-terminated strings. */
14313 if (string_length == 0
14314 || string[string_length - 1] != '\0'
14315 || offset >= string_length)
14316 return NULL;
14317
14318 if (strlen)
14319 *strlen = string_length - offset;
14320 return string + offset;
14321 }
14322
14323 #if CHECKING_P
14324
14325 namespace selftest {
14326
14327 /* Helper functions for writing tests of folding trees. */
14328
14329 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14330
14331 static void
14332 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14333 tree constant)
14334 {
14335 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14336 }
14337
14338 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14339    wrapping WRAPPED_EXPR.  */
14340
14341 static void
14342 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14343 tree wrapped_expr)
14344 {
14345 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14346 ASSERT_NE (wrapped_expr, result);
14347 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14348 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14349 }
14350
14351 /* Verify that various arithmetic binary operations are folded
14352 correctly. */
14353
14354 static void
14355 test_arithmetic_folding ()
14356 {
14357 tree type = integer_type_node;
14358 tree x = create_tmp_var_raw (type, "x");
14359 tree zero = build_zero_cst (type);
14360 tree one = build_int_cst (type, 1);
14361
14362 /* Addition. */
14363 /* 1 <-- (0 + 1) */
14364 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14365 one);
14366 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14367 one);
14368
14369 /* (nonlvalue)x <-- (x + 0) */
14370 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14371 x);
14372
14373 /* Subtraction. */
14374 /* 0 <-- (x - x) */
14375 assert_binop_folds_to_const (x, MINUS_EXPR, x,
14376 zero);
14377 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14378 x);
14379
14380 /* Multiplication. */
14381 /* 0 <-- (x * 0) */
14382 assert_binop_folds_to_const (x, MULT_EXPR, zero,
14383 zero);
14384
14385 /* (nonlvalue)x <-- (x * 1) */
14386 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14387 x);
14388 }
14389
14390 /* Verify that various binary operations on vectors are folded
14391 correctly. */
14392
14393 static void
14394 test_vector_folding ()
14395 {
14396 tree inner_type = integer_type_node;
14397 tree type = build_vector_type (inner_type, 4);
14398 tree zero = build_zero_cst (type);
14399 tree one = build_one_cst (type);
14400
14401 /* Verify equality tests that return a scalar boolean result. */
14402 tree res_type = boolean_type_node;
14403 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14404 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14405 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14406 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14407 }
14408
14409 /* Run all of the selftests within this file. */
14410
14411 void
14412 fold_const_c_tests ()
14413 {
14414 test_arithmetic_folding ();
14415 test_vector_folding ();
14416 }
14417
14418 } // namespace selftest
14419
14420 #endif /* CHECKING_P */