/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
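
/* For illustration, folding the constant expression 2 + 3 in `sizetype'
   boils down to

     tree sum = size_binop (PLUS_EXPR, size_int (2), size_int (3));

   which yields the INTEGER_CST 5; non-constant operands instead fall
   through to fold_build2_loc below.  */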

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
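/* Each of the four bits stands for one of the mutually exclusive
   outcomes of a comparison: bit 0 for "less than", bit 1 for "equal",
   bit 2 for "greater than" and bit 3 for "unordered".  A code is the
   OR of the outcomes for which it is true, so for example
   COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ, and combining two
   comparisons with AND or OR reduces to bitwise AND or OR of their
   codes.  */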
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
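
/* For example, div_if_zero_remainder on the INTEGER_CSTs 12 and 4
   yields the INTEGER_CST 3, while on 13 and 4 it yields NULL_TREE
   because the division is inexact.  */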
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
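
/* A typical use of this deferral machinery (for illustration):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     fold_undefer_overflow_warnings (folded && TREE_CONSTANT (folded),
				     stmt, 0);

   so that the warning is only emitted when the folded result is
   actually used.  */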
\f
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */
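
/* For example, -(A - B) is cheap to negate as B - A when signed zeros
   and sign-dependent rounding are not honored, while an INTEGER_CST is
   cheap to negate unless it is the most negative value of its type
   (only the sign bit set), whose negation is not representable in
   two's complement.  */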

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = VECTOR_CST_NELTS (t), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but negating one of its
	 operands does when n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = VECTOR_CST_NELTS (t), i;

	auto_vec<tree, 32> elts (count);
	for (i = 0; i < count; i++)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
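
/* For example, with CODE == PLUS_EXPR, splitting X - 3 returns X and
   sets *MINUS_LITP to 3, while splitting 5 + Y returns Y and sets
   *LITP to 5.  */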

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
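      /* The type of a shift or rotate count need not match the type
	 of the other operand, so any integer types are acceptable
	 here.  */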
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants PARG1 and PARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (parg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
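      /* A negative shift count shifts in the opposite direction;
	 canonicalize it to a positive count.  */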
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN a qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN a qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi
	      */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = bi/br;
		     div = (bi * ratio) + br;
		     tr = (ai * ratio) + ar;
		     ti = ai - (ar * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = VECTOR_CST_NELTS (arg1), i;

      auto_vec<tree, 32> elts (count);
      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = VECTOR_CST_NELTS (arg1), i;

      auto_vec<tree, 32> elts (count);
      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as an argument, put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	in_nelts = VECTOR_CST_NELTS (arg1);
	out_nelts = in_nelts * 2;
	gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
		    && out_nelts == TYPE_VECTOR_SUBPARTS (type));

	auto_vec<tree, 32> elts (out_nelts);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	in_nelts = VECTOR_CST_NELTS (arg1);
	out_nelts = in_nelts / 2;
	gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
		    && out_nelts == TYPE_VECTOR_SUBPARTS (type));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	auto_vec<tree, 32> elts (out_nelts);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE, with ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  auto_vec<tree, 32> elements (count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	in_nelts = VECTOR_CST_NELTS (arg0);
	out_nelts = in_nelts / 2;
	gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	auto_vec<tree, 32> elts (out_nelts);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = VECTOR_CST_NELTS (arg0);

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	tree res = VECTOR_CST_ELT (arg0, 0);
	for (i = 1; i < nelts; i++)
	  {
	    res = const_binop (subcode, res, VECTOR_CST_ELT (arg0, i));
	    if (res == NULL_TREE || !CONSTANT_CLASS_P (res))
	      return NULL_TREE;
	  }

	return res;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
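
/* The size_int, ssize_int, bitsize_int and sbitsize_int macros (defined
   in fold-const.h) are convenience wrappers around size_int_kind for
   the individual sizetypes.  */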
\f
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */
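
/* For example, given the sizetype operands 2 and 5 the result is the
   ssizetype constant -3, computed as 0 - (5 - 2) so that no
   intermediate subtraction wraps in the unsigned type.  */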

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
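
  /* For example, with 32-bit int, (int) 1.0e30 folds to INT_MAX and
     (int) -1.0e30 to INT_MIN, each with TREE_OVERFLOW set on the
     result; (int) NaN folds to zero.  */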
1883
1884 wide_int val;
1885 REAL_VALUE_TYPE r;
1886 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1887
1888 switch (code)
1889 {
1890 case FIX_TRUNC_EXPR:
1891 real_trunc (&r, VOIDmode, &x);
1892 break;
1893
1894 default:
1895 gcc_unreachable ();
1896 }
1897
1898 /* If R is NaN, return zero and show we have an overflow. */
1899 if (REAL_VALUE_ISNAN (r))
1900 {
1901 overflow = true;
1902 val = wi::zero (TYPE_PRECISION (type));
1903 }
1904
1905 /* See if R is less than the lower bound or greater than the
1906 upper bound. */
1907
1908 if (! overflow)
1909 {
1910 tree lt = TYPE_MIN_VALUE (type);
1911 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1912 if (real_less (&r, &l))
1913 {
1914 overflow = true;
1915 val = wi::to_wide (lt);
1916 }
1917 }
1918
1919 if (! overflow)
1920 {
1921 tree ut = TYPE_MAX_VALUE (type);
1922 if (ut)
1923 {
1924 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1925 if (real_less (&u, &r))
1926 {
1927 overflow = true;
1928 val = wi::to_wide (ut);
1929 }
1930 }
1931 }
1932
1933 if (! overflow)
1934 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1935
1936 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1937 return t;
1938 }
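
/* Standalone sketch, not GCC code, of the saturating semantics
   implemented above, for a 32-bit signed target type: NaN maps to
   zero and out-of-range values clamp to the type's extremes
   (sat_fix_trunc_sketch is a hypothetical name).  */
#if 0
#include <math.h>
#include <stdint.h>

static int32_t
sat_fix_trunc_sketch (double r)
{
  if (isnan (r))
    return 0;			/* NaN -> zero, overflow noted.  */
  if (r < (double) INT32_MIN)
    return INT32_MIN;		/* below the range: saturate down.  */
  if (r > (double) INT32_MAX)
    return INT32_MAX;		/* above the range: saturate up.  */
  return (int32_t) r;		/* in range: truncate toward zero.  */
}
#endif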
1939
1940 /* A subroutine of fold_convert_const handling conversions of a
1941 FIXED_CST to an integer type. */
1942
1943 static tree
1944 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1945 {
1946 tree t;
1947 double_int temp, temp_trunc;
1948 scalar_mode mode;
1949
1950 /* Right shift FIXED_CST to temp by fbit. */
1951 temp = TREE_FIXED_CST (arg1).data;
1952 mode = TREE_FIXED_CST (arg1).mode;
1953 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1954 {
1955 temp = temp.rshift (GET_MODE_FBIT (mode),
1956 HOST_BITS_PER_DOUBLE_INT,
1957 SIGNED_FIXED_POINT_MODE_P (mode));
1958
1959 /* Left shift temp to temp_trunc by fbit. */
1960 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1961 HOST_BITS_PER_DOUBLE_INT,
1962 SIGNED_FIXED_POINT_MODE_P (mode));
1963 }
1964 else
1965 {
1966 temp = double_int_zero;
1967 temp_trunc = double_int_zero;
1968 }
1969
1970 /* If FIXED_CST is negative, we need to round the value toward 0 by
1971 adding 1 to TEMP when the discarded fraction bits are nonzero. */
1972 if (SIGNED_FIXED_POINT_MODE_P (mode)
1973 && temp_trunc.is_negative ()
1974 && TREE_FIXED_CST (arg1).data != temp_trunc)
1975 temp += double_int_one;
1976
1977 /* Given a fixed-point constant, make new constant with new type,
1978 appropriately sign-extended or truncated. */
1979 t = force_fit_type (type, temp, -1,
1980 (temp.is_negative ()
1981 && (TYPE_UNSIGNED (type)
1982 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1983 | TREE_OVERFLOW (arg1));
1984
1985 return t;
1986 }
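
/* Standalone sketch, not GCC code, of the round-toward-zero step
   above: an arithmetic right shift rounds toward minus infinity, so
   a negative value with nonzero fraction bits needs a +1 correction.
   With 4 fraction bits, -2.5 is encoded as -40; -40 >> 4 gives -3,
   fraction bits were lost, so add 1 to obtain -2.  */
#if 0
static long long
fixed_to_int_sketch (long long fixed, int fbit)
{
  long long t = fixed >> fbit;	/* Rounds toward -infinity.  */
  if (fixed < 0 && t * (1LL << fbit) != fixed)
    t += 1;			/* Round toward zero instead.  */
  return t;
}
#endif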
1987
1988 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1989 to another floating point type. */
1990
1991 static tree
1992 fold_convert_const_real_from_real (tree type, const_tree arg1)
1993 {
1994 REAL_VALUE_TYPE value;
1995 tree t;
1996
1997 /* Don't perform the operation if flag_signaling_nans is on
1998 and the operand is a signaling NaN. */
1999 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2000 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2001 return NULL_TREE;
2002
2003 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2004 t = build_real (type, value);
2005
2006 /* If converting an infinity or NAN to a representation that doesn't
2007 have one, set the overflow bit so that we can produce some kind of
2008 error message at the appropriate point if necessary. It's not the
2009 most user-friendly message, but it's better than nothing. */
2010 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2011 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2012 TREE_OVERFLOW (t) = 1;
2013 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2014 && !MODE_HAS_NANS (TYPE_MODE (type)))
2015 TREE_OVERFLOW (t) = 1;
2016 /* Regular overflow, conversion produced an infinity in a mode that
2017 can't represent them. */
2018 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2019 && REAL_VALUE_ISINF (value)
2020 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2021 TREE_OVERFLOW (t) = 1;
2022 else
2023 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2024 return t;
2025 }
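
/* Worked example for the overflow logic above: narrowing the
   REAL_CST 1e300 from double to float yields +Inf, but float has
   infinities, so only ARG1's overflow bit is propagated.  The
   "regular overflow" arm instead flags conversions that create an
   infinity in a format without one.  */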
2026
2027 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2028 to a floating point type. */
2029
2030 static tree
2031 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2032 {
2033 REAL_VALUE_TYPE value;
2034 tree t;
2035
2036 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2037 &TREE_FIXED_CST (arg1));
2038 t = build_real (type, value);
2039
2040 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2041 return t;
2042 }
2043
2044 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2045 to another fixed-point type. */
2046
2047 static tree
2048 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2049 {
2050 FIXED_VALUE_TYPE value;
2051 tree t;
2052 bool overflow_p;
2053
2054 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2055 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2056 t = build_fixed (type, value);
2057
2058 /* Propagate overflow flags. */
2059 if (overflow_p | TREE_OVERFLOW (arg1))
2060 TREE_OVERFLOW (t) = 1;
2061 return t;
2062 }
2063
2064 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2065 to a fixed-point type. */
2066
2067 static tree
2068 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2069 {
2070 FIXED_VALUE_TYPE value;
2071 tree t;
2072 bool overflow_p;
2073 double_int di;
2074
2075 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2076
2077 di.low = TREE_INT_CST_ELT (arg1, 0);
2078 if (TREE_INT_CST_NUNITS (arg1) == 1)
2079 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2080 else
2081 di.high = TREE_INT_CST_ELT (arg1, 1);
2082
2083 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2084 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2085 TYPE_SATURATING (type));
2086 t = build_fixed (type, value);
2087
2088 /* Propagate overflow flags. */
2089 if (overflow_p | TREE_OVERFLOW (arg1))
2090 TREE_OVERFLOW (t) = 1;
2091 return t;
2092 }
2093
2094 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2095 to a fixed-point type. */
2096
2097 static tree
2098 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2099 {
2100 FIXED_VALUE_TYPE value;
2101 tree t;
2102 bool overflow_p;
2103
2104 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2105 &TREE_REAL_CST (arg1),
2106 TYPE_SATURATING (type));
2107 t = build_fixed (type, value);
2108
2109 /* Propagate overflow flags. */
2110 if (overflow_p | TREE_OVERFLOW (arg1))
2111 TREE_OVERFLOW (t) = 1;
2112 return t;
2113 }
2114
2115 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2116 type TYPE. If no simplification can be done return NULL_TREE. */
2117
2118 static tree
2119 fold_convert_const (enum tree_code code, tree type, tree arg1)
2120 {
2121 if (TREE_TYPE (arg1) == type)
2122 return arg1;
2123
2124 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2125 || TREE_CODE (type) == OFFSET_TYPE)
2126 {
2127 if (TREE_CODE (arg1) == INTEGER_CST)
2128 return fold_convert_const_int_from_int (type, arg1);
2129 else if (TREE_CODE (arg1) == REAL_CST)
2130 return fold_convert_const_int_from_real (code, type, arg1);
2131 else if (TREE_CODE (arg1) == FIXED_CST)
2132 return fold_convert_const_int_from_fixed (type, arg1);
2133 }
2134 else if (TREE_CODE (type) == REAL_TYPE)
2135 {
2136 if (TREE_CODE (arg1) == INTEGER_CST)
2137 return build_real_from_int_cst (type, arg1);
2138 else if (TREE_CODE (arg1) == REAL_CST)
2139 return fold_convert_const_real_from_real (type, arg1);
2140 else if (TREE_CODE (arg1) == FIXED_CST)
2141 return fold_convert_const_real_from_fixed (type, arg1);
2142 }
2143 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2144 {
2145 if (TREE_CODE (arg1) == FIXED_CST)
2146 return fold_convert_const_fixed_from_fixed (type, arg1);
2147 else if (TREE_CODE (arg1) == INTEGER_CST)
2148 return fold_convert_const_fixed_from_int (type, arg1);
2149 else if (TREE_CODE (arg1) == REAL_CST)
2150 return fold_convert_const_fixed_from_real (type, arg1);
2151 }
2152 else if (TREE_CODE (type) == VECTOR_TYPE)
2153 {
2154 if (TREE_CODE (arg1) == VECTOR_CST
2155 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2156 {
2157 int len = VECTOR_CST_NELTS (arg1);
2158 tree elttype = TREE_TYPE (type);
2159 auto_vec<tree, 32> v (len);
2160 for (int i = 0; i < len; ++i)
2161 {
2162 tree elt = VECTOR_CST_ELT (arg1, i);
2163 tree cvt = fold_convert_const (code, elttype, elt);
2164 if (cvt == NULL_TREE)
2165 return NULL_TREE;
2166 v.quick_push (cvt);
2167 }
2168 return build_vector (type, v);
2169 }
2170 }
2171 return NULL_TREE;
2172 }
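
/* Usage sketch for the dispatcher above (the values are made up;
   build_int_cst and the *_type_node globals are existing GCC
   interfaces): fold "(unsigned char) 300" at compile time.  */
#if 0
tree cst = build_int_cst (integer_type_node, 300);
tree folded = fold_convert_const (NOP_EXPR, unsigned_char_type_node, cst);
/* FOLDED is an INTEGER_CST with value 44 (300 truncated mod 256).  */
#endif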
2173
2174 /* Construct a vector of zero elements of vector type TYPE. */
2175
2176 static tree
2177 build_zero_vector (tree type)
2178 {
2179 tree t;
2180
2181 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2182 return build_vector_from_val (type, t);
2183 }
2184
2185 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2186
2187 bool
2188 fold_convertible_p (const_tree type, const_tree arg)
2189 {
2190 tree orig = TREE_TYPE (arg);
2191
2192 if (type == orig)
2193 return true;
2194
2195 if (TREE_CODE (arg) == ERROR_MARK
2196 || TREE_CODE (type) == ERROR_MARK
2197 || TREE_CODE (orig) == ERROR_MARK)
2198 return false;
2199
2200 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2201 return true;
2202
2203 switch (TREE_CODE (type))
2204 {
2205 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2206 case POINTER_TYPE: case REFERENCE_TYPE:
2207 case OFFSET_TYPE:
2208 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2209 || TREE_CODE (orig) == OFFSET_TYPE);
2210
2211 case REAL_TYPE:
2212 case FIXED_POINT_TYPE:
2213 case VECTOR_TYPE:
2214 case VOID_TYPE:
2215 return TREE_CODE (type) == TREE_CODE (orig);
2216
2217 default:
2218 return false;
2219 }
2220 }
2221
2222 /* Convert expression ARG to type TYPE. Used by the middle-end for
2223 simple conversions in preference to calling the front-end's convert. */
2224
2225 tree
2226 fold_convert_loc (location_t loc, tree type, tree arg)
2227 {
2228 tree orig = TREE_TYPE (arg);
2229 tree tem;
2230
2231 if (type == orig)
2232 return arg;
2233
2234 if (TREE_CODE (arg) == ERROR_MARK
2235 || TREE_CODE (type) == ERROR_MARK
2236 || TREE_CODE (orig) == ERROR_MARK)
2237 return error_mark_node;
2238
2239 switch (TREE_CODE (type))
2240 {
2241 case POINTER_TYPE:
2242 case REFERENCE_TYPE:
2243 /* Handle conversions between pointers to different address spaces. */
2244 if (POINTER_TYPE_P (orig)
2245 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2246 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2247 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2248 /* fall through */
2249
2250 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2251 case OFFSET_TYPE:
2252 if (TREE_CODE (arg) == INTEGER_CST)
2253 {
2254 tem = fold_convert_const (NOP_EXPR, type, arg);
2255 if (tem != NULL_TREE)
2256 return tem;
2257 }
2258 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2259 || TREE_CODE (orig) == OFFSET_TYPE)
2260 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2261 if (TREE_CODE (orig) == COMPLEX_TYPE)
2262 return fold_convert_loc (loc, type,
2263 fold_build1_loc (loc, REALPART_EXPR,
2264 TREE_TYPE (orig), arg));
2265 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2266 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2267 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2268
2269 case REAL_TYPE:
2270 if (TREE_CODE (arg) == INTEGER_CST)
2271 {
2272 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2273 if (tem != NULL_TREE)
2274 return tem;
2275 }
2276 else if (TREE_CODE (arg) == REAL_CST)
2277 {
2278 tem = fold_convert_const (NOP_EXPR, type, arg);
2279 if (tem != NULL_TREE)
2280 return tem;
2281 }
2282 else if (TREE_CODE (arg) == FIXED_CST)
2283 {
2284 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2285 if (tem != NULL_TREE)
2286 return tem;
2287 }
2288
2289 switch (TREE_CODE (orig))
2290 {
2291 case INTEGER_TYPE:
2292 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2293 case POINTER_TYPE: case REFERENCE_TYPE:
2294 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2295
2296 case REAL_TYPE:
2297 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2298
2299 case FIXED_POINT_TYPE:
2300 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2301
2302 case COMPLEX_TYPE:
2303 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2304 return fold_convert_loc (loc, type, tem);
2305
2306 default:
2307 gcc_unreachable ();
2308 }
2309
2310 case FIXED_POINT_TYPE:
2311 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2312 || TREE_CODE (arg) == REAL_CST)
2313 {
2314 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2315 if (tem != NULL_TREE)
2316 goto fold_convert_exit;
2317 }
2318
2319 switch (TREE_CODE (orig))
2320 {
2321 case FIXED_POINT_TYPE:
2322 case INTEGER_TYPE:
2323 case ENUMERAL_TYPE:
2324 case BOOLEAN_TYPE:
2325 case REAL_TYPE:
2326 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2327
2328 case COMPLEX_TYPE:
2329 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2330 return fold_convert_loc (loc, type, tem);
2331
2332 default:
2333 gcc_unreachable ();
2334 }
2335
2336 case COMPLEX_TYPE:
2337 switch (TREE_CODE (orig))
2338 {
2339 case INTEGER_TYPE:
2340 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2341 case POINTER_TYPE: case REFERENCE_TYPE:
2342 case REAL_TYPE:
2343 case FIXED_POINT_TYPE:
2344 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2345 fold_convert_loc (loc, TREE_TYPE (type), arg),
2346 fold_convert_loc (loc, TREE_TYPE (type),
2347 integer_zero_node));
2348 case COMPLEX_TYPE:
2349 {
2350 tree rpart, ipart;
2351
2352 if (TREE_CODE (arg) == COMPLEX_EXPR)
2353 {
2354 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2355 TREE_OPERAND (arg, 0));
2356 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2357 TREE_OPERAND (arg, 1));
2358 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2359 }
2360
2361 arg = save_expr (arg);
2362 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2363 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2364 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2365 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2366 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2367 }
2368
2369 default:
2370 gcc_unreachable ();
2371 }
2372
2373 case VECTOR_TYPE:
2374 if (integer_zerop (arg))
2375 return build_zero_vector (type);
2376 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2377 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2378 || TREE_CODE (orig) == VECTOR_TYPE);
2379 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2380
2381 case VOID_TYPE:
2382 tem = fold_ignored_result (arg);
2383 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2384
2385 default:
2386 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2387 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2388 gcc_unreachable ();
2389 }
2390 fold_convert_exit:
2391 protected_set_expr_location_unshare (tem, loc);
2392 return tem;
2393 }
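
/* Usage sketch for the conversion dispatcher above, with Z standing
   for a hypothetical GENERIC expression of complex double type:
   converting it to a scalar takes the real part first.  */
#if 0
tree d = fold_convert_loc (UNKNOWN_LOCATION, double_type_node, z);
/* D is REALPART_EXPR <z>, folded further when Z is constant.  */
#endif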
2394 \f
2395 /* Return false if expr can be assumed not to be an lvalue, true
2396 otherwise. */
2397
2398 static bool
2399 maybe_lvalue_p (const_tree x)
2400 {
2401 /* We only need to wrap lvalue tree codes. */
2402 switch (TREE_CODE (x))
2403 {
2404 case VAR_DECL:
2405 case PARM_DECL:
2406 case RESULT_DECL:
2407 case LABEL_DECL:
2408 case FUNCTION_DECL:
2409 case SSA_NAME:
2410
2411 case COMPONENT_REF:
2412 case MEM_REF:
2413 case INDIRECT_REF:
2414 case ARRAY_REF:
2415 case ARRAY_RANGE_REF:
2416 case BIT_FIELD_REF:
2417 case OBJ_TYPE_REF:
2418
2419 case REALPART_EXPR:
2420 case IMAGPART_EXPR:
2421 case PREINCREMENT_EXPR:
2422 case PREDECREMENT_EXPR:
2423 case SAVE_EXPR:
2424 case TRY_CATCH_EXPR:
2425 case WITH_CLEANUP_EXPR:
2426 case COMPOUND_EXPR:
2427 case MODIFY_EXPR:
2428 case TARGET_EXPR:
2429 case COND_EXPR:
2430 case BIND_EXPR:
2431 break;
2432
2433 default:
2434 /* Assume the worst for front-end tree codes. */
2435 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2436 break;
2437 return false;
2438 }
2439
2440 return true;
2441 }
2442
2443 /* Return an expr equal to X but certainly not valid as an lvalue. */
2444
2445 tree
2446 non_lvalue_loc (location_t loc, tree x)
2447 {
2448 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2449 us. */
2450 if (in_gimple_form)
2451 return x;
2452
2453 if (! maybe_lvalue_p (x))
2454 return x;
2455 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2456 }
2457
2458 /* When pedantic, return an expr equal to X but certainly not valid as a
2459 pedantic lvalue. Otherwise, return X. */
2460
2461 static tree
2462 pedantic_non_lvalue_loc (location_t loc, tree x)
2463 {
2464 return protected_set_expr_location_unshare (x, loc);
2465 }
2466 \f
2467 /* Given a tree comparison code, return the code that is the logical inverse.
2468 It is generally not safe to do this for floating-point comparisons, except
2469 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2470 ERROR_MARK in this case. */
2471
2472 enum tree_code
2473 invert_tree_comparison (enum tree_code code, bool honor_nans)
2474 {
2475 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2476 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2477 return ERROR_MARK;
2478
2479 switch (code)
2480 {
2481 case EQ_EXPR:
2482 return NE_EXPR;
2483 case NE_EXPR:
2484 return EQ_EXPR;
2485 case GT_EXPR:
2486 return honor_nans ? UNLE_EXPR : LE_EXPR;
2487 case GE_EXPR:
2488 return honor_nans ? UNLT_EXPR : LT_EXPR;
2489 case LT_EXPR:
2490 return honor_nans ? UNGE_EXPR : GE_EXPR;
2491 case LE_EXPR:
2492 return honor_nans ? UNGT_EXPR : GT_EXPR;
2493 case LTGT_EXPR:
2494 return UNEQ_EXPR;
2495 case UNEQ_EXPR:
2496 return LTGT_EXPR;
2497 case UNGT_EXPR:
2498 return LE_EXPR;
2499 case UNGE_EXPR:
2500 return LT_EXPR;
2501 case UNLT_EXPR:
2502 return GE_EXPR;
2503 case UNLE_EXPR:
2504 return GT_EXPR;
2505 case ORDERED_EXPR:
2506 return UNORDERED_EXPR;
2507 case UNORDERED_EXPR:
2508 return ORDERED_EXPR;
2509 default:
2510 gcc_unreachable ();
2511 }
2512 }
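
/* Worked example for the NaN handling above: when x or y may be NaN,
   (x < y) and (x >= y) can both be false, so the inverse of LT_EXPR
   must be UNGE_EXPR ("unordered or >="), and under trapping math even
   that is unsafe, hence ERROR_MARK.  A sketch:  */
#if 0
enum tree_code i1 = invert_tree_comparison (LT_EXPR, false);
/* i1 == GE_EXPR: an exact inverse when NaNs cannot occur.  */
enum tree_code i2 = invert_tree_comparison (LT_EXPR, true);
/* i2 == UNGE_EXPR if !flag_trapping_math, else ERROR_MARK.  */
#endif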
2513
2514 /* Similar, but return the comparison that results if the operands are
2515 swapped. This is safe for floating-point. */
2516
2517 enum tree_code
2518 swap_tree_comparison (enum tree_code code)
2519 {
2520 switch (code)
2521 {
2522 case EQ_EXPR:
2523 case NE_EXPR:
2524 case ORDERED_EXPR:
2525 case UNORDERED_EXPR:
2526 case LTGT_EXPR:
2527 case UNEQ_EXPR:
2528 return code;
2529 case GT_EXPR:
2530 return LT_EXPR;
2531 case GE_EXPR:
2532 return LE_EXPR;
2533 case LT_EXPR:
2534 return GT_EXPR;
2535 case LE_EXPR:
2536 return GE_EXPR;
2537 case UNGT_EXPR:
2538 return UNLT_EXPR;
2539 case UNGE_EXPR:
2540 return UNLE_EXPR;
2541 case UNLT_EXPR:
2542 return UNGT_EXPR;
2543 case UNLE_EXPR:
2544 return UNGE_EXPR;
2545 default:
2546 gcc_unreachable ();
2547 }
2548 }
2549
2550
2551 /* Convert a comparison tree code from an enum tree_code representation
2552 into a compcode bit-based encoding. This function is the inverse of
2553 compcode_to_comparison. */
2554
2555 static enum comparison_code
2556 comparison_to_compcode (enum tree_code code)
2557 {
2558 switch (code)
2559 {
2560 case LT_EXPR:
2561 return COMPCODE_LT;
2562 case EQ_EXPR:
2563 return COMPCODE_EQ;
2564 case LE_EXPR:
2565 return COMPCODE_LE;
2566 case GT_EXPR:
2567 return COMPCODE_GT;
2568 case NE_EXPR:
2569 return COMPCODE_NE;
2570 case GE_EXPR:
2571 return COMPCODE_GE;
2572 case ORDERED_EXPR:
2573 return COMPCODE_ORD;
2574 case UNORDERED_EXPR:
2575 return COMPCODE_UNORD;
2576 case UNLT_EXPR:
2577 return COMPCODE_UNLT;
2578 case UNEQ_EXPR:
2579 return COMPCODE_UNEQ;
2580 case UNLE_EXPR:
2581 return COMPCODE_UNLE;
2582 case UNGT_EXPR:
2583 return COMPCODE_UNGT;
2584 case LTGT_EXPR:
2585 return COMPCODE_LTGT;
2586 case UNGE_EXPR:
2587 return COMPCODE_UNGE;
2588 default:
2589 gcc_unreachable ();
2590 }
2591 }
2592
2593 /* Convert a compcode bit-based encoding of a comparison operator back
2594 to GCC's enum tree_code representation. This function is the
2595 inverse of comparison_to_compcode. */
2596
2597 static enum tree_code
2598 compcode_to_comparison (enum comparison_code code)
2599 {
2600 switch (code)
2601 {
2602 case COMPCODE_LT:
2603 return LT_EXPR;
2604 case COMPCODE_EQ:
2605 return EQ_EXPR;
2606 case COMPCODE_LE:
2607 return LE_EXPR;
2608 case COMPCODE_GT:
2609 return GT_EXPR;
2610 case COMPCODE_NE:
2611 return NE_EXPR;
2612 case COMPCODE_GE:
2613 return GE_EXPR;
2614 case COMPCODE_ORD:
2615 return ORDERED_EXPR;
2616 case COMPCODE_UNORD:
2617 return UNORDERED_EXPR;
2618 case COMPCODE_UNLT:
2619 return UNLT_EXPR;
2620 case COMPCODE_UNEQ:
2621 return UNEQ_EXPR;
2622 case COMPCODE_UNLE:
2623 return UNLE_EXPR;
2624 case COMPCODE_UNGT:
2625 return UNGT_EXPR;
2626 case COMPCODE_LTGT:
2627 return LTGT_EXPR;
2628 case COMPCODE_UNGE:
2629 return UNGE_EXPR;
2630 default:
2631 gcc_unreachable ();
2632 }
2633 }
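
/* Worked example of the bit encoding these two functions translate:
   LT, EQ and GT each get their own bit, so logical combination is
   plain bit arithmetic:
     COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE
     COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ
   i.e. "(a <= b) && (a >= b)" reduces to "a == b" by ANDing codes.  */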
2634
2635 /* Return a tree for the comparison which is the combination of
2636 doing the AND or OR (depending on CODE) of the two operations LCODE
2637 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2638 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2639 if this makes the transformation invalid. */
2640
2641 tree
2642 combine_comparisons (location_t loc,
2643 enum tree_code code, enum tree_code lcode,
2644 enum tree_code rcode, tree truth_type,
2645 tree ll_arg, tree lr_arg)
2646 {
2647 bool honor_nans = HONOR_NANS (ll_arg);
2648 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2649 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2650 int compcode;
2651
2652 switch (code)
2653 {
2654 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2655 compcode = lcompcode & rcompcode;
2656 break;
2657
2658 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2659 compcode = lcompcode | rcompcode;
2660 break;
2661
2662 default:
2663 return NULL_TREE;
2664 }
2665
2666 if (!honor_nans)
2667 {
2668 /* Eliminate unordered comparisons, as well as LTGT and ORD
2669 which are not used unless the mode has NaNs. */
2670 compcode &= ~COMPCODE_UNORD;
2671 if (compcode == COMPCODE_LTGT)
2672 compcode = COMPCODE_NE;
2673 else if (compcode == COMPCODE_ORD)
2674 compcode = COMPCODE_TRUE;
2675 }
2676 else if (flag_trapping_math)
2677 {
2678 /* Check that the original operation and the optimized ones will trap
2679 under the same condition. */
2680 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2681 && (lcompcode != COMPCODE_EQ)
2682 && (lcompcode != COMPCODE_ORD);
2683 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2684 && (rcompcode != COMPCODE_EQ)
2685 && (rcompcode != COMPCODE_ORD);
2686 bool trap = (compcode & COMPCODE_UNORD) == 0
2687 && (compcode != COMPCODE_EQ)
2688 && (compcode != COMPCODE_ORD);
2689
2690 /* In a short-circuited boolean expression the LHS might be
2691 such that the RHS, if evaluated, will never trap. For
2692 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2693 if neither x nor y is NaN. (This is a mixed blessing: for
2694 example, the expression above will never trap, hence
2695 optimizing it to x < y would be invalid). */
2696 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2697 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2698 rtrap = false;
2699
2700 /* If the comparison was short-circuited, and only the RHS
2701 trapped, we may now generate a spurious trap. */
2702 if (rtrap && !ltrap
2703 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2704 return NULL_TREE;
2705
2706 /* If we changed the conditions that cause a trap, we lose. */
2707 if ((ltrap || rtrap) != trap)
2708 return NULL_TREE;
2709 }
2710
2711 if (compcode == COMPCODE_TRUE)
2712 return constant_boolean_node (true, truth_type);
2713 else if (compcode == COMPCODE_FALSE)
2714 return constant_boolean_node (false, truth_type);
2715 else
2716 {
2717 enum tree_code tcode;
2718
2719 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2720 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2721 }
2722 }
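
/* Usage sketch for combine_comparisons, with A and B standing for
   hypothetical integer operands: ORing COMPCODE_LT with COMPCODE_EQ
   yields COMPCODE_LE, so "(a < b) || (a == b)" folds to "a <= b".  */
#if 0
tree t = combine_comparisons (UNKNOWN_LOCATION, TRUTH_ORIF_EXPR,
			      LT_EXPR, EQ_EXPR, boolean_type_node,
			      a, b);
/* T is the tree "a <= b"; honor_nans is false for integers.  */
#endif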
2723 \f
2724 /* Return nonzero if two operands (typically of the same tree node)
2725 are necessarily equal. FLAGS modifies behavior as follows:
2726
2727 If OEP_ONLY_CONST is set, only return nonzero for constants.
2728 This function tests whether the operands are indistinguishable;
2729 it does not test whether they are equal using C's == operation.
2730 The distinction is important for IEEE floating point, because
2731 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2732 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2733
2734 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2735 even though it may hold multiple values during a function.
2736 This is because a GCC tree node guarantees that nothing else is
2737 executed between the evaluation of its "operands" (which may often
2738 be evaluated in arbitrary order). Hence if the operands themselves
2739 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2740 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2741 unset means assuming isochronic (or instantaneous) tree equivalence.
2742 Unless comparing arbitrary expression trees, such as from different
2743 statements, this flag can usually be left unset.
2744
2745 If OEP_PURE_SAME is set, then pure functions with identical arguments
2746 are considered the same. It is used when the caller has other ways
2747 to ensure that global memory is unchanged in between.
2748
2749 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2750 not values of expressions.
2751
2752 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2753 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2754
2755 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2756 any operand with a side effect. This is unnecessarily conservative in
2757 the case we know that arg0 and arg1 are in disjoint code paths (such as
2758 the arms of a ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2759 addresses with TREE_CONSTANT flag set so we know that &var == &var
2760 even if var is volatile. */
2761
2762 int
2763 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2764 {
2765 /* When checking, verify at the outermost operand_equal_p call that
2766 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2767 hash value. */
2768 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2769 {
2770 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2771 {
2772 if (arg0 != arg1)
2773 {
2774 inchash::hash hstate0 (0), hstate1 (0);
2775 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2776 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2777 hashval_t h0 = hstate0.end ();
2778 hashval_t h1 = hstate1.end ();
2779 gcc_assert (h0 == h1);
2780 }
2781 return 1;
2782 }
2783 else
2784 return 0;
2785 }
2786
2787 /* If either is ERROR_MARK, they aren't equal. */
2788 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2789 || TREE_TYPE (arg0) == error_mark_node
2790 || TREE_TYPE (arg1) == error_mark_node)
2791 return 0;
2792
2793 /* Similar, if either does not have a type (like a released SSA name),
2794 they aren't equal. */
2795 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2796 return 0;
2797
2798 /* We cannot consider pointers to different address spaces equal. */
2799 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2800 && POINTER_TYPE_P (TREE_TYPE (arg1))
2801 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2802 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2803 return 0;
2804
2805 /* Check equality of integer constants before bailing out due to
2806 precision differences. */
2807 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2808 {
2809 /* Address of INTEGER_CST is not defined; check that we did not forget
2810 to drop the OEP_ADDRESS_OF flags. */
2811 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2812 return tree_int_cst_equal (arg0, arg1);
2813 }
2814
2815 if (!(flags & OEP_ADDRESS_OF))
2816 {
2817 /* If both types don't have the same signedness, then we can't consider
2818 them equal. We must check this before the STRIP_NOPS calls
2819 because they may change the signedness of the arguments. As pointers
2820 strictly don't have a signedness, require either two pointers or
2821 two non-pointers as well. */
2822 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2823 || POINTER_TYPE_P (TREE_TYPE (arg0))
2824 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2825 return 0;
2826
2827 /* If both types don't have the same precision, then it is not safe
2828 to strip NOPs. */
2829 if (element_precision (TREE_TYPE (arg0))
2830 != element_precision (TREE_TYPE (arg1)))
2831 return 0;
2832
2833 STRIP_NOPS (arg0);
2834 STRIP_NOPS (arg1);
2835 }
2836 #if 0
2837 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2838 sanity check once the issue is solved. */
2839 else
2840 /* Addresses of conversions and SSA_NAMEs (and many other things)
2841 are not defined. Check that we did not forget to drop the
2842 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2843 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2844 && TREE_CODE (arg0) != SSA_NAME);
2845 #endif
2846
2847 /* In case both args are comparisons but with different comparison
2848 code, try to swap the comparison operands of one arg to produce
2849 a match and compare that variant. */
2850 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2851 && COMPARISON_CLASS_P (arg0)
2852 && COMPARISON_CLASS_P (arg1))
2853 {
2854 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2855
2856 if (TREE_CODE (arg0) == swap_code)
2857 return operand_equal_p (TREE_OPERAND (arg0, 0),
2858 TREE_OPERAND (arg1, 1), flags)
2859 && operand_equal_p (TREE_OPERAND (arg0, 1),
2860 TREE_OPERAND (arg1, 0), flags);
2861 }
2862
2863 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2864 {
2865 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2866 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2867 ;
2868 else if (flags & OEP_ADDRESS_OF)
2869 {
2870 /* If we are interested in comparing addresses ignore
2871 MEM_REF wrappings of the base that can appear just for
2872 TBAA reasons. */
2873 if (TREE_CODE (arg0) == MEM_REF
2874 && DECL_P (arg1)
2875 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2876 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2877 && integer_zerop (TREE_OPERAND (arg0, 1)))
2878 return 1;
2879 else if (TREE_CODE (arg1) == MEM_REF
2880 && DECL_P (arg0)
2881 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2882 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2883 && integer_zerop (TREE_OPERAND (arg1, 1)))
2884 return 1;
2885 return 0;
2886 }
2887 else
2888 return 0;
2889 }
2890
2891 /* When not checking addresses, this is needed for conversions and for
2892 COMPONENT_REF. Might as well play it safe and always test this. */
2893 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2894 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2895 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2896 && !(flags & OEP_ADDRESS_OF)))
2897 return 0;
2898
2899 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2900 We don't care about side effects in that case because the SAVE_EXPR
2901 takes care of that for us. In all other cases, two expressions are
2902 equal if they have no side effects. If we have two identical
2903 expressions with side effects that should be treated the same due
2904 to the only side effects being identical SAVE_EXPR's, that will
2905 be detected in the recursive calls below.
2906 If we are taking an invariant address of two identical objects
2907 they are necessarily equal as well. */
2908 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2909 && (TREE_CODE (arg0) == SAVE_EXPR
2910 || (flags & OEP_MATCH_SIDE_EFFECTS)
2911 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2912 return 1;
2913
2914 /* Next handle constant cases, those for which we can return 1 even
2915 if ONLY_CONST is set. */
2916 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2917 switch (TREE_CODE (arg0))
2918 {
2919 case INTEGER_CST:
2920 return tree_int_cst_equal (arg0, arg1);
2921
2922 case FIXED_CST:
2923 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2924 TREE_FIXED_CST (arg1));
2925
2926 case REAL_CST:
2927 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2928 return 1;
2929
2930
2931 if (!HONOR_SIGNED_ZEROS (arg0))
2932 {
2933 /* If we do not distinguish between positive and negative zero,
2934 consider them equal. */
2935 if (real_zerop (arg0) && real_zerop (arg1))
2936 return 1;
2937 }
2938 return 0;
2939
2940 case VECTOR_CST:
2941 {
2942 unsigned i;
2943
2944 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2945 return 0;
2946
2947 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2948 {
2949 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2950 VECTOR_CST_ELT (arg1, i), flags))
2951 return 0;
2952 }
2953 return 1;
2954 }
2955
2956 case COMPLEX_CST:
2957 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2958 flags)
2959 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2960 flags));
2961
2962 case STRING_CST:
2963 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2964 && ! memcmp (TREE_STRING_POINTER (arg0),
2965 TREE_STRING_POINTER (arg1),
2966 TREE_STRING_LENGTH (arg0)));
2967
2968 case ADDR_EXPR:
2969 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2970 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2971 flags | OEP_ADDRESS_OF
2972 | OEP_MATCH_SIDE_EFFECTS);
2973 case CONSTRUCTOR:
2974 /* In GIMPLE empty constructors are allowed in initializers of
2975 aggregates. */
2976 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2977 default:
2978 break;
2979 }
2980
2981 if (flags & OEP_ONLY_CONST)
2982 return 0;
2983
2984 /* Define macros to test an operand from arg0 and arg1 for equality and a
2985 variant that allows null and views null as being different from any
2986 non-null value. In the latter case, if either is null, then both
2987 must be; otherwise, do the normal comparison. */
2988 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2989 TREE_OPERAND (arg1, N), flags)
2990
2991 #define OP_SAME_WITH_NULL(N) \
2992 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2993 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2994
2995 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2996 {
2997 case tcc_unary:
2998 /* Two conversions are equal only if signedness and modes match. */
2999 switch (TREE_CODE (arg0))
3000 {
3001 CASE_CONVERT:
3002 case FIX_TRUNC_EXPR:
3003 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3004 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3005 return 0;
3006 break;
3007 default:
3008 break;
3009 }
3010
3011 return OP_SAME (0);
3012
3013
3014 case tcc_comparison:
3015 case tcc_binary:
3016 if (OP_SAME (0) && OP_SAME (1))
3017 return 1;
3018
3019 /* For commutative ops, allow the other order. */
3020 return (commutative_tree_code (TREE_CODE (arg0))
3021 && operand_equal_p (TREE_OPERAND (arg0, 0),
3022 TREE_OPERAND (arg1, 1), flags)
3023 && operand_equal_p (TREE_OPERAND (arg0, 1),
3024 TREE_OPERAND (arg1, 0), flags));
3025
3026 case tcc_reference:
3027 /* If either of the pointer (or reference) expressions we are
3028 dereferencing contains a side effect, these cannot be equal,
3029 but their addresses can be. */
3030 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3031 && (TREE_SIDE_EFFECTS (arg0)
3032 || TREE_SIDE_EFFECTS (arg1)))
3033 return 0;
3034
3035 switch (TREE_CODE (arg0))
3036 {
3037 case INDIRECT_REF:
3038 if (!(flags & OEP_ADDRESS_OF)
3039 && (TYPE_ALIGN (TREE_TYPE (arg0))
3040 != TYPE_ALIGN (TREE_TYPE (arg1))))
3041 return 0;
3042 flags &= ~OEP_ADDRESS_OF;
3043 return OP_SAME (0);
3044
3045 case IMAGPART_EXPR:
3046 /* Require the same offset. */
3047 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3048 TYPE_SIZE (TREE_TYPE (arg1)),
3049 flags & ~OEP_ADDRESS_OF))
3050 return 0;
3051
3052 /* Fallthru. */
3053 case REALPART_EXPR:
3054 case VIEW_CONVERT_EXPR:
3055 return OP_SAME (0);
3056
3057 case TARGET_MEM_REF:
3058 case MEM_REF:
3059 if (!(flags & OEP_ADDRESS_OF))
3060 {
3061 /* Require equal access sizes. */
3062 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3063 && (!TYPE_SIZE (TREE_TYPE (arg0))
3064 || !TYPE_SIZE (TREE_TYPE (arg1))
3065 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3066 TYPE_SIZE (TREE_TYPE (arg1)),
3067 flags)))
3068 return 0;
3069 /* Verify that access happens in similar types. */
3070 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3071 return 0;
3072 /* Verify that accesses are TBAA compatible. */
3073 if (!alias_ptr_types_compatible_p
3074 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3075 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3076 || (MR_DEPENDENCE_CLIQUE (arg0)
3077 != MR_DEPENDENCE_CLIQUE (arg1))
3078 || (MR_DEPENDENCE_BASE (arg0)
3079 != MR_DEPENDENCE_BASE (arg1)))
3080 return 0;
3081 /* Verify that alignment is compatible. */
3082 if (TYPE_ALIGN (TREE_TYPE (arg0))
3083 != TYPE_ALIGN (TREE_TYPE (arg1)))
3084 return 0;
3085 }
3086 flags &= ~OEP_ADDRESS_OF;
3087 return (OP_SAME (0) && OP_SAME (1)
3088 /* TARGET_MEM_REFs require equal extra operands. */
3089 && (TREE_CODE (arg0) != TARGET_MEM_REF
3090 || (OP_SAME_WITH_NULL (2)
3091 && OP_SAME_WITH_NULL (3)
3092 && OP_SAME_WITH_NULL (4))));
3093
3094 case ARRAY_REF:
3095 case ARRAY_RANGE_REF:
3096 if (!OP_SAME (0))
3097 return 0;
3098 flags &= ~OEP_ADDRESS_OF;
3099 /* Compare the array index by value first if it is constant, as we
3100 may have different types but the same value here. */
3101 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3102 TREE_OPERAND (arg1, 1))
3103 || OP_SAME (1))
3104 && OP_SAME_WITH_NULL (2)
3105 && OP_SAME_WITH_NULL (3)
3106 /* Compare low bound and element size as with OEP_ADDRESS_OF
3107 we have to account for the offset of the ref. */
3108 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3109 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3110 || (operand_equal_p (array_ref_low_bound
3111 (CONST_CAST_TREE (arg0)),
3112 array_ref_low_bound
3113 (CONST_CAST_TREE (arg1)), flags)
3114 && operand_equal_p (array_ref_element_size
3115 (CONST_CAST_TREE (arg0)),
3116 array_ref_element_size
3117 (CONST_CAST_TREE (arg1)),
3118 flags))));
3119
3120 case COMPONENT_REF:
3121 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3122 may be NULL when we're called to compare MEM_EXPRs. */
3123 if (!OP_SAME_WITH_NULL (0)
3124 || !OP_SAME (1))
3125 return 0;
3126 flags &= ~OEP_ADDRESS_OF;
3127 return OP_SAME_WITH_NULL (2);
3128
3129 case BIT_FIELD_REF:
3130 if (!OP_SAME (0))
3131 return 0;
3132 flags &= ~OEP_ADDRESS_OF;
3133 return OP_SAME (1) && OP_SAME (2);
3134
3135 default:
3136 return 0;
3137 }
3138
3139 case tcc_expression:
3140 switch (TREE_CODE (arg0))
3141 {
3142 case ADDR_EXPR:
3143 /* Be sure we pass right ADDRESS_OF flag. */
3144 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3145 return operand_equal_p (TREE_OPERAND (arg0, 0),
3146 TREE_OPERAND (arg1, 0),
3147 flags | OEP_ADDRESS_OF);
3148
3149 case TRUTH_NOT_EXPR:
3150 return OP_SAME (0);
3151
3152 case TRUTH_ANDIF_EXPR:
3153 case TRUTH_ORIF_EXPR:
3154 return OP_SAME (0) && OP_SAME (1);
3155
3156 case FMA_EXPR:
3157 case WIDEN_MULT_PLUS_EXPR:
3158 case WIDEN_MULT_MINUS_EXPR:
3159 if (!OP_SAME (2))
3160 return 0;
3161 /* The multiplication operands are commutative. */
3162 /* FALLTHRU */
3163
3164 case TRUTH_AND_EXPR:
3165 case TRUTH_OR_EXPR:
3166 case TRUTH_XOR_EXPR:
3167 if (OP_SAME (0) && OP_SAME (1))
3168 return 1;
3169
3170 /* Otherwise take into account this is a commutative operation. */
3171 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3172 TREE_OPERAND (arg1, 1), flags)
3173 && operand_equal_p (TREE_OPERAND (arg0, 1),
3174 TREE_OPERAND (arg1, 0), flags));
3175
3176 case COND_EXPR:
3177 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3178 return 0;
3179 flags &= ~OEP_ADDRESS_OF;
3180 return OP_SAME (0);
3181
3182 case BIT_INSERT_EXPR:
3183 /* BIT_INSERT_EXPR has an implicit operand in the type precision
3184 of op1. We need to check that they are the same. */
3185 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3186 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3187 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3188 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3189 return false;
3190 /* FALLTHRU */
3191
3192 case VEC_COND_EXPR:
3193 case DOT_PROD_EXPR:
3194 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3195
3196 case MODIFY_EXPR:
3197 case INIT_EXPR:
3198 case COMPOUND_EXPR:
3199 case PREDECREMENT_EXPR:
3200 case PREINCREMENT_EXPR:
3201 case POSTDECREMENT_EXPR:
3202 case POSTINCREMENT_EXPR:
3203 if (flags & OEP_LEXICOGRAPHIC)
3204 return OP_SAME (0) && OP_SAME (1);
3205 return 0;
3206
3207 case CLEANUP_POINT_EXPR:
3208 case EXPR_STMT:
3209 if (flags & OEP_LEXICOGRAPHIC)
3210 return OP_SAME (0);
3211 return 0;
3212
3213 default:
3214 return 0;
3215 }
3216
3217 case tcc_vl_exp:
3218 switch (TREE_CODE (arg0))
3219 {
3220 case CALL_EXPR:
3221 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3222 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3223 /* If one CALL_EXPR is internal and the other is a normal function
3224 call, then they are not equal. */
3225 return 0;
3226 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3227 {
3228 /* If the CALL_EXPRs call different internal functions, then they
3229 are not equal. */
3230 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3231 return 0;
3232 }
3233 else
3234 {
3235 /* If the CALL_EXPRs call different functions, then they are not
3236 equal. */
3237 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3238 flags))
3239 return 0;
3240 }
3241
3242 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3243 {
3244 unsigned int cef = call_expr_flags (arg0);
3245 if (flags & OEP_PURE_SAME)
3246 cef &= ECF_CONST | ECF_PURE;
3247 else
3248 cef &= ECF_CONST;
3249 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3250 return 0;
3251 }
3252
3253 /* Now see if all the arguments are the same. */
3254 {
3255 const_call_expr_arg_iterator iter0, iter1;
3256 const_tree a0, a1;
3257 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3258 a1 = first_const_call_expr_arg (arg1, &iter1);
3259 a0 && a1;
3260 a0 = next_const_call_expr_arg (&iter0),
3261 a1 = next_const_call_expr_arg (&iter1))
3262 if (! operand_equal_p (a0, a1, flags))
3263 return 0;
3264
3265 /* If we get here and both argument lists are exhausted
3266 then the CALL_EXPRs are equal. */
3267 return ! (a0 || a1);
3268 }
3269 default:
3270 return 0;
3271 }
3272
3273 case tcc_declaration:
3274 /* Consider __builtin_sqrt equal to sqrt. */
3275 return (TREE_CODE (arg0) == FUNCTION_DECL
3276 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3277 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3278 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3279
3280 case tcc_exceptional:
3281 if (TREE_CODE (arg0) == CONSTRUCTOR)
3282 {
3283 /* In GIMPLE constructors are used only to build vectors from
3284 elements. Individual elements in the constructor must be
3285 indexed in increasing order and form an initial sequence.
3286
3287 We make no effort to compare constructors in generic.
3288 (see sem_variable::equals in ipa-icf which can do so for
3289 constants). */
3290 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3291 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3292 return 0;
3293
3294 /* Be sure that vectors constructed have the same representation.
3295 So far we have only tested that element precisions and modes match.
3296 Vectors may be BLKmode, so also check that the numbers of
3297 parts match. */
3298 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3299 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3300 return 0;
3301
3302 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3303 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3304 unsigned int len = vec_safe_length (v0);
3305
3306 if (len != vec_safe_length (v1))
3307 return 0;
3308
3309 for (unsigned int i = 0; i < len; i++)
3310 {
3311 constructor_elt *c0 = &(*v0)[i];
3312 constructor_elt *c1 = &(*v1)[i];
3313
3314 if (!operand_equal_p (c0->value, c1->value, flags)
3315 /* In GIMPLE the indexes can be either NULL or matching i.
3316 Double check this so we won't get false
3317 positives for GENERIC. */
3318 || (c0->index
3319 && (TREE_CODE (c0->index) != INTEGER_CST
3320 || !compare_tree_int (c0->index, i)))
3321 || (c1->index
3322 && (TREE_CODE (c1->index) != INTEGER_CST
3323 || !compare_tree_int (c1->index, i))))
3324 return 0;
3325 }
3326 return 1;
3327 }
3328 else if (TREE_CODE (arg0) == STATEMENT_LIST
3329 && (flags & OEP_LEXICOGRAPHIC))
3330 {
3331 /* Compare the STATEMENT_LISTs. */
3332 tree_stmt_iterator tsi1, tsi2;
3333 tree body1 = CONST_CAST_TREE (arg0);
3334 tree body2 = CONST_CAST_TREE (arg1);
3335 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3336 tsi_next (&tsi1), tsi_next (&tsi2))
3337 {
3338 /* The lists don't have the same number of statements. */
3339 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3340 return 0;
3341 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3342 return 1;
3343 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3344 OEP_LEXICOGRAPHIC))
3345 return 0;
3346 }
3347 }
3348 return 0;
3349
3350 case tcc_statement:
3351 switch (TREE_CODE (arg0))
3352 {
3353 case RETURN_EXPR:
3354 if (flags & OEP_LEXICOGRAPHIC)
3355 return OP_SAME_WITH_NULL (0);
3356 return 0;
3357 default:
3358 return 0;
3359 }
3360
3361 default:
3362 return 0;
3363 }
3364
3365 #undef OP_SAME
3366 #undef OP_SAME_WITH_NULL
3367 }
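
/* Usage sketch: operand_equal_p compares by structure and value, not
   node identity, so two separately built INTEGER_CSTs for 42 count
   as equal operands.  */
#if 0
tree c1 = build_int_cst (integer_type_node, 42);
tree c2 = build_int_cst (integer_type_node, 42);
gcc_assert (operand_equal_p (c1, c2, OEP_ONLY_CONST));
#endif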
3368 \f
3369 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3370 with a different signedness or a narrower precision. */
3371
3372 static bool
3373 operand_equal_for_comparison_p (tree arg0, tree arg1)
3374 {
3375 if (operand_equal_p (arg0, arg1, 0))
3376 return true;
3377
3378 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3379 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3380 return false;
3381
3382 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3383 and see if the inner values are the same. This removes any
3384 signedness comparison, which doesn't matter here. */
3385 tree op0 = arg0;
3386 tree op1 = arg1;
3387 STRIP_NOPS (op0);
3388 STRIP_NOPS (op1);
3389 if (operand_equal_p (op0, op1, 0))
3390 return true;
3391
3392 /* Discard a single widening conversion from ARG1 and see if the inner
3393 value is the same as ARG0. */
3394 if (CONVERT_EXPR_P (arg1)
3395 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3396 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3397 < TYPE_PRECISION (TREE_TYPE (arg1))
3398 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3399 return true;
3400
3401 return false;
3402 }
3403 \f
3404 /* See if ARG is an expression that is either a comparison or is performing
3405 arithmetic on comparisons. The comparisons must only be comparing
3406 two different values, which will be stored in *CVAL1 and *CVAL2; if
3407 they are nonzero it means that some operands have already been found.
3408 No variables may be used anywhere else in the expression except in the
3409 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3410 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3411
3412 If this is true, return 1. Otherwise, return zero. */
3413
3414 static int
3415 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3416 {
3417 enum tree_code code = TREE_CODE (arg);
3418 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3419
3420 /* We can handle some of the tcc_expression cases here. */
3421 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3422 tclass = tcc_unary;
3423 else if (tclass == tcc_expression
3424 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3425 || code == COMPOUND_EXPR))
3426 tclass = tcc_binary;
3427
3428 else if (tclass == tcc_expression && code == SAVE_EXPR
3429 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3430 {
3431 /* If we've already found a CVAL1 or CVAL2, this expression is
3432 too complex to handle. */
3433 if (*cval1 || *cval2)
3434 return 0;
3435
3436 tclass = tcc_unary;
3437 *save_p = 1;
3438 }
3439
3440 switch (tclass)
3441 {
3442 case tcc_unary:
3443 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3444
3445 case tcc_binary:
3446 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3447 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3448 cval1, cval2, save_p));
3449
3450 case tcc_constant:
3451 return 1;
3452
3453 case tcc_expression:
3454 if (code == COND_EXPR)
3455 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3456 cval1, cval2, save_p)
3457 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3458 cval1, cval2, save_p)
3459 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3460 cval1, cval2, save_p));
3461 return 0;
3462
3463 case tcc_comparison:
3464 /* First see if we can handle the first operand, then the second. For
3465 the second operand, we know *CVAL1 can't be zero. It must be that
3466 one side of the comparison is each of the values; test for the
3467 case where this isn't true by failing if the two operands
3468 are the same. */
3469
3470 if (operand_equal_p (TREE_OPERAND (arg, 0),
3471 TREE_OPERAND (arg, 1), 0))
3472 return 0;
3473
3474 if (*cval1 == 0)
3475 *cval1 = TREE_OPERAND (arg, 0);
3476 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3477 ;
3478 else if (*cval2 == 0)
3479 *cval2 = TREE_OPERAND (arg, 0);
3480 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3481 ;
3482 else
3483 return 0;
3484
3485 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3486 ;
3487 else if (*cval2 == 0)
3488 *cval2 = TREE_OPERAND (arg, 1);
3489 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3490 ;
3491 else
3492 return 0;
3493
3494 return 1;
3495
3496 default:
3497 return 0;
3498 }
3499 }
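
/* Worked example for the walk above: given ARG = "(a < b) || (a == b)",
   the first comparison records *CVAL1 = a and *CVAL2 = b and the
   second re-uses both, so the function returns 1.  For
   "(a < b) && (b > c)", C is a third compared value, so it fails.  */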
3500 \f
3501 /* ARG is a tree that is known to contain just arithmetic operations and
3502 comparisons. Evaluate the operations in the tree substituting NEW0 for
3503 any occurrence of OLD0 as an operand of a comparison and likewise for
3504 NEW1 and OLD1. */
3505
3506 static tree
3507 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3508 tree old1, tree new1)
3509 {
3510 tree type = TREE_TYPE (arg);
3511 enum tree_code code = TREE_CODE (arg);
3512 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3513
3514 /* We can handle some of the tcc_expression cases here. */
3515 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3516 tclass = tcc_unary;
3517 else if (tclass == tcc_expression
3518 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3519 tclass = tcc_binary;
3520
3521 switch (tclass)
3522 {
3523 case tcc_unary:
3524 return fold_build1_loc (loc, code, type,
3525 eval_subst (loc, TREE_OPERAND (arg, 0),
3526 old0, new0, old1, new1));
3527
3528 case tcc_binary:
3529 return fold_build2_loc (loc, code, type,
3530 eval_subst (loc, TREE_OPERAND (arg, 0),
3531 old0, new0, old1, new1),
3532 eval_subst (loc, TREE_OPERAND (arg, 1),
3533 old0, new0, old1, new1));
3534
3535 case tcc_expression:
3536 switch (code)
3537 {
3538 case SAVE_EXPR:
3539 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3540 old1, new1);
3541
3542 case COMPOUND_EXPR:
3543 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3544 old1, new1);
3545
3546 case COND_EXPR:
3547 return fold_build3_loc (loc, code, type,
3548 eval_subst (loc, TREE_OPERAND (arg, 0),
3549 old0, new0, old1, new1),
3550 eval_subst (loc, TREE_OPERAND (arg, 1),
3551 old0, new0, old1, new1),
3552 eval_subst (loc, TREE_OPERAND (arg, 2),
3553 old0, new0, old1, new1));
3554 default:
3555 break;
3556 }
3557 /* Fall through - ??? */
3558
3559 case tcc_comparison:
3560 {
3561 tree arg0 = TREE_OPERAND (arg, 0);
3562 tree arg1 = TREE_OPERAND (arg, 1);
3563
3564 /* We need to check both for exact equality and tree equality. The
3565 former will be true if the operand has a side-effect. In that
3566 case, we know the operand occurred exactly once. */
3567
3568 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3569 arg0 = new0;
3570 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3571 arg0 = new1;
3572
3573 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3574 arg1 = new0;
3575 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3576 arg1 = new1;
3577
3578 return fold_build2_loc (loc, code, type, arg0, arg1);
3579 }
3580
3581 default:
3582 return arg;
3583 }
3584 }
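
/* Worked example: for ARG = "(a < b) && (b < c)", calling eval_subst
   with OLD0 = a, NEW0 = 0, OLD1 = b, NEW1 = 1 rebuilds the tree as
   "(0 < 1) && (1 < c)", which can then fold further.  */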
3585 \f
3586 /* Return a tree for the case when the result of an expression is RESULT
3587 converted to TYPE and OMITTED was previously an operand of the expression
3588 but is now not needed (e.g., we folded OMITTED * 0).
3589
3590 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3591 the conversion of RESULT to TYPE. */
3592
3593 tree
3594 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3595 {
3596 tree t = fold_convert_loc (loc, type, result);
3597
3598 /* If the resulting operand is an empty statement, just return the omitted
3599 statement cast to void. */
3600 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3601 return build1_loc (loc, NOP_EXPR, void_type_node,
3602 fold_ignored_result (omitted));
3603
3604 if (TREE_SIDE_EFFECTS (omitted))
3605 return build2_loc (loc, COMPOUND_EXPR, type,
3606 fold_ignored_result (omitted), t);
3607
3608 return non_lvalue_loc (loc, t);
3609 }
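
/* Worked example: when "f () * 0" folds to 0 but the call has side
   effects, omit_one_operand_loc (loc, type, integer_zero_node, call)
   produces the COMPOUND_EXPR "(void) f (), 0"; had the call been free
   of side effects, it would simply have been dropped.  */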
3610
3611 /* Return a tree for the case when the result of an expression is RESULT
3612 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3613 of the expression but are now not needed.
3614
3615 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3616 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3617 evaluated before OMITTED2. Otherwise, if neither has side effects,
3618 just do the conversion of RESULT to TYPE. */
3619
3620 tree
3621 omit_two_operands_loc (location_t loc, tree type, tree result,
3622 tree omitted1, tree omitted2)
3623 {
3624 tree t = fold_convert_loc (loc, type, result);
3625
3626 if (TREE_SIDE_EFFECTS (omitted2))
3627 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3628 if (TREE_SIDE_EFFECTS (omitted1))
3629 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3630
3631 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3632 }
3633
3634 \f
3635 /* Return a simplified tree node for the truth-negation of ARG. This
3636 never alters ARG itself. We assume that ARG is an operation that
3637 returns a truth value (0 or 1).
3638
3639 FIXME: one would think we would fold the result, but it causes
3640 problems with the dominator optimizer. */
3641
3642 static tree
3643 fold_truth_not_expr (location_t loc, tree arg)
3644 {
3645 tree type = TREE_TYPE (arg);
3646 enum tree_code code = TREE_CODE (arg);
3647 location_t loc1, loc2;
3648
3649 /* If this is a comparison, we can simply invert it, except for
3650 floating-point non-equality comparisons, in which case we just
3651 enclose a TRUTH_NOT_EXPR around what we have. */
3652
3653 if (TREE_CODE_CLASS (code) == tcc_comparison)
3654 {
3655 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3656 if (FLOAT_TYPE_P (op_type)
3657 && flag_trapping_math
3658 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3659 && code != NE_EXPR && code != EQ_EXPR)
3660 return NULL_TREE;
3661
3662 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3663 if (code == ERROR_MARK)
3664 return NULL_TREE;
3665
3666 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3667 TREE_OPERAND (arg, 1));
3668 if (TREE_NO_WARNING (arg))
3669 TREE_NO_WARNING (ret) = 1;
3670 return ret;
3671 }
3672
3673 switch (code)
3674 {
3675 case INTEGER_CST:
3676 return constant_boolean_node (integer_zerop (arg), type);
3677
3678 case TRUTH_AND_EXPR:
3679 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3680 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3681 return build2_loc (loc, TRUTH_OR_EXPR, type,
3682 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3683 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3684
3685 case TRUTH_OR_EXPR:
3686 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3687 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3688 return build2_loc (loc, TRUTH_AND_EXPR, type,
3689 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3690 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3691
3692 case TRUTH_XOR_EXPR:
3693 /* Here we can invert either operand. We invert the first operand
3694 unless the second operand is a TRUTH_NOT_EXPR in which case our
3695 result is the XOR of the first operand with the inside of the
3696 negation of the second operand. */
3697
3698 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3699 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3700 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3701 else
3702 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3703 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3704 TREE_OPERAND (arg, 1));
3705
3706 case TRUTH_ANDIF_EXPR:
3707 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3708 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3709 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3710 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3711 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3712
3713 case TRUTH_ORIF_EXPR:
3714 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3715 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3716 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3717 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3718 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3719
3720 case TRUTH_NOT_EXPR:
3721 return TREE_OPERAND (arg, 0);
3722
3723 case COND_EXPR:
3724 {
3725 tree arg1 = TREE_OPERAND (arg, 1);
3726 tree arg2 = TREE_OPERAND (arg, 2);
3727
3728 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3729 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3730
3731 /* A COND_EXPR may have a throw as one operand, which
3732 then has void type. Just leave void operands
3733 as they are. */
3734 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3735 VOID_TYPE_P (TREE_TYPE (arg1))
3736 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3737 VOID_TYPE_P (TREE_TYPE (arg2))
3738 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3739 }
3740
3741 case COMPOUND_EXPR:
3742 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3743 return build2_loc (loc, COMPOUND_EXPR, type,
3744 TREE_OPERAND (arg, 0),
3745 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3746
3747 case NON_LVALUE_EXPR:
3748 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3749 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3750
3751 CASE_CONVERT:
3752 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3753 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3754
3755 /* fall through */
3756
3757 case FLOAT_EXPR:
3758 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3759 return build1_loc (loc, TREE_CODE (arg), type,
3760 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3761
3762 case BIT_AND_EXPR:
3763 if (!integer_onep (TREE_OPERAND (arg, 1)))
3764 return NULL_TREE;
3765 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3766
3767 case SAVE_EXPR:
3768 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3769
3770 case CLEANUP_POINT_EXPR:
3771 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3772 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3773 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3774
3775 default:
3776 return NULL_TREE;
3777 }
3778 }
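
/* Illustrative sketch, not part of the original sources: the
   TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's laws.
   The hypothetical checker below verifies both identities
   exhaustively over 0/1 operands.  */

static int
demo_de_morgan_holds (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      if ((!(a && b)) != (!a || !b)
	  || (!(a || b)) != (!a && !b))
	return 0;
  return 1;	/* Both identities hold.  */
}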
3779
3780 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3781 assume that ARG is an operation that returns a truth value (0 or 1
3782 for scalars, 0 or -1 for vectors). Return the folded expression if
3783 folding is successful. Otherwise, return NULL_TREE. */
3784
3785 static tree
3786 fold_invert_truthvalue (location_t loc, tree arg)
3787 {
3788 tree type = TREE_TYPE (arg);
3789 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3790 ? BIT_NOT_EXPR
3791 : TRUTH_NOT_EXPR,
3792 type, arg);
3793 }
3794
3795 /* Return a simplified tree node for the truth-negation of ARG. This
3796 never alters ARG itself. We assume that ARG is an operation that
3797 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3798
3799 tree
3800 invert_truthvalue_loc (location_t loc, tree arg)
3801 {
3802 if (TREE_CODE (arg) == ERROR_MARK)
3803 return arg;
3804
3805 tree type = TREE_TYPE (arg);
3806 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3807 ? BIT_NOT_EXPR
3808 : TRUTH_NOT_EXPR,
3809 type, arg);
3810 }
3811 \f
3812 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3813 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3814 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3815 is the original memory reference used to preserve the alias set of
3816 the access. */
3817
3818 static tree
3819 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3820 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3821 int unsignedp, int reversep)
3822 {
3823 tree result, bftype;
3824
3825 /* Attempt not to lose the access path if possible. */
3826 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3827 {
3828 tree ninner = TREE_OPERAND (orig_inner, 0);
3829 machine_mode nmode;
3830 HOST_WIDE_INT nbitsize, nbitpos;
3831 tree noffset;
3832 int nunsignedp, nreversep, nvolatilep = 0;
3833 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3834 &noffset, &nmode, &nunsignedp,
3835 &nreversep, &nvolatilep);
3836 if (base == inner
3837 && noffset == NULL_TREE
3838 && nbitsize >= bitsize
3839 && nbitpos <= bitpos
3840 && bitpos + bitsize <= nbitpos + nbitsize
3841 && !reversep
3842 && !nreversep
3843 && !nvolatilep)
3844 {
3845 inner = ninner;
3846 bitpos -= nbitpos;
3847 }
3848 }
3849
3850 alias_set_type iset = get_alias_set (orig_inner);
3851 if (iset == 0 && get_alias_set (inner) != iset)
3852 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3853 build_fold_addr_expr (inner),
3854 build_int_cst (ptr_type_node, 0));
3855
3856 if (bitpos == 0 && !reversep)
3857 {
3858 tree size = TYPE_SIZE (TREE_TYPE (inner));
3859 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3860 || POINTER_TYPE_P (TREE_TYPE (inner)))
3861 && tree_fits_shwi_p (size)
3862 && tree_to_shwi (size) == bitsize)
3863 return fold_convert_loc (loc, type, inner);
3864 }
3865
3866 bftype = type;
3867 if (TYPE_PRECISION (bftype) != bitsize
3868 || TYPE_UNSIGNED (bftype) == !unsignedp)
3869 bftype = build_nonstandard_integer_type (bitsize, 0);
3870
3871 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3872 bitsize_int (bitsize), bitsize_int (bitpos));
3873 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3874
3875 if (bftype != type)
3876 result = fold_convert_loc (loc, type, result);
3877
3878 return result;
3879 }
3880
3881 /* Optimize a bit-field compare.
3882
3883    There are two cases: the first is a compare against a constant and the
3884 second is a comparison of two items where the fields are at the same
3885 bit position relative to the start of a chunk (byte, halfword, word)
3886 large enough to contain it. In these cases we can avoid the shift
3887 implicit in bitfield extractions.
3888
3889 For constants, we emit a compare of the shifted constant with the
3890 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3891 compared. For two fields at the same position, we do the ANDs with the
3892 similar mask and compare the result of the ANDs.
3893
3894 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3895 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3896 are the left and right operands of the comparison, respectively.
3897
3898 If the optimization described above can be done, we return the resulting
3899 tree. Otherwise we return zero. */
3900
3901 static tree
3902 optimize_bit_field_compare (location_t loc, enum tree_code code,
3903 tree compare_type, tree lhs, tree rhs)
3904 {
3905 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3906 tree type = TREE_TYPE (lhs);
3907 tree unsigned_type;
3908 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3909 machine_mode lmode, rmode;
3910 scalar_int_mode nmode;
3911 int lunsignedp, runsignedp;
3912 int lreversep, rreversep;
3913 int lvolatilep = 0, rvolatilep = 0;
3914 tree linner, rinner = NULL_TREE;
3915 tree mask;
3916 tree offset;
3917
3918 /* Get all the information about the extractions being done. If the bit size
3919      is the same as the size of the underlying object, we aren't doing an
3920 extraction at all and so can do nothing. We also don't want to
3921 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3922 then will no longer be able to replace it. */
3923 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3924 &lunsignedp, &lreversep, &lvolatilep);
3925 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3926 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3927 return 0;
3928
3929 if (const_p)
3930 rreversep = lreversep;
3931 else
3932 {
3933 /* If this is not a constant, we can only do something if bit positions,
3934 sizes, signedness and storage order are the same. */
3935 rinner
3936 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3937 &runsignedp, &rreversep, &rvolatilep);
3938
3939 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3940 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3941 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3942 return 0;
3943 }
3944
3945 /* Honor the C++ memory model and mimic what RTL expansion does. */
3946 unsigned HOST_WIDE_INT bitstart = 0;
3947 unsigned HOST_WIDE_INT bitend = 0;
3948 if (TREE_CODE (lhs) == COMPONENT_REF)
3949 {
3950 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
3951 if (offset != NULL_TREE)
3952 return 0;
3953 }
3954
3955 /* See if we can find a mode to refer to this field. We should be able to,
3956 but fail if we can't. */
3957 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
3958 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3959 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3960 TYPE_ALIGN (TREE_TYPE (rinner))),
3961 BITS_PER_WORD, false, &nmode))
3962 return 0;
3963
3964 /* Set signed and unsigned types of the precision of this mode for the
3965 shifts below. */
3966 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3967
3968 /* Compute the bit position and size for the new reference and our offset
3969 within it. If the new reference is the same size as the original, we
3970 won't optimize anything, so return zero. */
3971 nbitsize = GET_MODE_BITSIZE (nmode);
3972 nbitpos = lbitpos & ~ (nbitsize - 1);
3973 lbitpos -= nbitpos;
3974 if (nbitsize == lbitsize)
3975 return 0;
3976
3977 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3978 lbitpos = nbitsize - lbitsize - lbitpos;
3979
3980 /* Make the mask to be used against the extracted field. */
3981 mask = build_int_cst_type (unsigned_type, -1);
3982 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3983 mask = const_binop (RSHIFT_EXPR, mask,
3984 size_int (nbitsize - lbitsize - lbitpos));
3985
3986 if (! const_p)
3987 {
3988 if (nbitpos < 0)
3989 return 0;
3990
3991 /* If not comparing with constant, just rework the comparison
3992 and return. */
3993 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
3994 nbitsize, nbitpos, 1, lreversep);
3995 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
3996 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
3997 nbitsize, nbitpos, 1, rreversep);
3998 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
3999 return fold_build2_loc (loc, code, compare_type, t1, t2);
4000 }
4001
4002 /* Otherwise, we are handling the constant case. See if the constant is too
4003 big for the field. Warn and return a tree for 0 (false) if so. We do
4004 this not only for its own sake, but to avoid having to test for this
4005 error case below. If we didn't, we might generate wrong code.
4006
4007 For unsigned fields, the constant shifted right by the field length should
4008 be all zero. For signed fields, the high-order bits should agree with
4009 the sign bit. */
4010
4011 if (lunsignedp)
4012 {
4013 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4014 {
4015 warning (0, "comparison is always %d due to width of bit-field",
4016 code == NE_EXPR);
4017 return constant_boolean_node (code == NE_EXPR, compare_type);
4018 }
4019 }
4020 else
4021 {
4022 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4023 if (tem != 0 && tem != -1)
4024 {
4025 warning (0, "comparison is always %d due to width of bit-field",
4026 code == NE_EXPR);
4027 return constant_boolean_node (code == NE_EXPR, compare_type);
4028 }
4029 }
4030
4031 if (nbitpos < 0)
4032 return 0;
4033
4034 /* Single-bit compares should always be against zero. */
4035 if (lbitsize == 1 && ! integer_zerop (rhs))
4036 {
4037 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4038 rhs = build_int_cst (type, 0);
4039 }
4040
4041 /* Make a new bitfield reference, shift the constant over the
4042 appropriate number of bits and mask it with the computed mask
4043 (in case this was a signed field). If we changed it, make a new one. */
4044 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4045 nbitsize, nbitpos, 1, lreversep);
4046
4047 rhs = const_binop (BIT_AND_EXPR,
4048 const_binop (LSHIFT_EXPR,
4049 fold_convert_loc (loc, unsigned_type, rhs),
4050 size_int (lbitpos)),
4051 mask);
4052
4053 lhs = build2_loc (loc, code, compare_type,
4054 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4055 return lhs;
4056 }
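
/* Illustrative sketch, not part of the original sources: for a
   hypothetical layout with a 3-bit field at bit offset 4 of an 8-bit
   unit, comparing the field against a constant that fits in the field
   becomes a single mask-and-compare on the containing byte, with no
   extraction shift.  (The constant-too-wide case is warned about and
   folded to a constant above, so it is assumed away here.)  */

static int
demo_bit_field_compare (unsigned char word, unsigned char field_value)
{
  unsigned char mask = 0x7 << 4;	/* 3 bits at offset 4.  */
  /* ((word >> 4) & 0x7) == field_value, rewritten as the folder
     does: shift the constant instead of the field.  */
  return (word & mask) == ((unsigned char) (field_value << 4) & mask);
}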
4057 \f
4058 /* Subroutine for fold_truth_andor_1: decode a field reference.
4059
4060 If EXP is a comparison reference, we return the innermost reference.
4061
4062 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4063 set to the starting bit number.
4064
4065 If the innermost field can be completely contained in a mode-sized
4066 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4067
4068    *PVOLATILEP is set to 1 if any expression encountered is volatile;
4069 otherwise it is not changed.
4070
4071 *PUNSIGNEDP is set to the signedness of the field.
4072
4073 *PREVERSEP is set to the storage order of the field.
4074
4075 *PMASK is set to the mask used. This is either contained in a
4076 BIT_AND_EXPR or derived from the width of the field.
4077
4078 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4079
4080 Return 0 if this is not a component reference or is one that we can't
4081 do anything with. */
4082
4083 static tree
4084 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4085 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4086 int *punsignedp, int *preversep, int *pvolatilep,
4087 tree *pmask, tree *pand_mask)
4088 {
4089 tree exp = *exp_;
4090 tree outer_type = 0;
4091 tree and_mask = 0;
4092 tree mask, inner, offset;
4093 tree unsigned_type;
4094 unsigned int precision;
4095
4096 /* All the optimizations using this function assume integer fields.
4097 There are problems with FP fields since the type_for_size call
4098 below can fail for, e.g., XFmode. */
4099 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4100 return 0;
4101
4102 /* We are interested in the bare arrangement of bits, so strip everything
4103 that doesn't affect the machine mode. However, record the type of the
4104 outermost expression if it may matter below. */
4105 if (CONVERT_EXPR_P (exp)
4106 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4107 outer_type = TREE_TYPE (exp);
4108 STRIP_NOPS (exp);
4109
4110 if (TREE_CODE (exp) == BIT_AND_EXPR)
4111 {
4112 and_mask = TREE_OPERAND (exp, 1);
4113 exp = TREE_OPERAND (exp, 0);
4114 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4115 if (TREE_CODE (and_mask) != INTEGER_CST)
4116 return 0;
4117 }
4118
4119 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4120 punsignedp, preversep, pvolatilep);
4121 if ((inner == exp && and_mask == 0)
4122 || *pbitsize < 0 || offset != 0
4123 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4124 /* Reject out-of-bound accesses (PR79731). */
4125 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4126 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4127 *pbitpos + *pbitsize) < 0))
4128 return 0;
4129
4130 *exp_ = exp;
4131
4132 /* If the number of bits in the reference is the same as the bitsize of
4133 the outer type, then the outer type gives the signedness. Otherwise
4134 (in case of a small bitfield) the signedness is unchanged. */
4135 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4136 *punsignedp = TYPE_UNSIGNED (outer_type);
4137
4138 /* Compute the mask to access the bitfield. */
4139 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4140 precision = TYPE_PRECISION (unsigned_type);
4141
4142 mask = build_int_cst_type (unsigned_type, -1);
4143
4144 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4145 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4146
4147 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4148 if (and_mask != 0)
4149 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4150 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4151
4152 *pmask = mask;
4153 *pand_mask = and_mask;
4154 return inner;
4155 }
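
/* Illustrative sketch, not part of the original sources: the
   LSHIFT/RSHIFT pair above computes a mask of *PBITSIZE low-order
   ones.  In plain C, for a 32-bit unsigned type:  */

static unsigned int
demo_low_mask (unsigned int bitsize)
{
  /* Assumes 0 < bitsize <= 32, as holds for the types the folder
     picks; a shift by the full width would be undefined.  */
  unsigned int mask = ~0U;
  mask <<= 32 - bitsize;	/* Ones in the high BITSIZE bits.  */
  mask >>= 32 - bitsize;	/* Shift them down: low BITSIZE ones.  */
  return mask;
}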
4156
4157 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4158    bit positions and the type of MASK is signed.  */
4159
4160 static int
4161 all_ones_mask_p (const_tree mask, unsigned int size)
4162 {
4163 tree type = TREE_TYPE (mask);
4164 unsigned int precision = TYPE_PRECISION (type);
4165
4166 /* If this function returns true when the type of the mask is
4167 UNSIGNED, then there will be errors. In particular see
4168 gcc.c-torture/execute/990326-1.c. There does not appear to be
4169      any documentation paper trail as to why this is so.  But the
4170      pre-wide-int code worked with that restriction, and it has been
4171      preserved here.  */
4172 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4173 return false;
4174
4175 return wi::mask (size, false, precision) == wi::to_wide (mask);
4176 }
4177
4178 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4179 represents the sign bit of EXP's type. If EXP represents a sign
4180 or zero extension, also test VAL against the unextended type.
4181 The return value is the (sub)expression whose sign bit is VAL,
4182 or NULL_TREE otherwise. */
4183
4184 tree
4185 sign_bit_p (tree exp, const_tree val)
4186 {
4187 int width;
4188 tree t;
4189
4190 /* Tree EXP must have an integral type. */
4191 t = TREE_TYPE (exp);
4192 if (! INTEGRAL_TYPE_P (t))
4193 return NULL_TREE;
4194
4195 /* Tree VAL must be an integer constant. */
4196 if (TREE_CODE (val) != INTEGER_CST
4197 || TREE_OVERFLOW (val))
4198 return NULL_TREE;
4199
4200 width = TYPE_PRECISION (t);
4201 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4202 return exp;
4203
4204 /* Handle extension from a narrower type. */
4205 if (TREE_CODE (exp) == NOP_EXPR
4206 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4207 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4208
4209 return NULL_TREE;
4210 }
4211
4212 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4213 to be evaluated unconditionally. */
4214
4215 static int
4216 simple_operand_p (const_tree exp)
4217 {
4218 /* Strip any conversions that don't change the machine mode. */
4219 STRIP_NOPS (exp);
4220
4221 return (CONSTANT_CLASS_P (exp)
4222 || TREE_CODE (exp) == SSA_NAME
4223 || (DECL_P (exp)
4224 && ! TREE_ADDRESSABLE (exp)
4225 && ! TREE_THIS_VOLATILE (exp)
4226 && ! DECL_NONLOCAL (exp)
4227 /* Don't regard global variables as simple. They may be
4228 allocated in ways unknown to the compiler (shared memory,
4229 #pragma weak, etc). */
4230 && ! TREE_PUBLIC (exp)
4231 && ! DECL_EXTERNAL (exp)
4232 /* Weakrefs are not safe to be read, since they can be NULL.
4233 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4234 have DECL_WEAK flag set. */
4235 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4236 /* Loading a static variable is unduly expensive, but global
4237 registers aren't expensive. */
4238 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4239 }
4240
4241 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4242 to be evaluated unconditionally.
4243    In addition to simple_operand_p, we assume that comparisons, conversions,
4244    and logic-not operations are simple if their operands are simple, too.  */
4245
4246 static bool
4247 simple_operand_p_2 (tree exp)
4248 {
4249 enum tree_code code;
4250
4251 if (TREE_SIDE_EFFECTS (exp)
4252 || tree_could_trap_p (exp))
4253 return false;
4254
4255 while (CONVERT_EXPR_P (exp))
4256 exp = TREE_OPERAND (exp, 0);
4257
4258 code = TREE_CODE (exp);
4259
4260 if (TREE_CODE_CLASS (code) == tcc_comparison)
4261 return (simple_operand_p (TREE_OPERAND (exp, 0))
4262 && simple_operand_p (TREE_OPERAND (exp, 1)));
4263
4264 if (code == TRUTH_NOT_EXPR)
4265 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4266
4267 return simple_operand_p (exp);
4268 }
4269
4270 \f
4271 /* The following functions are subroutines to fold_range_test and allow it to
4272 try to change a logical combination of comparisons into a range test.
4273
4274 For example, both
4275 X == 2 || X == 3 || X == 4 || X == 5
4276 and
4277 X >= 2 && X <= 5
4278 are converted to
4279 (unsigned) (X - 2) <= 3
4280
4281 We describe each set of comparisons as being either inside or outside
4282 a range, using a variable named like IN_P, and then describe the
4283 range with a lower and upper bound. If one of the bounds is omitted,
4284 it represents either the highest or lowest value of the type.
4285
4286 In the comments below, we represent a range by two numbers in brackets
4287 preceded by a "+" to designate being inside that range, or a "-" to
4288 designate being outside that range, so the condition can be inverted by
4289 flipping the prefix. An omitted bound is represented by a "-". For
4290 example, "- [-, 10]" means being outside the range starting at the lowest
4291 possible value and ending at 10, in other words, being greater than 10.
4292 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4293 always false.
4294
4295 We set up things so that the missing bounds are handled in a consistent
4296 manner so neither a missing bound nor "true" and "false" need to be
4297 handled using a special case. */
4298
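
/* Illustrative sketch, not part of the original sources: the running
   example above can be checked directly in C.  */

static int
demo_range_test_equivalence (int x)
{
  int by_cases = (x == 2 || x == 3 || x == 4 || x == 5);
  /* The subtraction is done unsigned, so x < 2 wraps to a huge
     value and fails the <= 3 test.  */
  int by_range = (unsigned int) x - 2 <= 3;
  return by_cases == by_range;	/* Always 1.  */
}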
4299 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4300 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4301 and UPPER1_P are nonzero if the respective argument is an upper bound
4302 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4303 must be specified for a comparison. ARG1 will be converted to ARG0's
4304 type if both are specified. */
4305
4306 static tree
4307 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4308 tree arg1, int upper1_p)
4309 {
4310 tree tem;
4311 int result;
4312 int sgn0, sgn1;
4313
4314 /* If neither arg represents infinity, do the normal operation.
4315 Else, if not a comparison, return infinity. Else handle the special
4316 comparison rules. Note that most of the cases below won't occur, but
4317 are handled for consistency. */
4318
4319 if (arg0 != 0 && arg1 != 0)
4320 {
4321 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4322 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4323 STRIP_NOPS (tem);
4324 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4325 }
4326
4327 if (TREE_CODE_CLASS (code) != tcc_comparison)
4328 return 0;
4329
4330 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4331      for neither.  In real mathematics, we cannot assume open-ended
4332      ranges are the same.  But this is computer arithmetic, where numbers
4333      are finite, so we can model any unbounded range with a value Z that
4334      is greater than any representable number.  This permits us to treat
4335      unbounded ranges as equal.  */
4336 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4337 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4338 switch (code)
4339 {
4340 case EQ_EXPR:
4341 result = sgn0 == sgn1;
4342 break;
4343 case NE_EXPR:
4344 result = sgn0 != sgn1;
4345 break;
4346 case LT_EXPR:
4347 result = sgn0 < sgn1;
4348 break;
4349 case LE_EXPR:
4350 result = sgn0 <= sgn1;
4351 break;
4352 case GT_EXPR:
4353 result = sgn0 > sgn1;
4354 break;
4355 case GE_EXPR:
4356 result = sgn0 >= sgn1;
4357 break;
4358 default:
4359 gcc_unreachable ();
4360 }
4361
4362 return constant_boolean_node (result, type);
4363 }
4364 \f
4365 /* Helper routine for make_range. Perform one step for it, return
4366 new expression if the loop should continue or NULL_TREE if it should
4367 stop. */
4368
4369 tree
4370 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4371 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4372 bool *strict_overflow_p)
4373 {
4374 tree arg0_type = TREE_TYPE (arg0);
4375 tree n_low, n_high, low = *p_low, high = *p_high;
4376 int in_p = *p_in_p, n_in_p;
4377
4378 switch (code)
4379 {
4380 case TRUTH_NOT_EXPR:
4381 /* We can only do something if the range is testing for zero. */
4382 if (low == NULL_TREE || high == NULL_TREE
4383 || ! integer_zerop (low) || ! integer_zerop (high))
4384 return NULL_TREE;
4385 *p_in_p = ! in_p;
4386 return arg0;
4387
4388 case EQ_EXPR: case NE_EXPR:
4389 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4390 /* We can only do something if the range is testing for zero
4391 and if the second operand is an integer constant. Note that
4392 saying something is "in" the range we make is done by
4393 	 complementing IN_P, since IN_P is set in the initial case of
4394 	 being not equal to zero; "out" means leaving it alone.  */
4395 if (low == NULL_TREE || high == NULL_TREE
4396 || ! integer_zerop (low) || ! integer_zerop (high)
4397 || TREE_CODE (arg1) != INTEGER_CST)
4398 return NULL_TREE;
4399
4400 switch (code)
4401 {
4402 case NE_EXPR: /* - [c, c] */
4403 low = high = arg1;
4404 break;
4405 case EQ_EXPR: /* + [c, c] */
4406 in_p = ! in_p, low = high = arg1;
4407 break;
4408 case GT_EXPR: /* - [-, c] */
4409 low = 0, high = arg1;
4410 break;
4411 case GE_EXPR: /* + [c, -] */
4412 in_p = ! in_p, low = arg1, high = 0;
4413 break;
4414 case LT_EXPR: /* - [c, -] */
4415 low = arg1, high = 0;
4416 break;
4417 case LE_EXPR: /* + [-, c] */
4418 in_p = ! in_p, low = 0, high = arg1;
4419 break;
4420 default:
4421 gcc_unreachable ();
4422 }
4423
4424 /* If this is an unsigned comparison, we also know that EXP is
4425 greater than or equal to zero. We base the range tests we make
4426 on that fact, so we record it here so we can parse existing
4427 range tests. We test arg0_type since often the return type
4428 of, e.g. EQ_EXPR, is boolean. */
4429 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4430 {
4431 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4432 in_p, low, high, 1,
4433 build_int_cst (arg0_type, 0),
4434 NULL_TREE))
4435 return NULL_TREE;
4436
4437 in_p = n_in_p, low = n_low, high = n_high;
4438
4439 /* If the high bound is missing, but we have a nonzero low
4440 bound, reverse the range so it goes from zero to the low bound
4441 minus 1. */
4442 if (high == 0 && low && ! integer_zerop (low))
4443 {
4444 in_p = ! in_p;
4445 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4446 build_int_cst (TREE_TYPE (low), 1), 0);
4447 low = build_int_cst (arg0_type, 0);
4448 }
4449 }
4450
4451 *p_low = low;
4452 *p_high = high;
4453 *p_in_p = in_p;
4454 return arg0;
4455
4456 case NEGATE_EXPR:
4457 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4458 low and high are non-NULL, then normalize will DTRT. */
4459 if (!TYPE_UNSIGNED (arg0_type)
4460 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4461 {
4462 if (low == NULL_TREE)
4463 low = TYPE_MIN_VALUE (arg0_type);
4464 if (high == NULL_TREE)
4465 high = TYPE_MAX_VALUE (arg0_type);
4466 }
4467
4468 /* (-x) IN [a,b] -> x in [-b, -a] */
4469 n_low = range_binop (MINUS_EXPR, exp_type,
4470 build_int_cst (exp_type, 0),
4471 0, high, 1);
4472 n_high = range_binop (MINUS_EXPR, exp_type,
4473 build_int_cst (exp_type, 0),
4474 0, low, 0);
4475 if (n_high != 0 && TREE_OVERFLOW (n_high))
4476 return NULL_TREE;
4477 goto normalize;
4478
4479 case BIT_NOT_EXPR:
4480 /* ~ X -> -X - 1 */
4481 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4482 build_int_cst (exp_type, 1));
4483
4484 case PLUS_EXPR:
4485 case MINUS_EXPR:
4486 if (TREE_CODE (arg1) != INTEGER_CST)
4487 return NULL_TREE;
4488
4489 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4490 move a constant to the other side. */
4491 if (!TYPE_UNSIGNED (arg0_type)
4492 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4493 return NULL_TREE;
4494
4495 /* If EXP is signed, any overflow in the computation is undefined,
4496 so we don't worry about it so long as our computations on
4497 the bounds don't overflow. For unsigned, overflow is defined
4498 and this is exactly the right thing. */
4499 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4500 arg0_type, low, 0, arg1, 0);
4501 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4502 arg0_type, high, 1, arg1, 0);
4503 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4504 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4505 return NULL_TREE;
4506
4507 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4508 *strict_overflow_p = true;
4509
4510 normalize:
4511 /* Check for an unsigned range which has wrapped around the maximum
4512 value thus making n_high < n_low, and normalize it. */
4513 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4514 {
4515 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4516 build_int_cst (TREE_TYPE (n_high), 1), 0);
4517 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4518 build_int_cst (TREE_TYPE (n_low), 1), 0);
4519
4520 /* If the range is of the form +/- [ x+1, x ], we won't
4521 be able to normalize it. But then, it represents the
4522 whole range or the empty set, so make it
4523 +/- [ -, - ]. */
4524 if (tree_int_cst_equal (n_low, low)
4525 && tree_int_cst_equal (n_high, high))
4526 low = high = 0;
4527 else
4528 in_p = ! in_p;
4529 }
4530 else
4531 low = n_low, high = n_high;
4532
4533 *p_low = low;
4534 *p_high = high;
4535 *p_in_p = in_p;
4536 return arg0;
4537
4538 CASE_CONVERT:
4539 case NON_LVALUE_EXPR:
4540 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4541 return NULL_TREE;
4542
4543 if (! INTEGRAL_TYPE_P (arg0_type)
4544 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4545 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4546 return NULL_TREE;
4547
4548 n_low = low, n_high = high;
4549
4550 if (n_low != 0)
4551 n_low = fold_convert_loc (loc, arg0_type, n_low);
4552
4553 if (n_high != 0)
4554 n_high = fold_convert_loc (loc, arg0_type, n_high);
4555
4556       /* If we're converting arg0 from an unsigned type to exp's
4557 	 signed type, we will be doing the comparison as unsigned.
4558 The tests above have already verified that LOW and HIGH
4559 are both positive.
4560
4561 So we have to ensure that we will handle large unsigned
4562 values the same way that the current signed bounds treat
4563 negative values. */
4564
4565 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4566 {
4567 tree high_positive;
4568 tree equiv_type;
4569 /* For fixed-point modes, we need to pass the saturating flag
4570 as the 2nd parameter. */
4571 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4572 equiv_type
4573 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4574 TYPE_SATURATING (arg0_type));
4575 else
4576 equiv_type
4577 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4578
4579 /* A range without an upper bound is, naturally, unbounded.
4580 Since convert would have cropped a very large value, use
4581 the max value for the destination type. */
4582 high_positive
4583 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4584 : TYPE_MAX_VALUE (arg0_type);
4585
4586 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4587 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4588 fold_convert_loc (loc, arg0_type,
4589 high_positive),
4590 build_int_cst (arg0_type, 1));
4591
4592 /* If the low bound is specified, "and" the range with the
4593 range for which the original unsigned value will be
4594 positive. */
4595 if (low != 0)
4596 {
4597 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4598 1, fold_convert_loc (loc, arg0_type,
4599 integer_zero_node),
4600 high_positive))
4601 return NULL_TREE;
4602
4603 in_p = (n_in_p == in_p);
4604 }
4605 else
4606 {
4607 /* Otherwise, "or" the range with the range of the input
4608 that will be interpreted as negative. */
4609 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4610 1, fold_convert_loc (loc, arg0_type,
4611 integer_zero_node),
4612 high_positive))
4613 return NULL_TREE;
4614
4615 in_p = (in_p != n_in_p);
4616 }
4617 }
4618
4619 *p_low = n_low;
4620 *p_high = n_high;
4621 *p_in_p = in_p;
4622 return arg0;
4623
4624 default:
4625 return NULL_TREE;
4626 }
4627 }
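
/* Illustrative sketch, not part of the original sources: the
   PLUS_EXPR case with the "normalize" wrap-around handling, shown for
   a hypothetical unsigned char comparison.  Subtracting the constant
   from the bounds of [0, 20] gives the wrapped range [246, 10], which
   normalizes to the complement of [11, 245].  */

static int
demo_make_range_plus (unsigned char x)
{
  int shifted = (unsigned char) (x + 10) <= 20;	/* x + 10 in [0, 20].  */
  int normalized = !(x >= 11 && x <= 245);	/* - [11, 245].  */
  return shifted == normalized;	/* Always 1.  */
}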
4628
4629 /* Given EXP, a logical expression, set the range it is testing into
4630 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4631 actually being tested. *PLOW and *PHIGH will be made of the same
4632 type as the returned expression. If EXP is not a comparison, we
4633 will most likely not be returning a useful value and range. Set
4634 *STRICT_OVERFLOW_P to true if the return value is only valid
4635 because signed overflow is undefined; otherwise, do not change
4636 *STRICT_OVERFLOW_P. */
4637
4638 tree
4639 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4640 bool *strict_overflow_p)
4641 {
4642 enum tree_code code;
4643 tree arg0, arg1 = NULL_TREE;
4644 tree exp_type, nexp;
4645 int in_p;
4646 tree low, high;
4647 location_t loc = EXPR_LOCATION (exp);
4648
4649 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4650 and see if we can refine the range. Some of the cases below may not
4651      happen, but it doesn't seem worth worrying about this.  We keep
4652      looping as long as make_range_step can refine the range; once it
4653      returns NULL_TREE, we stop.  */
4654
4655 in_p = 0;
4656 low = high = build_int_cst (TREE_TYPE (exp), 0);
4657
4658 while (1)
4659 {
4660 code = TREE_CODE (exp);
4661 exp_type = TREE_TYPE (exp);
4662 arg0 = NULL_TREE;
4663
4664 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4665 {
4666 if (TREE_OPERAND_LENGTH (exp) > 0)
4667 arg0 = TREE_OPERAND (exp, 0);
4668 if (TREE_CODE_CLASS (code) == tcc_binary
4669 || TREE_CODE_CLASS (code) == tcc_comparison
4670 || (TREE_CODE_CLASS (code) == tcc_expression
4671 && TREE_OPERAND_LENGTH (exp) > 1))
4672 arg1 = TREE_OPERAND (exp, 1);
4673 }
4674 if (arg0 == NULL_TREE)
4675 break;
4676
4677 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4678 &high, &in_p, strict_overflow_p);
4679 if (nexp == NULL_TREE)
4680 break;
4681 exp = nexp;
4682 }
4683
4684 /* If EXP is a constant, we can evaluate whether this is true or false. */
4685 if (TREE_CODE (exp) == INTEGER_CST)
4686 {
4687 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4688 exp, 0, low, 0))
4689 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4690 exp, 1, high, 1)));
4691 low = high = 0;
4692 exp = 0;
4693 }
4694
4695 *pin_p = in_p, *plow = low, *phigh = high;
4696 return exp;
4697 }
4698
4699 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
4700    a bitwise check, i.e. when
4701 LOW == 0xXX...X00...0
4702 HIGH == 0xXX...X11...1
4703    Return the corresponding mask in MASK and the stem in VALUE.  */
4704
4705 static bool
4706 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4707 tree *value)
4708 {
4709 if (TREE_CODE (low) != INTEGER_CST
4710 || TREE_CODE (high) != INTEGER_CST)
4711 return false;
4712
4713 unsigned prec = TYPE_PRECISION (type);
4714 wide_int lo = wi::to_wide (low, prec);
4715 wide_int hi = wi::to_wide (high, prec);
4716
4717 wide_int end_mask = lo ^ hi;
4718 if ((end_mask & (end_mask + 1)) != 0
4719 || (lo & end_mask) != 0)
4720 return false;
4721
4722 wide_int stem_mask = ~end_mask;
4723 wide_int stem = lo & stem_mask;
4724 if (stem != (hi & stem_mask))
4725 return false;
4726
4727 *mask = wide_int_to_tree (type, stem_mask);
4728 *value = wide_int_to_tree (type, stem);
4729
4730 return true;
4731 }
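
/* Illustrative sketch, not part of the original sources: with the
   hypothetical bounds LOW = 0x30 and HIGH = 0x3f, the low four bits
   form the end mask and the rest is the stem, so the range check
   becomes one mask-and-compare.  */

static int
demo_maskable_range (unsigned int x)
{
  unsigned int low = 0x30, high = 0x3f;
  unsigned int end_mask = low ^ high;	/* 0x0f: of the form 0...01...1.  */
  unsigned int stem_mask = ~end_mask;	/* 0xfffffff0.  */
  unsigned int value = low & stem_mask;	/* 0x30, the stem.  */
  int by_bounds = low <= x && x <= high;
  int by_mask = (x & stem_mask) == value;
  return by_bounds == by_mask;		/* Always 1.  */
}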
4732 \f
4733 /* Helper routine for build_range_check and match.pd. Return the type to
4734 perform the check or NULL if it shouldn't be optimized. */
4735
4736 tree
4737 range_check_type (tree etype)
4738 {
4739 /* First make sure that arithmetic in this type is valid, then make sure
4740 that it wraps around. */
4741 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4742 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4743 TYPE_UNSIGNED (etype));
4744
4745 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4746 {
4747 tree utype, minv, maxv;
4748
4749 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4750 for the type in question, as we rely on this here. */
4751 utype = unsigned_type_for (etype);
4752 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4753 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4754 build_int_cst (TREE_TYPE (maxv), 1), 1);
4755 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4756
4757 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4758 minv, 1, maxv, 1)))
4759 etype = utype;
4760 else
4761 return NULL_TREE;
4762 }
4763 return etype;
4764 }
4765
4766 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4767 type, TYPE, return an expression to test if EXP is in (or out of, depending
4768 on IN_P) the range. Return 0 if the test couldn't be created. */
4769
4770 tree
4771 build_range_check (location_t loc, tree type, tree exp, int in_p,
4772 tree low, tree high)
4773 {
4774 tree etype = TREE_TYPE (exp), mask, value;
4775
4776 /* Disable this optimization for function pointer expressions
4777 on targets that require function pointer canonicalization. */
4778 if (targetm.have_canonicalize_funcptr_for_compare ()
4779 && TREE_CODE (etype) == POINTER_TYPE
4780 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4781 return NULL_TREE;
4782
4783 if (! in_p)
4784 {
4785 value = build_range_check (loc, type, exp, 1, low, high);
4786 if (value != 0)
4787 return invert_truthvalue_loc (loc, value);
4788
4789 return 0;
4790 }
4791
4792 if (low == 0 && high == 0)
4793 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4794
4795 if (low == 0)
4796 return fold_build2_loc (loc, LE_EXPR, type, exp,
4797 fold_convert_loc (loc, etype, high));
4798
4799 if (high == 0)
4800 return fold_build2_loc (loc, GE_EXPR, type, exp,
4801 fold_convert_loc (loc, etype, low));
4802
4803 if (operand_equal_p (low, high, 0))
4804 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4805 fold_convert_loc (loc, etype, low));
4806
4807 if (TREE_CODE (exp) == BIT_AND_EXPR
4808 && maskable_range_p (low, high, etype, &mask, &value))
4809 return fold_build2_loc (loc, EQ_EXPR, type,
4810 fold_build2_loc (loc, BIT_AND_EXPR, etype,
4811 exp, mask),
4812 value);
4813
4814 if (integer_zerop (low))
4815 {
4816 if (! TYPE_UNSIGNED (etype))
4817 {
4818 etype = unsigned_type_for (etype);
4819 high = fold_convert_loc (loc, etype, high);
4820 exp = fold_convert_loc (loc, etype, exp);
4821 }
4822 return build_range_check (loc, type, exp, 1, 0, high);
4823 }
4824
4825 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4826 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4827 {
4828 int prec = TYPE_PRECISION (etype);
4829
4830 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
4831 {
4832 if (TYPE_UNSIGNED (etype))
4833 {
4834 tree signed_etype = signed_type_for (etype);
4835 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4836 etype
4837 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4838 else
4839 etype = signed_etype;
4840 exp = fold_convert_loc (loc, etype, exp);
4841 }
4842 return fold_build2_loc (loc, GT_EXPR, type, exp,
4843 build_int_cst (etype, 0));
4844 }
4845 }
4846
4847 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4848    This requires wrap-around arithmetic for the type of the expression.  */
4849 etype = range_check_type (etype);
4850 if (etype == NULL_TREE)
4851 return NULL_TREE;
4852
4853 if (POINTER_TYPE_P (etype))
4854 etype = unsigned_type_for (etype);
4855
4856 high = fold_convert_loc (loc, etype, high);
4857 low = fold_convert_loc (loc, etype, low);
4858 exp = fold_convert_loc (loc, etype, exp);
4859
4860 value = const_binop (MINUS_EXPR, high, low);
4861
4862 if (value != 0 && !TREE_OVERFLOW (value))
4863 return build_range_check (loc, type,
4864 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4865 1, build_int_cst (etype, 0), value);
4866
4867 return 0;
4868 }
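
/* Illustrative sketch, not part of the original sources: two of the
   transformations above, checked in plain C for hypothetical bounds.  */

static int
demo_build_range_check (unsigned char c)
{
  /* (c >= 1 && c <= 127) as (signed char) c > 0; the conversion
     wraps modulo 256 on GCC's two's-complement targets.  */
  int signed_form = ((signed char) c > 0) == (c >= 1 && c <= 127);
  /* (c >= 10 && c <= 20) as one unsigned compare after subtracting
     the low bound; values below 10 wrap to huge numbers.  */
  int sub_form = ((unsigned int) c - 10 <= 10) == (c >= 10 && c <= 20);
  return signed_form && sub_form;	/* Always 1.  */
}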
4869 \f
4870 /* Return the predecessor of VAL in its type, handling the infinite case. */
4871
4872 static tree
4873 range_predecessor (tree val)
4874 {
4875 tree type = TREE_TYPE (val);
4876
4877 if (INTEGRAL_TYPE_P (type)
4878 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4879 return 0;
4880 else
4881 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4882 build_int_cst (TREE_TYPE (val), 1), 0);
4883 }
4884
4885 /* Return the successor of VAL in its type, handling the infinite case. */
4886
4887 static tree
4888 range_successor (tree val)
4889 {
4890 tree type = TREE_TYPE (val);
4891
4892 if (INTEGRAL_TYPE_P (type)
4893 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4894 return 0;
4895 else
4896 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4897 build_int_cst (TREE_TYPE (val), 1), 0);
4898 }
4899
4900 /* Given two ranges, see if we can merge them into one. Return 1 if we
4901 can, 0 if we can't. Set the output range into the specified parameters. */
4902
4903 bool
4904 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4905 tree high0, int in1_p, tree low1, tree high1)
4906 {
4907 int no_overlap;
4908 int subset;
4909 int temp;
4910 tree tem;
4911 int in_p;
4912 tree low, high;
4913 int lowequal = ((low0 == 0 && low1 == 0)
4914 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4915 low0, 0, low1, 0)));
4916 int highequal = ((high0 == 0 && high1 == 0)
4917 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4918 high0, 1, high1, 1)));
4919
4920 /* Make range 0 be the range that starts first, or ends last if they
4921    start at the same value.  Swap them if that is not the case.  */
4922 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4923 low0, 0, low1, 0))
4924 || (lowequal
4925 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4926 high1, 1, high0, 1))))
4927 {
4928 temp = in0_p, in0_p = in1_p, in1_p = temp;
4929 tem = low0, low0 = low1, low1 = tem;
4930 tem = high0, high0 = high1, high1 = tem;
4931 }
4932
4933 /* Now flag two cases, whether the ranges are disjoint or whether the
4934 second range is totally subsumed in the first. Note that the tests
4935 below are simplified by the ones above. */
4936 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4937 high0, 1, low1, 0));
4938 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4939 high1, 1, high0, 1));
4940
4941 /* We now have four cases, depending on whether we are including or
4942 excluding the two ranges. */
4943 if (in0_p && in1_p)
4944 {
4945 /* If they don't overlap, the result is false. If the second range
4946 is a subset it is the result. Otherwise, the range is from the start
4947 of the second to the end of the first. */
4948 if (no_overlap)
4949 in_p = 0, low = high = 0;
4950 else if (subset)
4951 in_p = 1, low = low1, high = high1;
4952 else
4953 in_p = 1, low = low1, high = high0;
4954 }
4955
4956 else if (in0_p && ! in1_p)
4957 {
4958 /* If they don't overlap, the result is the first range. If they are
4959 equal, the result is false. If the second range is a subset of the
4960 first, and the ranges begin at the same place, we go from just after
4961 the end of the second range to the end of the first. If the second
4962 range is not a subset of the first, or if it is a subset and both
4963 ranges end at the same place, the range starts at the start of the
4964 first range and ends just before the second range.
4965 Otherwise, we can't describe this as a single range. */
4966 if (no_overlap)
4967 in_p = 1, low = low0, high = high0;
4968 else if (lowequal && highequal)
4969 in_p = 0, low = high = 0;
4970 else if (subset && lowequal)
4971 {
4972 low = range_successor (high1);
4973 high = high0;
4974 in_p = 1;
4975 if (low == 0)
4976 {
4977 /* We are in the weird situation where high0 > high1 but
4978 high1 has no successor. Punt. */
4979 return 0;
4980 }
4981 }
4982 else if (! subset || highequal)
4983 {
4984 low = low0;
4985 high = range_predecessor (low1);
4986 in_p = 1;
4987 if (high == 0)
4988 {
4989 /* low0 < low1 but low1 has no predecessor. Punt. */
4990 return 0;
4991 }
4992 }
4993 else
4994 return 0;
4995 }
4996
4997 else if (! in0_p && in1_p)
4998 {
4999 /* If they don't overlap, the result is the second range. If the second
5000 is a subset of the first, the result is false. Otherwise,
5001 the range starts just after the first range and ends at the
5002 end of the second. */
5003 if (no_overlap)
5004 in_p = 1, low = low1, high = high1;
5005 else if (subset || highequal)
5006 in_p = 0, low = high = 0;
5007 else
5008 {
5009 low = range_successor (high0);
5010 high = high1;
5011 in_p = 1;
5012 if (low == 0)
5013 {
5014 /* high1 > high0 but high0 has no successor. Punt. */
5015 return 0;
5016 }
5017 }
5018 }
5019
5020 else
5021 {
5022 /* The case where we are excluding both ranges. Here the complex case
5023 is if they don't overlap. In that case, the only time we have a
5024 range is if they are adjacent. If the second is a subset of the
5025 first, the result is the first. Otherwise, the range to exclude
5026 starts at the beginning of the first range and ends at the end of the
5027 second. */
5028 if (no_overlap)
5029 {
5030 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5031 range_successor (high0),
5032 1, low1, 0)))
5033 in_p = 0, low = low0, high = high1;
5034 else
5035 {
5036 /* Canonicalize - [min, x] into - [-, x]. */
5037 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5038 switch (TREE_CODE (TREE_TYPE (low0)))
5039 {
5040 case ENUMERAL_TYPE:
5041 if (TYPE_PRECISION (TREE_TYPE (low0))
5042 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5043 break;
5044 /* FALLTHROUGH */
5045 case INTEGER_TYPE:
5046 if (tree_int_cst_equal (low0,
5047 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5048 low0 = 0;
5049 break;
5050 case POINTER_TYPE:
5051 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5052 && integer_zerop (low0))
5053 low0 = 0;
5054 break;
5055 default:
5056 break;
5057 }
5058
5059 /* Canonicalize - [x, max] into - [x, -]. */
5060 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5061 switch (TREE_CODE (TREE_TYPE (high1)))
5062 {
5063 case ENUMERAL_TYPE:
5064 if (TYPE_PRECISION (TREE_TYPE (high1))
5065 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5066 break;
5067 /* FALLTHROUGH */
5068 case INTEGER_TYPE:
5069 if (tree_int_cst_equal (high1,
5070 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5071 high1 = 0;
5072 break;
5073 case POINTER_TYPE:
5074 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5075 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5076 high1, 1,
5077 build_int_cst (TREE_TYPE (high1), 1),
5078 1)))
5079 high1 = 0;
5080 break;
5081 default:
5082 break;
5083 }
5084
5085 /* The ranges might be also adjacent between the maximum and
5086 minimum values of the given type. For
5087 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5088 return + [x + 1, y - 1]. */
5089 if (low0 == 0 && high1 == 0)
5090 {
5091 low = range_successor (high0);
5092 high = range_predecessor (low1);
5093 if (low == 0 || high == 0)
5094 return 0;
5095
5096 in_p = 1;
5097 }
5098 else
5099 return 0;
5100 }
5101 }
5102 else if (subset)
5103 in_p = 0, low = low0, high = high0;
5104 else
5105 in_p = 0, low = low0, high = high1;
5106 }
5107
5108 *pin_p = in_p, *plow = low, *phigh = high;
5109 return 1;
5110 }
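
/* Illustrative sketch, not part of the original sources: merging the
   two "in" ranges [2, 10] and [5, 20] under && yields [5, 10],
   matching the "start of the second to the end of the first" case
   above.  */

static int
demo_merge_ranges (int x)
{
  int both = (x >= 2 && x <= 10) && (x >= 5 && x <= 20);
  int merged = (x >= 5 && x <= 10);
  return both == merged;	/* Always 1.  */
}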
5111 \f
5112
5113 /* Subroutine of fold, looking inside expressions of the form
5114 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5115 of the COND_EXPR. This function is being used also to optimize
5116 A op B ? C : A, by reversing the comparison first.
5117
5118 Return a folded expression whose code is not a COND_EXPR
5119 anymore, or NULL_TREE if no folding opportunity is found. */
5120
5121 static tree
5122 fold_cond_expr_with_comparison (location_t loc, tree type,
5123 tree arg0, tree arg1, tree arg2)
5124 {
5125 enum tree_code comp_code = TREE_CODE (arg0);
5126 tree arg00 = TREE_OPERAND (arg0, 0);
5127 tree arg01 = TREE_OPERAND (arg0, 1);
5128 tree arg1_type = TREE_TYPE (arg1);
5129 tree tem;
5130
5131 STRIP_NOPS (arg1);
5132 STRIP_NOPS (arg2);
5133
5134 /* If we have A op 0 ? A : -A, consider applying the following
5135 transformations:
5136
5137 A == 0? A : -A same as -A
5138 A != 0? A : -A same as A
5139 A >= 0? A : -A same as abs (A)
5140 A > 0? A : -A same as abs (A)
5141 A <= 0? A : -A same as -abs (A)
5142 A < 0? A : -A same as -abs (A)
5143
5144 None of these transformations work for modes with signed
5145 zeros. If A is +/-0, the first two transformations will
5146 change the sign of the result (from +0 to -0, or vice
5147 versa). The last four will fix the sign of the result,
5148 even though the original expressions could be positive or
5149 negative, depending on the sign of A.
5150
5151 Note that all these transformations are correct if A is
5152 NaN, since the two alternatives (A and -A) are also NaNs. */
5153 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5154 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5155 ? real_zerop (arg01)
5156 : integer_zerop (arg01))
5157 && ((TREE_CODE (arg2) == NEGATE_EXPR
5158 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5159 /* In the case that A is of the form X-Y, '-A' (arg2) may
5160 have already been folded to Y-X, check for that. */
5161 || (TREE_CODE (arg1) == MINUS_EXPR
5162 && TREE_CODE (arg2) == MINUS_EXPR
5163 && operand_equal_p (TREE_OPERAND (arg1, 0),
5164 TREE_OPERAND (arg2, 1), 0)
5165 && operand_equal_p (TREE_OPERAND (arg1, 1),
5166 TREE_OPERAND (arg2, 0), 0))))
5167 switch (comp_code)
5168 {
5169 case EQ_EXPR:
5170 case UNEQ_EXPR:
5171 tem = fold_convert_loc (loc, arg1_type, arg1);
5172 return fold_convert_loc (loc, type, negate_expr (tem));
5173 case NE_EXPR:
5174 case LTGT_EXPR:
5175 return fold_convert_loc (loc, type, arg1);
5176 case UNGE_EXPR:
5177 case UNGT_EXPR:
5178 if (flag_trapping_math)
5179 break;
5180 /* Fall through. */
5181 case GE_EXPR:
5182 case GT_EXPR:
5183 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5184 break;
5185 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5186 return fold_convert_loc (loc, type, tem);
5187 case UNLE_EXPR:
5188 case UNLT_EXPR:
5189 if (flag_trapping_math)
5190 break;
5191 /* FALLTHRU */
5192 case LE_EXPR:
5193 case LT_EXPR:
5194 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5195 break;
5196 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5197 return negate_expr (fold_convert_loc (loc, type, tem));
5198 default:
5199 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5200 break;
5201 }
5202
5203 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5204 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5205 both transformations are correct when A is NaN: A != 0
5206 is then true, and A == 0 is false. */
5207
5208 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5209 && integer_zerop (arg01) && integer_zerop (arg2))
5210 {
5211 if (comp_code == NE_EXPR)
5212 return fold_convert_loc (loc, type, arg1);
5213 else if (comp_code == EQ_EXPR)
5214 return build_zero_cst (type);
5215 }
5216
5217 /* Try some transformations of A op B ? A : B.
5218
5219 A == B? A : B same as B
5220 A != B? A : B same as A
5221 A >= B? A : B same as max (A, B)
5222 A > B? A : B same as max (B, A)
5223 A <= B? A : B same as min (A, B)
5224 A < B? A : B same as min (B, A)
5225
5226 As above, these transformations don't work in the presence
5227 of signed zeros. For example, if A and B are zeros of
5228 opposite sign, the first two transformations will change
5229 the sign of the result. In the last four, the original
5230 expressions give different results for (A=+0, B=-0) and
5231 (A=-0, B=+0), but the transformed expressions do not.
5232
5233 The first two transformations are correct if either A or B
5234 is a NaN. In the first transformation, the condition will
5235 be false, and B will indeed be chosen. In the case of the
5236 second transformation, the condition A != B will be true,
5237 and A will be chosen.
5238
5239 The conversions to max() and min() are not correct if B is
5240 a number and A is not. The conditions in the original
5241 expressions will be false, so all four give B. The min()
5242 and max() versions would give a NaN instead. */
5243 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5244 && operand_equal_for_comparison_p (arg01, arg2)
5245 /* Avoid these transformations if the COND_EXPR may be used
5246 as an lvalue in the C++ front-end. PR c++/19199. */
5247 && (in_gimple_form
5248 || VECTOR_TYPE_P (type)
5249 || (! lang_GNU_CXX ()
5250 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5251 || ! maybe_lvalue_p (arg1)
5252 || ! maybe_lvalue_p (arg2)))
5253 {
5254 tree comp_op0 = arg00;
5255 tree comp_op1 = arg01;
5256 tree comp_type = TREE_TYPE (comp_op0);
5257
5258 switch (comp_code)
5259 {
5260 case EQ_EXPR:
5261 return fold_convert_loc (loc, type, arg2);
5262 case NE_EXPR:
5263 return fold_convert_loc (loc, type, arg1);
5264 case LE_EXPR:
5265 case LT_EXPR:
5266 case UNLE_EXPR:
5267 case UNLT_EXPR:
5268 /* In C++ a ?: expression can be an lvalue, so put the
5269 operand which will be used if they are equal first
5270 so that we can convert this back to the
5271 corresponding COND_EXPR. */
5272 if (!HONOR_NANS (arg1))
5273 {
5274 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5275 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5276 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5277 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5278 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5279 comp_op1, comp_op0);
5280 return fold_convert_loc (loc, type, tem);
5281 }
5282 break;
5283 case GE_EXPR:
5284 case GT_EXPR:
5285 case UNGE_EXPR:
5286 case UNGT_EXPR:
5287 if (!HONOR_NANS (arg1))
5288 {
5289 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5290 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5291 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5292 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5293 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5294 comp_op1, comp_op0);
5295 return fold_convert_loc (loc, type, tem);
5296 }
5297 break;
5298 case UNEQ_EXPR:
5299 if (!HONOR_NANS (arg1))
5300 return fold_convert_loc (loc, type, arg2);
5301 break;
5302 case LTGT_EXPR:
5303 if (!HONOR_NANS (arg1))
5304 return fold_convert_loc (loc, type, arg1);
5305 break;
5306 default:
5307 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5308 break;
5309 }
5310 }
5311
5312 return NULL_TREE;
5313 }
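
/* Illustrative sketch, not part of the original sources: the integer
   rows of the "A op 0 ? A : -A" table above can be checked directly.
   When A == 0, A and -A are both zero, so each branch collapses.
   (Negating INT_MIN overflows, so A is assumed to avoid it, much as
   the floating-point cases are guarded by HONOR_SIGNED_ZEROS.)  */

static int
demo_cond_identities (int a)
{
  int ne_form = (a != 0 ? a : -a) == a;		/* A != 0 ? A : -A is A.  */
  int eq_form = (a == 0 ? a : -a) == -a;	/* A == 0 ? A : -A is -A.  */
  return ne_form && eq_form;			/* Always 1.  */
}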
5314
5315
5316 \f
5317 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5318 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5319 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5320 false) >= 2)
5321 #endif
5322
5323 /* EXP is some logical combination of boolean tests. See if we can
5324 merge it into some range test. Return the new tree if so. */
5325
5326 static tree
5327 fold_range_test (location_t loc, enum tree_code code, tree type,
5328 tree op0, tree op1)
5329 {
5330 int or_op = (code == TRUTH_ORIF_EXPR
5331 || code == TRUTH_OR_EXPR);
5332 int in0_p, in1_p, in_p;
5333 tree low0, low1, low, high0, high1, high;
5334 bool strict_overflow_p = false;
5335 tree tem, lhs, rhs;
5336 const char * const warnmsg = G_("assuming signed overflow does not occur "
5337 "when simplifying range test");
5338
5339 if (!INTEGRAL_TYPE_P (type))
5340 return 0;
5341
5342 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5343 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5344
5345 /* If this is an OR operation, invert both sides; we will invert
5346 again at the end. */
5347 if (or_op)
5348 in0_p = ! in0_p, in1_p = ! in1_p;
5349
5350 /* If both expressions are the same, if we can merge the ranges, and we
5351 can build the range test, return it or it inverted. If one of the
5352 ranges is always true or always false, consider it to be the same
5353 expression as the other. */
5354 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5355 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5356 in1_p, low1, high1)
5357 && 0 != (tem = (build_range_check (loc, type,
5358 lhs != 0 ? lhs
5359 : rhs != 0 ? rhs : integer_zero_node,
5360 in_p, low, high))))
5361 {
5362 if (strict_overflow_p)
5363 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5364 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5365 }
5366
5367 /* On machines where the branch cost is expensive, if this is a
5368 short-circuited branch and the underlying object on both sides
5369 is the same, make a non-short-circuit operation. */
5370 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5371 && !flag_sanitize_coverage
5372 && lhs != 0 && rhs != 0
5373 && (code == TRUTH_ANDIF_EXPR
5374 || code == TRUTH_ORIF_EXPR)
5375 && operand_equal_p (lhs, rhs, 0))
5376 {
5377 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5378 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5379 which cases we can't do this. */
5380 if (simple_operand_p (lhs))
5381 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5382 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5383 type, op0, op1);
5384
5385 else if (!lang_hooks.decls.global_bindings_p ()
5386 && !CONTAINS_PLACEHOLDER_P (lhs))
5387 {
5388 tree common = save_expr (lhs);
5389
5390 if (0 != (lhs = build_range_check (loc, type, common,
5391 or_op ? ! in0_p : in0_p,
5392 low0, high0))
5393 && (0 != (rhs = build_range_check (loc, type, common,
5394 or_op ? ! in1_p : in1_p,
5395 low1, high1))))
5396 {
5397 if (strict_overflow_p)
5398 fold_overflow_warning (warnmsg,
5399 WARN_STRICT_OVERFLOW_COMPARISON);
5400 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5401 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5402 type, lhs, rhs);
5403 }
5404 }
5405 }
5406
5407 return 0;
5408 }
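
/* Editor's note: an illustrative sketch, not part of GCC; the function
   names are hypothetical.  At source level the merge above turns a pair
   of comparisons into one unsigned range check, as in the classic digit
   test.  The folded form is safe because the subtraction is done in the
   unsigned type, where wraparound is well defined.  */

static int
range_test_unfolded (int ch)
{
  return ch >= '0' && ch <= '9';
}

static int
range_test_folded (int ch)
{
  /* make_range computes the range ['0', '9']; build_range_check emits
     it as a single unsigned comparison after biasing by the low bound.  */
  return ((unsigned) ch - '0') <= 9u;
}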
5409 \f
5410 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5411 bit value. Arrange things so the extra bits will be set to zero if and
5412 only if C is sign-extended to its full width. If MASK is nonzero,
5413 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5414
5415 static tree
5416 unextend (tree c, int p, int unsignedp, tree mask)
5417 {
5418 tree type = TREE_TYPE (c);
5419 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5420 tree temp;
5421
5422 if (p == modesize || unsignedp)
5423 return c;
5424
5425 /* We work by getting just the sign bit into the low-order bit, then
5426 into the high-order bit, then sign-extend. We then XOR that value
5427 with C. */
5428 temp = build_int_cst (TREE_TYPE (c),
5429 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5430
5431 /* We must use a signed type in order to get an arithmetic right shift.
5432 However, we must also avoid introducing accidental overflows, so that
5433 a subsequent call to integer_zerop will work. Hence we must
5434 do the type conversion here. At this point, the constant is either
5435 zero or one, and the conversion to a signed type can never overflow.
5436 We could get an overflow if this conversion is done anywhere else. */
5437 if (TYPE_UNSIGNED (type))
5438 temp = fold_convert (signed_type_for (type), temp);
5439
5440 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5441 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5442 if (mask != 0)
5443 temp = const_binop (BIT_AND_EXPR, temp,
5444 fold_convert (TREE_TYPE (c), mask));
5445 /* If necessary, convert the type back to match the type of C. */
5446 if (TYPE_UNSIGNED (type))
5447 temp = fold_convert (type, temp);
5448
5449 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5450 }
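
/* Editor's note: an illustrative sketch of unextend, not part of GCC,
   for an 8-bit mode and P == 4.  It assumes two's complement and the
   (near-universal) arithmetic right shift of negative values.  XORing
   C with the smeared sign bit clears bits 4..7 exactly when they were
   all copies of the sign bit, i.e. when C was sign-extended.  */

static signed char
toy_unextend (signed char c)
{
  signed char sign = (c >> 3) & 1;	/* sign bit of the 4-bit field */
  /* Move the sign bit to bit 7, then smear it over bits 4..7
     (ext is 0x00 or 0xF0).  */
  signed char ext = (signed char) (sign << 7) >> 3;
  return c ^ ext;
}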
5451 \f
5452 /* For an expression that has the form
5453 (A && B) || ~B
5454 or
5455 (A || B) && ~B,
5456 we can drop one of the inner expressions and simplify to
5457 A || ~B
5458 or
5459 A && ~B
5460 LOC is the location of the resulting expression. OP is the inner
5461 logical operation; the left-hand side in the examples above, while CMPOP
5462 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5463 removing a condition that guards another, as in
5464 (A != NULL && A->...) || A == NULL
5465 which we must not transform. If RHS_ONLY is true, only eliminate the
5466 right-most operand of the inner logical operation. */
5467
5468 static tree
5469 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5470 bool rhs_only)
5471 {
5472 tree type = TREE_TYPE (cmpop);
5473 enum tree_code code = TREE_CODE (cmpop);
5474 enum tree_code truthop_code = TREE_CODE (op);
5475 tree lhs = TREE_OPERAND (op, 0);
5476 tree rhs = TREE_OPERAND (op, 1);
5477 tree orig_lhs = lhs, orig_rhs = rhs;
5478 enum tree_code rhs_code = TREE_CODE (rhs);
5479 enum tree_code lhs_code = TREE_CODE (lhs);
5480 enum tree_code inv_code;
5481
5482 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5483 return NULL_TREE;
5484
5485 if (TREE_CODE_CLASS (code) != tcc_comparison)
5486 return NULL_TREE;
5487
5488 if (rhs_code == truthop_code)
5489 {
5490 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5491 if (newrhs != NULL_TREE)
5492 {
5493 rhs = newrhs;
5494 rhs_code = TREE_CODE (rhs);
5495 }
5496 }
5497 if (lhs_code == truthop_code && !rhs_only)
5498 {
5499 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5500 if (newlhs != NULL_TREE)
5501 {
5502 lhs = newlhs;
5503 lhs_code = TREE_CODE (lhs);
5504 }
5505 }
5506
5507 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5508 if (inv_code == rhs_code
5509 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5510 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5511 return lhs;
5512 if (!rhs_only && inv_code == lhs_code
5513 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5514 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5515 return rhs;
5516 if (rhs != orig_rhs || lhs != orig_lhs)
5517 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5518 lhs, rhs);
5519 return NULL_TREE;
5520 }
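
/* Editor's note: an illustrative sketch, not part of GCC; the function
   names are hypothetical.  The arm merge above at source level: q != 0
   is the exact inverse of q == 0, so the inner occurrence can be
   dropped without changing the result.  */

static int
arm_merge_unfolded (int p, int q)
{
  return (p < 10 && q != 0) || q == 0;
}

static int
arm_merge_folded (int p, int q)
{
  return p < 10 || q == 0;
}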
5521
5522 /* Find ways of folding logical expressions of LHS and RHS:
5523 Try to merge two comparisons to the same innermost item.
5524 Look for range tests like "ch >= '0' && ch <= '9'".
5525 Look for combinations of simple terms on machines with expensive branches
5526 and evaluate the RHS unconditionally.
5527
5528 For example, if we have p->a == 2 && p->b == 4 and we can make an
5529 object large enough to span both A and B, we can do this with a comparison
5530 against the object ANDed with a mask.
5531
5532 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5533 operations to do this with one comparison.
5534
5535 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5536 function and the one above.
5537
5538 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5539 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5540
5541 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
5542 two operands.
5543
5544 We return the simplified tree or 0 if no optimization is possible. */
5545
5546 static tree
5547 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5548 tree lhs, tree rhs)
5549 {
5550 /* If this is the "or" of two comparisons, we can do something if
5551 the comparisons are NE_EXPR. If this is the "and", we can do something
5552 if the comparisons are EQ_EXPR. I.e.,
5553 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5554
5555 WANTED_CODE is this operation code. For single bit fields, we can
5556 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5557 comparison for one-bit fields. */
5558
5559 enum tree_code wanted_code;
5560 enum tree_code lcode, rcode;
5561 tree ll_arg, lr_arg, rl_arg, rr_arg;
5562 tree ll_inner, lr_inner, rl_inner, rr_inner;
5563 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5564 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5565 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5566 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5567 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5568 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5569 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5570 scalar_int_mode lnmode, rnmode;
5571 tree ll_mask, lr_mask, rl_mask, rr_mask;
5572 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5573 tree l_const, r_const;
5574 tree lntype, rntype, result;
5575 HOST_WIDE_INT first_bit, end_bit;
5576 int volatilep;
5577
5578 /* Start by getting the comparison codes. Fail if anything is volatile.
5579 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5580 it were surrounded with a NE_EXPR. */
5581
5582 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5583 return 0;
5584
5585 lcode = TREE_CODE (lhs);
5586 rcode = TREE_CODE (rhs);
5587
5588 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5589 {
5590 lhs = build2 (NE_EXPR, truth_type, lhs,
5591 build_int_cst (TREE_TYPE (lhs), 0));
5592 lcode = NE_EXPR;
5593 }
5594
5595 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5596 {
5597 rhs = build2 (NE_EXPR, truth_type, rhs,
5598 build_int_cst (TREE_TYPE (rhs), 0));
5599 rcode = NE_EXPR;
5600 }
5601
5602 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5603 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5604 return 0;
5605
5606 ll_arg = TREE_OPERAND (lhs, 0);
5607 lr_arg = TREE_OPERAND (lhs, 1);
5608 rl_arg = TREE_OPERAND (rhs, 0);
5609 rr_arg = TREE_OPERAND (rhs, 1);
5610
5611 /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
5612 if (simple_operand_p (ll_arg)
5613 && simple_operand_p (lr_arg))
5614 {
5615 if (operand_equal_p (ll_arg, rl_arg, 0)
5616 && operand_equal_p (lr_arg, rr_arg, 0))
5617 {
5618 result = combine_comparisons (loc, code, lcode, rcode,
5619 truth_type, ll_arg, lr_arg);
5620 if (result)
5621 return result;
5622 }
5623 else if (operand_equal_p (ll_arg, rr_arg, 0)
5624 && operand_equal_p (lr_arg, rl_arg, 0))
5625 {
5626 result = combine_comparisons (loc, code, lcode,
5627 swap_tree_comparison (rcode),
5628 truth_type, ll_arg, lr_arg);
5629 if (result)
5630 return result;
5631 }
5632 }
5633
5634 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5635 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5636
5637 /* If the RHS can be evaluated unconditionally and its operands are
5638 simple, it wins to evaluate the RHS unconditionally on machines
5639 with expensive branches. In this case, this isn't a comparison
5640 that can be merged. */
5641
5642 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5643 false) >= 2
5644 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5645 && simple_operand_p (rl_arg)
5646 && simple_operand_p (rr_arg))
5647 {
5648 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5649 if (code == TRUTH_OR_EXPR
5650 && lcode == NE_EXPR && integer_zerop (lr_arg)
5651 && rcode == NE_EXPR && integer_zerop (rr_arg)
5652 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5653 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5654 return build2_loc (loc, NE_EXPR, truth_type,
5655 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5656 ll_arg, rl_arg),
5657 build_int_cst (TREE_TYPE (ll_arg), 0));
5658
5659 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5660 if (code == TRUTH_AND_EXPR
5661 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5662 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5663 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5664 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5665 return build2_loc (loc, EQ_EXPR, truth_type,
5666 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5667 ll_arg, rl_arg),
5668 build_int_cst (TREE_TYPE (ll_arg), 0));
5669 }
5670
5671 /* See if the comparisons can be merged. Then get all the parameters for
5672 each side. */
5673
5674 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5675 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5676 return 0;
5677
5678 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5679 volatilep = 0;
5680 ll_inner = decode_field_reference (loc, &ll_arg,
5681 &ll_bitsize, &ll_bitpos, &ll_mode,
5682 &ll_unsignedp, &ll_reversep, &volatilep,
5683 &ll_mask, &ll_and_mask);
5684 lr_inner = decode_field_reference (loc, &lr_arg,
5685 &lr_bitsize, &lr_bitpos, &lr_mode,
5686 &lr_unsignedp, &lr_reversep, &volatilep,
5687 &lr_mask, &lr_and_mask);
5688 rl_inner = decode_field_reference (loc, &rl_arg,
5689 &rl_bitsize, &rl_bitpos, &rl_mode,
5690 &rl_unsignedp, &rl_reversep, &volatilep,
5691 &rl_mask, &rl_and_mask);
5692 rr_inner = decode_field_reference (loc, &rr_arg,
5693 &rr_bitsize, &rr_bitpos, &rr_mode,
5694 &rr_unsignedp, &rr_reversep, &volatilep,
5695 &rr_mask, &rr_and_mask);
5696
5697 /* The inner operation on the lhs of each comparison must be the
5698 same if we are to be able to do anything.
5699 Then see if we have constants. If not, the same must be true for
5700 the rhs's. */
5701 if (volatilep
5702 || ll_reversep != rl_reversep
5703 || ll_inner == 0 || rl_inner == 0
5704 || ! operand_equal_p (ll_inner, rl_inner, 0))
5705 return 0;
5706
5707 if (TREE_CODE (lr_arg) == INTEGER_CST
5708 && TREE_CODE (rr_arg) == INTEGER_CST)
5709 {
5710 l_const = lr_arg, r_const = rr_arg;
5711 lr_reversep = ll_reversep;
5712 }
5713 else if (lr_reversep != rr_reversep
5714 || lr_inner == 0 || rr_inner == 0
5715 || ! operand_equal_p (lr_inner, rr_inner, 0))
5716 return 0;
5717 else
5718 l_const = r_const = 0;
5719
5720 /* If either comparison code is not correct for our logical operation,
5721 fail. However, we can convert a one-bit comparison against zero into
5722 the opposite comparison against that bit being set in the field. */
5723
5724 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5725 if (lcode != wanted_code)
5726 {
5727 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5728 {
5729 /* Make the left operand unsigned, since we are only interested
5730 in the value of one bit. Otherwise we are doing the wrong
5731 thing below. */
5732 ll_unsignedp = 1;
5733 l_const = ll_mask;
5734 }
5735 else
5736 return 0;
5737 }
5738
5739 /* This is analogous to the code for l_const above. */
5740 if (rcode != wanted_code)
5741 {
5742 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5743 {
5744 rl_unsignedp = 1;
5745 r_const = rl_mask;
5746 }
5747 else
5748 return 0;
5749 }
5750
5751 /* See if we can find a mode that contains both fields being compared on
5752 the left. If we can't, fail. Otherwise, update all constants and masks
5753 to be relative to a field of that size. */
5754 first_bit = MIN (ll_bitpos, rl_bitpos);
5755 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5756 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5757 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
5758 volatilep, &lnmode))
5759 return 0;
5760
5761 lnbitsize = GET_MODE_BITSIZE (lnmode);
5762 lnbitpos = first_bit & ~ (lnbitsize - 1);
5763 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5764 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5765
5766 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5767 {
5768 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5769 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5770 }
5771
5772 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5773 size_int (xll_bitpos));
5774 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5775 size_int (xrl_bitpos));
5776
5777 if (l_const)
5778 {
5779 l_const = fold_convert_loc (loc, lntype, l_const);
5780 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5781 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5782 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5783 fold_build1_loc (loc, BIT_NOT_EXPR,
5784 lntype, ll_mask))))
5785 {
5786 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5787
5788 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5789 }
5790 }
5791 if (r_const)
5792 {
5793 r_const = fold_convert_loc (loc, lntype, r_const);
5794 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5795 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5796 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5797 fold_build1_loc (loc, BIT_NOT_EXPR,
5798 lntype, rl_mask))))
5799 {
5800 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5801
5802 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5803 }
5804 }
5805
5806 /* If the right sides are not constant, do the same for them. Also,
5807 disallow this optimization if a size or signedness mismatch occurs
5808 between the left and right sides. */
5809 if (l_const == 0)
5810 {
5811 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5812 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5813 /* Make sure the two fields on the right
5814 correspond to the left without being swapped. */
5815 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5816 return 0;
5817
5818 first_bit = MIN (lr_bitpos, rr_bitpos);
5819 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5820 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5821 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
5822 volatilep, &rnmode))
5823 return 0;
5824
5825 rnbitsize = GET_MODE_BITSIZE (rnmode);
5826 rnbitpos = first_bit & ~ (rnbitsize - 1);
5827 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5828 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5829
5830 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5831 {
5832 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5833 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5834 }
5835
5836 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5837 rntype, lr_mask),
5838 size_int (xlr_bitpos));
5839 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5840 rntype, rr_mask),
5841 size_int (xrr_bitpos));
5842
5843 /* Make a mask that corresponds to both fields being compared.
5844 Do this for both items being compared. If the operands are the
5845 same size and the bits being compared are in the same position
5846 then we can do this by masking both and comparing the masked
5847 results. */
5848 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5849 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5850 if (lnbitsize == rnbitsize
5851 && xll_bitpos == xlr_bitpos
5852 && lnbitpos >= 0
5853 && rnbitpos >= 0)
5854 {
5855 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5856 lntype, lnbitsize, lnbitpos,
5857 ll_unsignedp || rl_unsignedp, ll_reversep);
5858 if (! all_ones_mask_p (ll_mask, lnbitsize))
5859 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5860
5861 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5862 rntype, rnbitsize, rnbitpos,
5863 lr_unsignedp || rr_unsignedp, lr_reversep);
5864 if (! all_ones_mask_p (lr_mask, rnbitsize))
5865 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5866
5867 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5868 }
5869
5870 /* There is still another way we can do something: If both pairs of
5871 fields being compared are adjacent, we may be able to make a wider
5872 field containing them both.
5873
5874 Note that we still must mask the lhs/rhs expressions. Furthermore,
5875 the mask must be shifted to account for the shift done by
5876 make_bit_field_ref. */
5877 if (((ll_bitsize + ll_bitpos == rl_bitpos
5878 && lr_bitsize + lr_bitpos == rr_bitpos)
5879 || (ll_bitpos == rl_bitpos + rl_bitsize
5880 && lr_bitpos == rr_bitpos + rr_bitsize))
5881 && ll_bitpos >= 0
5882 && rl_bitpos >= 0
5883 && lr_bitpos >= 0
5884 && rr_bitpos >= 0)
5885 {
5886 tree type;
5887
5888 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5889 ll_bitsize + rl_bitsize,
5890 MIN (ll_bitpos, rl_bitpos),
5891 ll_unsignedp, ll_reversep);
5892 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5893 lr_bitsize + rr_bitsize,
5894 MIN (lr_bitpos, rr_bitpos),
5895 lr_unsignedp, lr_reversep);
5896
5897 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5898 size_int (MIN (xll_bitpos, xrl_bitpos)));
5899 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5900 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5901
5902 /* Convert to the smaller type before masking out unwanted bits. */
5903 type = lntype;
5904 if (lntype != rntype)
5905 {
5906 if (lnbitsize > rnbitsize)
5907 {
5908 lhs = fold_convert_loc (loc, rntype, lhs);
5909 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5910 type = rntype;
5911 }
5912 else if (lnbitsize < rnbitsize)
5913 {
5914 rhs = fold_convert_loc (loc, lntype, rhs);
5915 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5916 type = lntype;
5917 }
5918 }
5919
5920 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5921 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5922
5923 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5924 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5925
5926 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5927 }
5928
5929 return 0;
5930 }
5931
5932 /* Handle the case of comparisons with constants. If there is something in
5933 common between the masks, those bits of the constants must be the same.
5934 If not, the condition is always false. Test for this to avoid generating
5935 incorrect code below. */
5936 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5937 if (! integer_zerop (result)
5938 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5939 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5940 {
5941 if (wanted_code == NE_EXPR)
5942 {
5943 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5944 return constant_boolean_node (true, truth_type);
5945 }
5946 else
5947 {
5948 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5949 return constant_boolean_node (false, truth_type);
5950 }
5951 }
5952
5953 if (lnbitpos < 0)
5954 return 0;
5955
5956 /* Construct the expression we will return. First get the component
5957 reference we will make. Unless the mask is all ones the width of
5958 that field, perform the mask operation. Then compare with the
5959 merged constant. */
5960 result = make_bit_field_ref (loc, ll_inner, ll_arg,
5961 lntype, lnbitsize, lnbitpos,
5962 ll_unsignedp || rl_unsignedp, ll_reversep);
5963
5964 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5965 if (! all_ones_mask_p (ll_mask, lnbitsize))
5966 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5967
5968 return build2_loc (loc, wanted_code, truth_type, result,
5969 const_binop (BIT_IOR_EXPR, l_const, r_const));
5970 }
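
/* Editor's note: an illustrative sketch, not part of GCC, of the merge
   performed by fold_truth_andor_1.  Layout-dependent: it assumes a
   target where the two 4-bit fields share one byte, A in bits 0-3 and
   B in bits 4-7, so both tests become a single byte load and compare.  */

struct toy_bits { unsigned a : 4, b : 4; };

static int
bitfield_test_unfolded (const struct toy_bits *p)
{
  return p->a == 2 && p->b == 4;
}

static int
bitfield_test_folded (const struct toy_bits *p)
{
  /* One load, one compare: 2 in the low nibble, 4 in the high one.  */
  return *(const unsigned char *) p == (2 | 4 << 4);
}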
5971 \f
5972 /* T is an integer expression that is being multiplied or divided by, or
5973 taken modulo, a constant C (CODE says which operation and what kind of
5974 divide or modulus). See if we can eliminate that operation by folding it with
5975 other operations already in T. WIDE_TYPE, if non-null, is a type that
5976 should be used for the computation if wider than our type.
5977
5978 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5979 (X * 2) + (Y * 4). We must, however, be assured that either the original
5980 expression would not overflow or that overflow is undefined for the type
5981 in the language in question.
5982
5983 If we return a non-null expression, it is an equivalent form of the
5984 original computation, but need not be in the original type.
5985
5986 We set *STRICT_OVERFLOW_P to true if the return value depends on
5987 signed overflow being undefined. Otherwise we do not change
5988 *STRICT_OVERFLOW_P. */
5989
5990 static tree
5991 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5992 bool *strict_overflow_p)
5993 {
5994 /* To avoid exponential search depth, refuse to allow recursion past
5995 three levels. Beyond that (1) it's highly unlikely that we'll find
5996 something interesting and (2) we've probably processed it before
5997 when we built the inner expression. */
5998
5999 static int depth;
6000 tree ret;
6001
6002 if (depth > 3)
6003 return NULL;
6004
6005 depth++;
6006 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6007 depth--;
6008
6009 return ret;
6010 }
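
/* Editor's note: an illustrative sketch, not part of GCC; the function
   names are hypothetical.  The simplification described above, at
   source level: valid when signed overflow is undefined (no -fwrapv),
   so the division by 4 can be folded into the inner multiplications.  */

static long
muldiv_unfolded (long x, long y)
{
  return (x * 8 + y * 16) / 4;
}

static long
muldiv_folded (long x, long y)
{
  return x * 2 + y * 4;
}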
6011
6012 static tree
6013 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6014 bool *strict_overflow_p)
6015 {
6016 tree type = TREE_TYPE (t);
6017 enum tree_code tcode = TREE_CODE (t);
6018 tree ctype = (wide_type != 0
6019 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6020 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6021 ? wide_type : type);
6022 tree t1, t2;
6023 int same_p = tcode == code;
6024 tree op0 = NULL_TREE, op1 = NULL_TREE;
6025 bool sub_strict_overflow_p;
6026
6027 /* Don't deal with constants of zero here; they confuse the code below. */
6028 if (integer_zerop (c))
6029 return NULL_TREE;
6030
6031 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6032 op0 = TREE_OPERAND (t, 0);
6033
6034 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6035 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6036
6037 /* Note that we need not handle conditional operations here since fold
6038 already handles those cases. So just do arithmetic here. */
6039 switch (tcode)
6040 {
6041 case INTEGER_CST:
6042 /* For a constant, we can always simplify if we are a multiply
6043 or (for divide and modulus) if it is a multiple of our constant. */
6044 if (code == MULT_EXPR
6045 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6046 TYPE_SIGN (type)))
6047 {
6048 tree tem = const_binop (code, fold_convert (ctype, t),
6049 fold_convert (ctype, c));
6050 /* If the multiplication overflowed, we lost information on it.
6051 See PR68142 and PR69845. */
6052 if (TREE_OVERFLOW (tem))
6053 return NULL_TREE;
6054 return tem;
6055 }
6056 break;
6057
6058 CASE_CONVERT: case NON_LVALUE_EXPR:
6059 /* If op0 is an expression ... */
6060 if ((COMPARISON_CLASS_P (op0)
6061 || UNARY_CLASS_P (op0)
6062 || BINARY_CLASS_P (op0)
6063 || VL_EXP_CLASS_P (op0)
6064 || EXPRESSION_CLASS_P (op0))
6065 /* ... and has wrapping overflow, and its type is smaller
6066 than ctype, then we cannot pass through as widening. */
6067 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6068 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6069 && (TYPE_PRECISION (ctype)
6070 > TYPE_PRECISION (TREE_TYPE (op0))))
6071 /* ... or this is a truncation (t is narrower than op0),
6072 then we cannot pass through this narrowing. */
6073 || (TYPE_PRECISION (type)
6074 < TYPE_PRECISION (TREE_TYPE (op0)))
6075 /* ... or signedness changes for division or modulus,
6076 then we cannot pass through this conversion. */
6077 || (code != MULT_EXPR
6078 && (TYPE_UNSIGNED (ctype)
6079 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6080 /* ... or has undefined overflow while the converted to
6081 type has not, we cannot do the operation in the inner type
6082 as that would introduce undefined overflow. */
6083 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6084 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6085 && !TYPE_OVERFLOW_UNDEFINED (type))))
6086 break;
6087
6088 /* Pass the constant down and see if we can make a simplification. If
6089 we can, replace this expression with the inner simplification for
6090 possible later conversion to our or some other type. */
6091 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6092 && TREE_CODE (t2) == INTEGER_CST
6093 && !TREE_OVERFLOW (t2)
6094 && (0 != (t1 = extract_muldiv (op0, t2, code,
6095 code == MULT_EXPR
6096 ? ctype : NULL_TREE,
6097 strict_overflow_p))))
6098 return t1;
6099 break;
6100
6101 case ABS_EXPR:
6102 /* If widening the type changes it from signed to unsigned, then we
6103 must avoid building ABS_EXPR itself as unsigned. */
6104 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6105 {
6106 tree cstype = (*signed_type_for) (ctype);
6107 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6108 != 0)
6109 {
6110 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6111 return fold_convert (ctype, t1);
6112 }
6113 break;
6114 }
6115 /* If the constant is negative, we cannot simplify this. */
6116 if (tree_int_cst_sgn (c) == -1)
6117 break;
6118 /* FALLTHROUGH */
6119 case NEGATE_EXPR:
6120 /* For division and modulus, type can't be unsigned, as e.g.
6121 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6122 For signed types, even with wrapping overflow, this is fine. */
6123 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6124 break;
6125 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6126 != 0)
6127 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6128 break;
6129
6130 case MIN_EXPR: case MAX_EXPR:
6131 /* If widening the type changes the signedness, then we can't perform
6132 this optimization as that changes the result. */
6133 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6134 break;
6135
6136 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6137 sub_strict_overflow_p = false;
6138 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6139 &sub_strict_overflow_p)) != 0
6140 && (t2 = extract_muldiv (op1, c, code, wide_type,
6141 &sub_strict_overflow_p)) != 0)
6142 {
6143 if (tree_int_cst_sgn (c) < 0)
6144 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6145 if (sub_strict_overflow_p)
6146 *strict_overflow_p = true;
6147 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6148 fold_convert (ctype, t2));
6149 }
6150 break;
6151
6152 case LSHIFT_EXPR: case RSHIFT_EXPR:
6153 /* If the second operand is constant, this is a multiplication
6154 or floor division, by a power of two, so we can treat it that
6155 way unless the multiplier or divisor overflows. Signed
6156 left-shift overflow is implementation-defined rather than
6157 undefined in C90, so do not convert signed left shift into
6158 multiplication. */
6159 if (TREE_CODE (op1) == INTEGER_CST
6160 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6161 /* const_binop may not detect overflow correctly,
6162 so check for it explicitly here. */
6163 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6164 wi::to_wide (op1))
6165 && 0 != (t1 = fold_convert (ctype,
6166 const_binop (LSHIFT_EXPR,
6167 size_one_node,
6168 op1)))
6169 && !TREE_OVERFLOW (t1))
6170 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6171 ? MULT_EXPR : FLOOR_DIV_EXPR,
6172 ctype,
6173 fold_convert (ctype, op0),
6174 t1),
6175 c, code, wide_type, strict_overflow_p);
6176 break;
6177
6178 case PLUS_EXPR: case MINUS_EXPR:
6179 /* See if we can eliminate the operation on both sides. If we can, we
6180 can return a new PLUS or MINUS. If we can't, the only remaining
6181 cases where we can do anything are if the second operand is a
6182 constant. */
6183 sub_strict_overflow_p = false;
6184 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6185 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6186 if (t1 != 0 && t2 != 0
6187 && TYPE_OVERFLOW_WRAPS (ctype)
6188 && (code == MULT_EXPR
6189 /* If not multiplication, we can only do this if both operands
6190 are divisible by c. */
6191 || (multiple_of_p (ctype, op0, c)
6192 && multiple_of_p (ctype, op1, c))))
6193 {
6194 if (sub_strict_overflow_p)
6195 *strict_overflow_p = true;
6196 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6197 fold_convert (ctype, t2));
6198 }
6199
6200 /* If this was a subtraction, negate OP1 and set it to be an addition.
6201 This simplifies the logic below. */
6202 if (tcode == MINUS_EXPR)
6203 {
6204 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6205 /* If OP1 was not easily negatable, the constant may be OP0. */
6206 if (TREE_CODE (op0) == INTEGER_CST)
6207 {
6208 std::swap (op0, op1);
6209 std::swap (t1, t2);
6210 }
6211 }
6212
6213 if (TREE_CODE (op1) != INTEGER_CST)
6214 break;
6215
6216 /* If either OP1 or C are negative, this optimization is not safe for
6217 some of the division and remainder types while for others we need
6218 to change the code. */
6219 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6220 {
6221 if (code == CEIL_DIV_EXPR)
6222 code = FLOOR_DIV_EXPR;
6223 else if (code == FLOOR_DIV_EXPR)
6224 code = CEIL_DIV_EXPR;
6225 else if (code != MULT_EXPR
6226 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6227 break;
6228 }
6229
6230 /* If it's a multiply or a division/modulus operation of a multiple
6231 of our constant, do the operation and verify it doesn't overflow. */
6232 if (code == MULT_EXPR
6233 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6234 TYPE_SIGN (type)))
6235 {
6236 op1 = const_binop (code, fold_convert (ctype, op1),
6237 fold_convert (ctype, c));
6238 /* We allow the constant to overflow with wrapping semantics. */
6239 if (op1 == 0
6240 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6241 break;
6242 }
6243 else
6244 break;
6245
6246 /* If we have an unsigned type, we cannot widen the operation since it
6247 will change the result if the original computation overflowed. */
6248 if (TYPE_UNSIGNED (ctype) && ctype != type)
6249 break;
6250
6251 /* The last case is if we are a multiply. In that case, we can
6252 apply the distributive law to commute the multiply and addition
6253 if the multiplication of the constants doesn't overflow
6254 and overflow is defined. With undefined overflow
6255 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6256 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6257 return fold_build2 (tcode, ctype,
6258 fold_build2 (code, ctype,
6259 fold_convert (ctype, op0),
6260 fold_convert (ctype, c)),
6261 op1);
6262
6263 break;
6264
6265 case MULT_EXPR:
6266 /* We have a special case here if we are doing something like
6267 (C * 8) % 4 since we know that's zero. */
6268 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6269 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6270 /* If the multiplication can overflow we cannot optimize this. */
6271 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6272 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6273 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6274 TYPE_SIGN (type)))
6275 {
6276 *strict_overflow_p = true;
6277 return omit_one_operand (type, integer_zero_node, op0);
6278 }
6279
6280 /* ... fall through ... */
6281
6282 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6283 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6284 /* If we can extract our operation from the LHS, do so and return a
6285 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6286 do something only if the second operand is a constant. */
6287 if (same_p
6288 && TYPE_OVERFLOW_WRAPS (ctype)
6289 && (t1 = extract_muldiv (op0, c, code, wide_type,
6290 strict_overflow_p)) != 0)
6291 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6292 fold_convert (ctype, op1));
6293 else if (tcode == MULT_EXPR && code == MULT_EXPR
6294 && TYPE_OVERFLOW_WRAPS (ctype)
6295 && (t1 = extract_muldiv (op1, c, code, wide_type,
6296 strict_overflow_p)) != 0)
6297 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6298 fold_convert (ctype, t1));
6299 else if (TREE_CODE (op1) != INTEGER_CST)
6300 return 0;
6301
6302 /* If these are the same operation types, we can associate them
6303 assuming no overflow. */
6304 if (tcode == code)
6305 {
6306 bool overflow_p = false;
6307 bool overflow_mul_p;
6308 signop sign = TYPE_SIGN (ctype);
6309 unsigned prec = TYPE_PRECISION (ctype);
6310 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6311 wi::to_wide (c, prec),
6312 sign, &overflow_mul_p);
6313 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6314 if (overflow_mul_p
6315 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6316 overflow_p = true;
6317 if (!overflow_p)
6318 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6319 wide_int_to_tree (ctype, mul));
6320 }
6321
6322 /* If these operations "cancel" each other, we have the main
6323 optimizations of this pass, which occur when either constant is a
6324 multiple of the other, in which case we replace this with an
6325 operation of either CODE or TCODE.
6326
6327 If we have an unsigned type, we cannot do this since it will change
6328 the result if the original computation overflowed. */
6329 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6330 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6331 || (tcode == MULT_EXPR
6332 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6333 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6334 && code != MULT_EXPR)))
6335 {
6336 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6337 TYPE_SIGN (type)))
6338 {
6339 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6340 *strict_overflow_p = true;
6341 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6342 fold_convert (ctype,
6343 const_binop (TRUNC_DIV_EXPR,
6344 op1, c)));
6345 }
6346 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6347 TYPE_SIGN (type)))
6348 {
6349 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6350 *strict_overflow_p = true;
6351 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6352 fold_convert (ctype,
6353 const_binop (TRUNC_DIV_EXPR,
6354 c, op1)));
6355 }
6356 }
6357 break;
6358
6359 default:
6360 break;
6361 }
6362
6363 return 0;
6364 }
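
/* Editor's note: an illustrative sketch, not part of GCC, of the
   MULT_EXPR/modulus cancellation above: with signed overflow
   undefined, x * 8 is always a multiple of 4, so the remainder is
   known to be zero.  */

static int
modmul_unfolded (int x)
{
  return (x * 8) % 4;
}

static int
modmul_folded (int x)
{
  (void) x;
  return 0;
}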
6365 \f
6366 /* Return a node which has the indicated constant VALUE (either 0 or
6367 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6368 and is of the indicated TYPE. */
6369
6370 tree
6371 constant_boolean_node (bool value, tree type)
6372 {
6373 if (type == integer_type_node)
6374 return value ? integer_one_node : integer_zero_node;
6375 else if (type == boolean_type_node)
6376 return value ? boolean_true_node : boolean_false_node;
6377 else if (TREE_CODE (type) == VECTOR_TYPE)
6378 return build_vector_from_val (type,
6379 build_int_cst (TREE_TYPE (type),
6380 value ? -1 : 0));
6381 else
6382 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6383 }
6384
6385
6386 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6387 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6388 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6389 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6390 COND is the first argument to CODE; otherwise (as in the example
6391 given here), it is the second argument. TYPE is the type of the
6392 original expression. Return NULL_TREE if no simplification is
6393 possible. */
6394
6395 static tree
6396 fold_binary_op_with_conditional_arg (location_t loc,
6397 enum tree_code code,
6398 tree type, tree op0, tree op1,
6399 tree cond, tree arg, int cond_first_p)
6400 {
6401 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6402 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6403 tree test, true_value, false_value;
6404 tree lhs = NULL_TREE;
6405 tree rhs = NULL_TREE;
6406 enum tree_code cond_code = COND_EXPR;
6407
6408 if (TREE_CODE (cond) == COND_EXPR
6409 || TREE_CODE (cond) == VEC_COND_EXPR)
6410 {
6411 test = TREE_OPERAND (cond, 0);
6412 true_value = TREE_OPERAND (cond, 1);
6413 false_value = TREE_OPERAND (cond, 2);
6414 /* If this operand is a throw expression, then it does not make
6415 sense to try to perform a logical or arithmetic operation
6416 involving it. */
6417 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6418 lhs = true_value;
6419 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6420 rhs = false_value;
6421 }
6422 else if (!(TREE_CODE (type) != VECTOR_TYPE
6423 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6424 {
6425 tree testtype = TREE_TYPE (cond);
6426 test = cond;
6427 true_value = constant_boolean_node (true, testtype);
6428 false_value = constant_boolean_node (false, testtype);
6429 }
6430 else
6431 /* Detect the case of mixing vector and scalar types - bail out. */
6432 return NULL_TREE;
6433
6434 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6435 cond_code = VEC_COND_EXPR;
6436
6437 /* This transformation is only worthwhile if we don't have to wrap ARG
6438 in a SAVE_EXPR and the operation can be simplified without recursing
6439 on at least one of the branches once it's pushed inside the COND_EXPR. */
6440 if (!TREE_CONSTANT (arg)
6441 && (TREE_SIDE_EFFECTS (arg)
6442 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6443 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6444 return NULL_TREE;
6445
6446 arg = fold_convert_loc (loc, arg_type, arg);
6447 if (lhs == 0)
6448 {
6449 true_value = fold_convert_loc (loc, cond_type, true_value);
6450 if (cond_first_p)
6451 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6452 else
6453 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6454 }
6455 if (rhs == 0)
6456 {
6457 false_value = fold_convert_loc (loc, cond_type, false_value);
6458 if (cond_first_p)
6459 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6460 else
6461 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6462 }
6463
6464 /* Check that we have simplified at least one of the branches. */
6465 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6466 return NULL_TREE;
6467
6468 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6469 }
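
/* Editor's note: an illustrative sketch, not part of GCC; the function
   names are hypothetical.  The distribution above pushes the operation
   into both arms; it is only kept when at least one arm then
   simplifies, as the constant a + 0 arm does here.  */

static int
cond_arg_unfolded (int a, int b)
{
  return a + (b ? 4 : 0);
}

static int
cond_arg_folded (int a, int b)
{
  return b ? a + 4 : a;		/* the a + 0 arm simplified away */
}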
6470
6471 \f
6472 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6473
6474 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6475 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6476 ADDEND is the same as X.
6477
6478 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6479 and finite. The problematic cases are when X is zero, and its mode
6480 has signed zeros. In the case of rounding towards -infinity,
6481 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6482 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6483
6484 bool
6485 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6486 {
6487 if (!real_zerop (addend))
6488 return false;
6489
6490 /* Don't allow the fold with -fsignaling-nans. */
6491 if (HONOR_SNANS (element_mode (type)))
6492 return false;
6493
6494 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6495 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6496 return true;
6497
6498 /* In a vector or complex, we would need to check the sign of all zeros. */
6499 if (TREE_CODE (addend) != REAL_CST)
6500 return false;
6501
6502 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6503 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6504 negate = !negate;
6505
6506 /* The mode has signed zeros, and we have to honor their sign.
6507 In this situation, there is only one case we can return true for.
6508 X - 0 is the same as X unless rounding towards -infinity is
6509 supported. */
6510 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6511 }
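
/* Editor's note: an illustrative sketch, not part of GCC, of why the
   predicate above must refuse X + 0.0 when signed zeros are honored:
   under the default rounding mode -0.0 + 0.0 is +0.0, so folding the
   addition to plain X would flip the sign of a zero result.  */

static double
zero_add (double x)
{
  return x + 0.0;	/* zero_add (-0.0) yields +0.0, not -0.0 */
}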
6512
6513 /* Subroutine of match.pd that optimizes comparisons of a division by
6514 a nonzero integer constant against an integer constant, i.e.
6515 X/C1 op C2.
6516
6517 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6518 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
6519
6520 enum tree_code
6521 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6522 tree *hi, bool *neg_overflow)
6523 {
6524 tree prod, tmp, type = TREE_TYPE (c1);
6525 signop sign = TYPE_SIGN (type);
6526 bool overflow;
6527
6528 /* We have to do this the hard way to detect unsigned overflow.
6529 prod = int_const_binop (MULT_EXPR, c1, c2); */
6530 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6531 prod = force_fit_type (type, val, -1, overflow);
6532 *neg_overflow = false;
6533
6534 if (sign == UNSIGNED)
6535 {
6536 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6537 *lo = prod;
6538
6539 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6540 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
6541 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6542 }
6543 else if (tree_int_cst_sgn (c1) >= 0)
6544 {
6545 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6546 switch (tree_int_cst_sgn (c2))
6547 {
6548 case -1:
6549 *neg_overflow = true;
6550 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6551 *hi = prod;
6552 break;
6553
6554 case 0:
6555 *lo = fold_negate_const (tmp, type);
6556 *hi = tmp;
6557 break;
6558
6559 case 1:
6560 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6561 *lo = prod;
6562 break;
6563
6564 default:
6565 gcc_unreachable ();
6566 }
6567 }
6568 else
6569 {
6570 /* A negative divisor reverses the relational operators. */
6571 code = swap_tree_comparison (code);
6572
6573 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6574 switch (tree_int_cst_sgn (c2))
6575 {
6576 case -1:
6577 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6578 *lo = prod;
6579 break;
6580
6581 case 0:
6582 *hi = fold_negate_const (tmp, type);
6583 *lo = tmp;
6584 break;
6585
6586 case 1:
6587 *neg_overflow = true;
6588 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6589 *hi = prod;
6590 break;
6591
6592 default:
6593 gcc_unreachable ();
6594 }
6595 }
6596
6597 if (code != EQ_EXPR && code != NE_EXPR)
6598 return code;
6599
6600 if (TREE_OVERFLOW (*lo)
6601 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6602 *lo = NULL_TREE;
6603 if (TREE_OVERFLOW (*hi)
6604 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6605 *hi = NULL_TREE;
6606
6607 return code;
6608 }
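
/* Editor's note: an illustrative sketch, not part of GCC; the function
   names are hypothetical.  The bounds computed above for a concrete
   case: with C1 = 4 and C2 = 3, prod = 12 and tmp = 3, so for signed
   (truncating) division x / 4 == 3 holds exactly on [12, 15].  */

static int
div_compare_unfolded (int x)
{
  return x / 4 == 3;
}

static int
div_compare_folded (int x)
{
  return x >= 12 && x <= 15;
}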
6609
6610
6611 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6612 equality/inequality test, then return a simplified form of the test
6613 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6614 result type. */
6615
6616 static tree
6617 fold_single_bit_test_into_sign_test (location_t loc,
6618 enum tree_code code, tree arg0, tree arg1,
6619 tree result_type)
6620 {
6621 /* If this is testing a single bit, we can optimize the test. */
6622 if ((code == NE_EXPR || code == EQ_EXPR)
6623 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6624 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6625 {
6626 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6627 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6628 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6629
6630 if (arg00 != NULL_TREE
6631 /* This is only a win if casting to a signed type is cheap,
6632 i.e. when arg00's type is not a partial mode. */
6633 && type_has_mode_precision_p (TREE_TYPE (arg00)))
6634 {
6635 tree stype = signed_type_for (TREE_TYPE (arg00));
6636 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6637 result_type,
6638 fold_convert_loc (loc, stype, arg00),
6639 build_int_cst (stype, 0));
6640 }
6641 }
6642
6643 return NULL_TREE;
6644 }
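
/* Editor's note: an illustrative sketch, not part of GCC; the function
   names are hypothetical.  Testing the sign bit via BIT_AND becomes an
   ordinary signed comparison; this assumes the usual 32-bit
   two's-complement int.  */

static int
sign_test_unfolded (int x)
{
  return (x & 0x80000000u) != 0;
}

static int
sign_test_folded (int x)
{
  return x < 0;
}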
6645
6646 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6647 equality/inequality test, then return a simplified form of
6648 the test using shifts and logical operations. Otherwise return
6649 NULL. RESULT_TYPE is the desired result type. */
6650
6651 tree
6652 fold_single_bit_test (location_t loc, enum tree_code code,
6653 tree arg0, tree arg1, tree result_type)
6654 {
6655 /* If this is testing a single bit, we can optimize the test. */
6656 if ((code == NE_EXPR || code == EQ_EXPR)
6657 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6658 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6659 {
6660 tree inner = TREE_OPERAND (arg0, 0);
6661 tree type = TREE_TYPE (arg0);
6662 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6663 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
6664 int ops_unsigned;
6665 tree signed_type, unsigned_type, intermediate_type;
6666 tree tem, one;
6667
6668 /* First, see if we can fold the single bit test into a sign-bit
6669 test. */
6670 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6671 result_type);
6672 if (tem)
6673 return tem;
6674
6675 /* Otherwise we have (A & C) != 0 where C is a single bit,
6676 convert that into ((A >> C2) & 1), where C2 = log2(C).
6677 Similarly for (A & C) == 0. */
6678
6679 /* If INNER is a right shift of a constant and it plus BITNUM does
6680 not overflow, adjust BITNUM and INNER. */
6681 if (TREE_CODE (inner) == RSHIFT_EXPR
6682 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6683 && bitnum < TYPE_PRECISION (type)
6684 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
6685 TYPE_PRECISION (type) - bitnum))
6686 {
6687 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6688 inner = TREE_OPERAND (inner, 0);
6689 }
6690
6691 /* If we are going to be able to omit the AND below, we must do our
6692 operations as unsigned. If we must use the AND, we have a choice.
6693 Normally unsigned is faster, but for some machines signed is. */
6694 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6695 && !flag_syntax_only) ? 0 : 1;
6696
6697 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6698 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6699 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6700 inner = fold_convert_loc (loc, intermediate_type, inner);
6701
6702 if (bitnum != 0)
6703 inner = build2 (RSHIFT_EXPR, intermediate_type,
6704 inner, size_int (bitnum));
6705
6706 one = build_int_cst (intermediate_type, 1);
6707
6708 if (code == EQ_EXPR)
6709 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6710
6711 /* Put the AND last so it can combine with more things. */
6712 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6713
6714 /* Make sure to return the proper type. */
6715 inner = fold_convert_loc (loc, result_type, inner);
6716
6717 return inner;
6718 }
6719 return NULL_TREE;
6720 }
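
/* Editor's note: an illustrative sketch, not part of GCC; the function
   names are hypothetical.  The shift form built above: (A & C) != 0
   with C = 1 << 3 becomes a shift and a trailing AND, which the
   comment notes can combine with surrounding code.  */

static unsigned
bit_test_unfolded (unsigned a)
{
  return (a & 8) != 0;
}

static unsigned
bit_test_folded (unsigned a)
{
  return (a >> 3) & 1;
}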
6721
6722 /* Test whether it is preferable to swap two operands, ARG0 and
6723 ARG1, for example because ARG0 is an integer constant and ARG1
6724 isn't. */
6725
6726 bool
6727 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6728 {
6729 if (CONSTANT_CLASS_P (arg1))
6730 return 0;
6731 if (CONSTANT_CLASS_P (arg0))
6732 return 1;
6733
6734 STRIP_NOPS (arg0);
6735 STRIP_NOPS (arg1);
6736
6737 if (TREE_CONSTANT (arg1))
6738 return 0;
6739 if (TREE_CONSTANT (arg0))
6740 return 1;
6741
6742 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6743 for commutative and comparison operators. Ensuring a canonical
6744 form allows the optimizers to find additional redundancies without
6745 having to explicitly check for both orderings. */
6746 if (TREE_CODE (arg0) == SSA_NAME
6747 && TREE_CODE (arg1) == SSA_NAME
6748 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6749 return 1;
6750
6751 /* Put SSA_NAMEs last. */
6752 if (TREE_CODE (arg1) == SSA_NAME)
6753 return 0;
6754 if (TREE_CODE (arg0) == SSA_NAME)
6755 return 1;
6756
6757 /* Put variables last. */
6758 if (DECL_P (arg1))
6759 return 0;
6760 if (DECL_P (arg0))
6761 return 1;
6762
6763 return 0;
6764 }
6765
6766
6767 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6768 means A >= Y && A != MAX, but in this case we know that
6769 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6770
6771 static tree
6772 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6773 {
6774 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6775
6776 if (TREE_CODE (bound) == LT_EXPR)
6777 a = TREE_OPERAND (bound, 0);
6778 else if (TREE_CODE (bound) == GT_EXPR)
6779 a = TREE_OPERAND (bound, 1);
6780 else
6781 return NULL_TREE;
6782
6783 typea = TREE_TYPE (a);
6784 if (!INTEGRAL_TYPE_P (typea)
6785 && !POINTER_TYPE_P (typea))
6786 return NULL_TREE;
6787
6788 if (TREE_CODE (ineq) == LT_EXPR)
6789 {
6790 a1 = TREE_OPERAND (ineq, 1);
6791 y = TREE_OPERAND (ineq, 0);
6792 }
6793 else if (TREE_CODE (ineq) == GT_EXPR)
6794 {
6795 a1 = TREE_OPERAND (ineq, 0);
6796 y = TREE_OPERAND (ineq, 1);
6797 }
6798 else
6799 return NULL_TREE;
6800
6801 if (TREE_TYPE (a1) != typea)
6802 return NULL_TREE;
6803
6804 if (POINTER_TYPE_P (typea))
6805 {
6806 /* Convert the pointer values to integers before taking the difference. */
6807 tree ta = fold_convert_loc (loc, ssizetype, a);
6808 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6809 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6810 }
6811 else
6812 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6813
6814 if (!diff || !integer_onep (diff))
6815 return NULL_TREE;
6816
6817 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6818 }
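
/* Editor's note: an illustrative sketch, not part of GCC; the function
   names are hypothetical.  Because the bound a < n guarantees a + 1
   cannot wrap past the maximum, the strict test a + 1 > y can be
   relaxed to a >= y.  */

static int
ineq_unfolded (int a, int n, int y)
{
  return a < n && a + 1 > y;
}

static int
ineq_folded (int a, int n, int y)
{
  return a < n && a >= y;
}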
6819
6820 /* Fold a sum or difference of at least one multiplication.
6821 Returns the folded tree or NULL if no simplification could be made. */
6822
6823 static tree
6824 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6825 tree arg0, tree arg1)
6826 {
6827 tree arg00, arg01, arg10, arg11;
6828 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6829
6830 /* (A * C) +- (B * C) -> (A+-B) * C.
6831 (A * C) +- A -> A * (C+-1).
6832 We are most concerned about the case where C is a constant,
6833 but other combinations show up during loop reduction. Since
6834 it is not difficult, try all four possibilities. */
6835
6836 if (TREE_CODE (arg0) == MULT_EXPR)
6837 {
6838 arg00 = TREE_OPERAND (arg0, 0);
6839 arg01 = TREE_OPERAND (arg0, 1);
6840 }
6841 else if (TREE_CODE (arg0) == INTEGER_CST)
6842 {
6843 arg00 = build_one_cst (type);
6844 arg01 = arg0;
6845 }
6846 else
6847 {
6848 /* We cannot generate constant 1 for fract. */
6849 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6850 return NULL_TREE;
6851 arg00 = arg0;
6852 arg01 = build_one_cst (type);
6853 }
6854 if (TREE_CODE (arg1) == MULT_EXPR)
6855 {
6856 arg10 = TREE_OPERAND (arg1, 0);
6857 arg11 = TREE_OPERAND (arg1, 1);
6858 }
6859 else if (TREE_CODE (arg1) == INTEGER_CST)
6860 {
6861 arg10 = build_one_cst (type);
6862 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6863 the purpose of this canonicalization. */
6864 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
6865 && negate_expr_p (arg1)
6866 && code == PLUS_EXPR)
6867 {
6868 arg11 = negate_expr (arg1);
6869 code = MINUS_EXPR;
6870 }
6871 else
6872 arg11 = arg1;
6873 }
6874 else
6875 {
6876 /* We cannot generate constant 1 for fract. */
6877 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6878 return NULL_TREE;
6879 arg10 = arg1;
6880 arg11 = build_one_cst (type);
6881 }
6882 same = NULL_TREE;
6883
6884 /* Prefer factoring a common non-constant. */
6885 if (operand_equal_p (arg00, arg10, 0))
6886 same = arg00, alt0 = arg01, alt1 = arg11;
6887 else if (operand_equal_p (arg01, arg11, 0))
6888 same = arg01, alt0 = arg00, alt1 = arg10;
6889 else if (operand_equal_p (arg00, arg11, 0))
6890 same = arg00, alt0 = arg01, alt1 = arg10;
6891 else if (operand_equal_p (arg01, arg10, 0))
6892 same = arg01, alt0 = arg00, alt1 = arg11;
6893
6894 /* No identical multiplicands; see if we can find a common
6895 power-of-two factor in non-power-of-two multiplies. This
6896 can help in multi-dimensional array access. */
6897 else if (tree_fits_shwi_p (arg01)
6898 && tree_fits_shwi_p (arg11))
6899 {
6900 HOST_WIDE_INT int01, int11, tmp;
6901 bool swap = false;
6902 tree maybe_same;
6903 int01 = tree_to_shwi (arg01);
6904 int11 = tree_to_shwi (arg11);
6905
6906 /* Move min of absolute values to int11. */
6907 if (absu_hwi (int01) < absu_hwi (int11))
6908 {
6909 tmp = int01, int01 = int11, int11 = tmp;
6910 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6911 maybe_same = arg01;
6912 swap = true;
6913 }
6914 else
6915 maybe_same = arg11;
6916
6917 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6918 /* The remainder should not be a constant, otherwise we
6919 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6920 increased the number of multiplications necessary. */
6921 && TREE_CODE (arg10) != INTEGER_CST)
6922 {
6923 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6924 build_int_cst (TREE_TYPE (arg00),
6925 int01 / int11));
6926 alt1 = arg10;
6927 same = maybe_same;
6928 if (swap)
6929 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6930 }
6931 }
6932
6933 if (!same)
6934 return NULL_TREE;
6935
6936 if (! INTEGRAL_TYPE_P (type)
6937 || TYPE_OVERFLOW_WRAPS (type)
6938 /* We are neither factoring zero nor minus one. */
6939 || TREE_CODE (same) == INTEGER_CST)
6940 return fold_build2_loc (loc, MULT_EXPR, type,
6941 fold_build2_loc (loc, code, type,
6942 fold_convert_loc (loc, type, alt0),
6943 fold_convert_loc (loc, type, alt1)),
6944 fold_convert_loc (loc, type, same));
6945
6946 /* Same may be zero and thus the operation 'code' may overflow. Likewise
6947 same may be minus one and thus the multiplication may overflow. Perform
6948 the operations in an unsigned type. */
6949 tree utype = unsigned_type_for (type);
6950 tree tem = fold_build2_loc (loc, code, utype,
6951 fold_convert_loc (loc, utype, alt0),
6952 fold_convert_loc (loc, utype, alt1));
6953 /* If the sum evaluated to a constant that is not -INF the multiplication
6954 cannot overflow. */
6955 if (TREE_CODE (tem) == INTEGER_CST
6956 && (wi::to_wide (tem)
6957 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
6958 return fold_build2_loc (loc, MULT_EXPR, type,
6959 fold_convert (type, tem), same);
6960
6961 return fold_convert_loc (loc, type,
6962 fold_build2_loc (loc, MULT_EXPR, utype, tem,
6963 fold_convert_loc (loc, utype, same)));
6964 }
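
/* Editor's note: an illustrative sketch, not part of GCC; the function
   names are hypothetical.  The factoring above, for a non-constant
   common factor in a signed type with undefined overflow: the addition
   and multiply are done in the unsigned type, so the reassociation
   cannot introduce an overflow the original expression did not have.  */

static int
factor_unfolded (int a, int b, int c)
{
  return a * c + b * c;
}

static int
factor_folded (int a, int b, int c)
{
  return (int) (((unsigned) a + (unsigned) b) * (unsigned) c);
}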
6965
6966 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6967 specified by EXPR into the buffer PTR of length LEN bytes.
6968 Return the number of bytes placed in the buffer, or zero
6969 upon failure. */
6970
6971 static int
6972 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6973 {
6974 tree type = TREE_TYPE (expr);
6975 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
6976 int byte, offset, word, words;
6977 unsigned char value;
6978
6979 if ((off == -1 && total_bytes > len) || off >= total_bytes)
6980 return 0;
6981 if (off == -1)
6982 off = 0;
6983
6984 if (ptr == NULL)
6985 /* Dry run. */
6986 return MIN (len, total_bytes - off);
6987
6988 words = total_bytes / UNITS_PER_WORD;
6989
6990 for (byte = 0; byte < total_bytes; byte++)
6991 {
6992 int bitpos = byte * BITS_PER_UNIT;
6993 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
6994 number of bytes. */
6995 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
6996
6997 if (total_bytes > UNITS_PER_WORD)
6998 {
6999 word = byte / UNITS_PER_WORD;
7000 if (WORDS_BIG_ENDIAN)
7001 word = (words - 1) - word;
7002 offset = word * UNITS_PER_WORD;
7003 if (BYTES_BIG_ENDIAN)
7004 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7005 else
7006 offset += byte % UNITS_PER_WORD;
7007 }
7008 else
7009 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7010 if (offset >= off && offset - off < len)
7011 ptr[offset - off] = value;
7012 }
7013 return MIN (len, total_bytes - off);
7014 }
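/* Worked example (little-endian target, UNITS_PER_WORD >= 4): encoding
   the 32-bit INTEGER_CST 0x11223344 stores the bytes 44 33 22 11; on a
   big-endian target the same loop stores 11 22 33 44, because OFFSET is
   mirrored per byte.  */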
7015
7016
7017 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7018 specified by EXPR into the buffer PTR of length LEN bytes.
7019 Return the number of bytes placed in the buffer, or zero
7020 upon failure. */
7021
7022 static int
7023 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7024 {
7025 tree type = TREE_TYPE (expr);
7026 scalar_mode mode = SCALAR_TYPE_MODE (type);
7027 int total_bytes = GET_MODE_SIZE (mode);
7028 FIXED_VALUE_TYPE value;
7029 tree i_value, i_type;
7030
7031 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7032 return 0;
7033
7034 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7035
7036 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7037 return 0;
7038
7039 value = TREE_FIXED_CST (expr);
7040 i_value = double_int_to_tree (i_type, value.data);
7041
7042 return native_encode_int (i_value, ptr, len, off);
7043 }
7044
7045
7046 /* Subroutine of native_encode_expr. Encode the REAL_CST
7047 specified by EXPR into the buffer PTR of length LEN bytes.
7048 Return the number of bytes placed in the buffer, or zero
7049 upon failure. */
7050
7051 static int
7052 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7053 {
7054 tree type = TREE_TYPE (expr);
7055 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7056 int byte, offset, word, words, bitpos;
7057 unsigned char value;
7058
7059 /* There are always 32 bits in each long, no matter the size of
7060 the host's long. We handle floating point representations with
7061 up to 192 bits. */
7062 long tmp[6];
7063
7064 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7065 return 0;
7066 if (off == -1)
7067 off = 0;
7068
7069 if (ptr == NULL)
7070 /* Dry run. */
7071 return MIN (len, total_bytes - off);
7072
7073 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7074
7075 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7076
7077 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7078 bitpos += BITS_PER_UNIT)
7079 {
7080 byte = (bitpos / BITS_PER_UNIT) & 3;
7081 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7082
7083 if (UNITS_PER_WORD < 4)
7084 {
7085 word = byte / UNITS_PER_WORD;
7086 if (WORDS_BIG_ENDIAN)
7087 word = (words - 1) - word;
7088 offset = word * UNITS_PER_WORD;
7089 if (BYTES_BIG_ENDIAN)
7090 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7091 else
7092 offset += byte % UNITS_PER_WORD;
7093 }
7094 else
7095 {
7096 offset = byte;
7097 if (BYTES_BIG_ENDIAN)
7098 {
7099 /* Reverse bytes within each long, or within the entire float
7100 if it's smaller than a long (for HFmode). */
7101 offset = MIN (3, total_bytes - 1) - offset;
7102 gcc_assert (offset >= 0);
7103 }
7104 }
7105 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7106 if (offset >= off
7107 && offset - off < len)
7108 ptr[offset - off] = value;
7109 }
7110 return MIN (len, total_bytes - off);
7111 }
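/* Illustrative sketch: for a 64-bit double, real_to_target fills tmp[0]
   and tmp[1] with two 32-bit groups in target word order; the loop above
   then writes each group's four bytes in target byte order, so the
   buffer ends up matching the value's in-memory target image.  */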
7112
7113 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7114 specified by EXPR into the buffer PTR of length LEN bytes.
7115 Return the number of bytes placed in the buffer, or zero
7116 upon failure. */
7117
7118 static int
7119 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7120 {
7121 int rsize, isize;
7122 tree part;
7123
7124 part = TREE_REALPART (expr);
7125 rsize = native_encode_expr (part, ptr, len, off);
7126 if (off == -1 && rsize == 0)
7127 return 0;
7128 part = TREE_IMAGPART (expr);
7129 if (off != -1)
7130 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7131 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7132 len - rsize, off);
7133 if (off == -1 && isize != rsize)
7134 return 0;
7135 return rsize + isize;
7136 }
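/* E.g. a complex double occupies 16 bytes in the buffer: the first 8
   encode the real part, the following 8 the imaginary part, each via
   the scalar encoder above.  */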
7137
7138
7139 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7140 specified by EXPR into the buffer PTR of length LEN bytes.
7141 Return the number of bytes placed in the buffer, or zero
7142 upon failure. */
7143
7144 static int
7145 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7146 {
7147 unsigned i, count;
7148 int size, offset;
7149 tree itype, elem;
7150
7151 offset = 0;
7152 count = VECTOR_CST_NELTS (expr);
7153 itype = TREE_TYPE (TREE_TYPE (expr));
7154 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7155 for (i = 0; i < count; i++)
7156 {
7157 if (off >= size)
7158 {
7159 off -= size;
7160 continue;
7161 }
7162 elem = VECTOR_CST_ELT (expr, i);
7163 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7164 len - offset, off);
7165 if ((off == -1 && res != size) || res == 0)
7166 return 0;
7167 offset += res;
7168 if (offset >= len)
7169 return offset;
7170 if (off != -1)
7171 off = 0;
7172 }
7173 return offset;
7174 }
7175
7176
7177 /* Subroutine of native_encode_expr. Encode the STRING_CST
7178 specified by EXPR into the buffer PTR of length LEN bytes.
7179 Return the number of bytes placed in the buffer, or zero
7180 upon failure. */
7181
7182 static int
7183 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7184 {
7185 tree type = TREE_TYPE (expr);
7186
7187 /* Wide-char strings are encoded in target byte order, so encoding
7188 them natively is trivial. */
7189 if (BITS_PER_UNIT != CHAR_BIT
7190 || TREE_CODE (type) != ARRAY_TYPE
7191 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7192 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7193 return 0;
7194
7195 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7196 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7197 return 0;
7198 if (off == -1)
7199 off = 0;
7200 if (ptr == NULL)
7201 /* Dry run. */;
7202 else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7203 {
7204 int written = 0;
7205 if (off < TREE_STRING_LENGTH (expr))
7206 {
7207 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7208 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7209 }
7210 memset (ptr + written, 0,
7211 MIN (total_bytes - written, len - written));
7212 }
7213 else
7214 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7215 return MIN (total_bytes - off, len);
7216 }
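/* Illustrative sketch: given a STRING_CST "ab" (three bytes, counting
   the NUL) whose array type is char[4], the encoder copies 61 62 00 and
   zero-fills the fourth byte, yielding 61 62 00 00.  */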
7217
7218
7219 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7220 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7221 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7222 anything, just do a dry run. If OFF is not -1 then start
7223 the encoding at byte offset OFF and encode at most LEN bytes.
7224 Return the number of bytes placed in the buffer, or zero upon failure. */
7225
7226 int
7227 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7228 {
7229 /* We don't support starting at a negative offset, and -1 is special. */
7230 if (off < -1)
7231 return 0;
7232
7233 switch (TREE_CODE (expr))
7234 {
7235 case INTEGER_CST:
7236 return native_encode_int (expr, ptr, len, off);
7237
7238 case REAL_CST:
7239 return native_encode_real (expr, ptr, len, off);
7240
7241 case FIXED_CST:
7242 return native_encode_fixed (expr, ptr, len, off);
7243
7244 case COMPLEX_CST:
7245 return native_encode_complex (expr, ptr, len, off);
7246
7247 case VECTOR_CST:
7248 return native_encode_vector (expr, ptr, len, off);
7249
7250 case STRING_CST:
7251 return native_encode_string (expr, ptr, len, off);
7252
7253 default:
7254 return 0;
7255 }
7256 }
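/* Usage sketch: native_encode_expr (expr, buf, 4, 4) encodes at most 4
   bytes starting at byte offset 4 of EXPR's target representation,
   while a NULL buffer requests a dry run that just returns how many
   bytes would have been written.  */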
7257
7258
7259 /* Subroutine of native_interpret_expr. Interpret the contents of
7260 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7261 If the buffer cannot be interpreted, return NULL_TREE. */
7262
7263 static tree
7264 native_interpret_int (tree type, const unsigned char *ptr, int len)
7265 {
7266 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7267
7268 if (total_bytes > len
7269 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7270 return NULL_TREE;
7271
7272 wide_int result = wi::from_buffer (ptr, total_bytes);
7273
7274 return wide_int_to_tree (type, result);
7275 }
7276
7277
7278 /* Subroutine of native_interpret_expr. Interpret the contents of
7279 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7280 If the buffer cannot be interpreted, return NULL_TREE. */
7281
7282 static tree
7283 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7284 {
7285 scalar_mode mode = SCALAR_TYPE_MODE (type);
7286 int total_bytes = GET_MODE_SIZE (mode);
7287 double_int result;
7288 FIXED_VALUE_TYPE fixed_value;
7289
7290 if (total_bytes > len
7291 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7292 return NULL_TREE;
7293
7294 result = double_int::from_buffer (ptr, total_bytes);
7295 fixed_value = fixed_from_double_int (result, mode);
7296
7297 return build_fixed (type, fixed_value);
7298 }
7299
7300
7301 /* Subroutine of native_interpret_expr. Interpret the contents of
7302 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7303 If the buffer cannot be interpreted, return NULL_TREE. */
7304
7305 static tree
7306 native_interpret_real (tree type, const unsigned char *ptr, int len)
7307 {
7308 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7309 int total_bytes = GET_MODE_SIZE (mode);
7310 unsigned char value;
7311 /* There are always 32 bits in each long, no matter the size of
7312 the host's long. We handle floating point representations with
7313 up to 192 bits. */
7314 REAL_VALUE_TYPE r;
7315 long tmp[6];
7316
7317 if (total_bytes > len || total_bytes > 24)
7318 return NULL_TREE;
7319 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7320
7321 memset (tmp, 0, sizeof (tmp));
7322 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7323 bitpos += BITS_PER_UNIT)
7324 {
7325 /* Both OFFSET and BYTE index within a long;
7326 bitpos indexes the whole float. */
7327 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7328 if (UNITS_PER_WORD < 4)
7329 {
7330 int word = byte / UNITS_PER_WORD;
7331 if (WORDS_BIG_ENDIAN)
7332 word = (words - 1) - word;
7333 offset = word * UNITS_PER_WORD;
7334 if (BYTES_BIG_ENDIAN)
7335 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7336 else
7337 offset += byte % UNITS_PER_WORD;
7338 }
7339 else
7340 {
7341 offset = byte;
7342 if (BYTES_BIG_ENDIAN)
7343 {
7344 /* Reverse bytes within each long, or within the entire float
7345 if it's smaller than a long (for HFmode). */
7346 offset = MIN (3, total_bytes - 1) - offset;
7347 gcc_assert (offset >= 0);
7348 }
7349 }
7350 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7351
7352 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7353 }
7354
7355 real_from_target (&r, tmp, mode);
7356 return build_real (type, r);
7357 }
7358
7359
7360 /* Subroutine of native_interpret_expr. Interpret the contents of
7361 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7362 If the buffer cannot be interpreted, return NULL_TREE. */
7363
7364 static tree
7365 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7366 {
7367 tree etype, rpart, ipart;
7368 int size;
7369
7370 etype = TREE_TYPE (type);
7371 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7372 if (size * 2 > len)
7373 return NULL_TREE;
7374 rpart = native_interpret_expr (etype, ptr, size);
7375 if (!rpart)
7376 return NULL_TREE;
7377 ipart = native_interpret_expr (etype, ptr+size, size);
7378 if (!ipart)
7379 return NULL_TREE;
7380 return build_complex (type, rpart, ipart);
7381 }
7382
7383
7384 /* Subroutine of native_interpret_expr. Interpret the contents of
7385 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7386 If the buffer cannot be interpreted, return NULL_TREE. */
7387
7388 static tree
7389 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7390 {
7391 tree etype, elem;
7392 int i, size, count;
7393
7394 etype = TREE_TYPE (type);
7395 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7396 count = TYPE_VECTOR_SUBPARTS (type);
7397 if (size * count > len)
7398 return NULL_TREE;
7399
7400 auto_vec<tree, 32> elements (count);
7401 for (i = 0; i < count; ++i)
7402 {
7403 elem = native_interpret_expr (etype, ptr+(i*size), size);
7404 if (!elem)
7405 return NULL_TREE;
7406 elements.quick_push (elem);
7407 }
7408 return build_vector (type, elements);
7409 }
7410
7411
7412 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7413 the buffer PTR of length LEN as a constant of type TYPE. For
7414 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7415 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7416 return NULL_TREE. */
7417
7418 tree
7419 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7420 {
7421 switch (TREE_CODE (type))
7422 {
7423 case INTEGER_TYPE:
7424 case ENUMERAL_TYPE:
7425 case BOOLEAN_TYPE:
7426 case POINTER_TYPE:
7427 case REFERENCE_TYPE:
7428 return native_interpret_int (type, ptr, len);
7429
7430 case REAL_TYPE:
7431 return native_interpret_real (type, ptr, len);
7432
7433 case FIXED_POINT_TYPE:
7434 return native_interpret_fixed (type, ptr, len);
7435
7436 case COMPLEX_TYPE:
7437 return native_interpret_complex (type, ptr, len);
7438
7439 case VECTOR_TYPE:
7440 return native_interpret_vector (type, ptr, len);
7441
7442 default:
7443 return NULL_TREE;
7444 }
7445 }
7446
7447 /* Returns true if we can interpret the contents of a native encoding
7448 as TYPE. */
7449
7450 static bool
7451 can_native_interpret_type_p (tree type)
7452 {
7453 switch (TREE_CODE (type))
7454 {
7455 case INTEGER_TYPE:
7456 case ENUMERAL_TYPE:
7457 case BOOLEAN_TYPE:
7458 case POINTER_TYPE:
7459 case REFERENCE_TYPE:
7460 case FIXED_POINT_TYPE:
7461 case REAL_TYPE:
7462 case COMPLEX_TYPE:
7463 case VECTOR_TYPE:
7464 return true;
7465 default:
7466 return false;
7467 }
7468 }
7469
7470
7471 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7472 TYPE at compile-time. If we're unable to perform the conversion
7473 return NULL_TREE. */
7474
7475 static tree
7476 fold_view_convert_expr (tree type, tree expr)
7477 {
7478 /* We support up to 512-bit values (for V8DFmode). */
7479 unsigned char buffer[64];
7480 int len;
7481
7482 /* Check that the host and target are sane. */
7483 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7484 return NULL_TREE;
7485
7486 len = native_encode_expr (expr, buffer, sizeof (buffer));
7487 if (len == 0)
7488 return NULL_TREE;
7489
7490 return native_interpret_expr (type, buffer, len);
7491 }
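/* Illustrative example (IEEE single precision assumed): folding
   VIEW_CONVERT_EXPR<int>(1.0f) encodes the float as 0x3f800000 in
   target byte order and reinterprets those bytes, yielding the
   INTEGER_CST 1065353216.  */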
7492
7493 /* Build an expression for the address of T. Folds away INDIRECT_REF
7494 to avoid confusing the gimplify process. */
7495
7496 tree
7497 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7498 {
7499 /* The size of the object is not relevant when talking about its address. */
7500 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7501 t = TREE_OPERAND (t, 0);
7502
7503 if (TREE_CODE (t) == INDIRECT_REF)
7504 {
7505 t = TREE_OPERAND (t, 0);
7506
7507 if (TREE_TYPE (t) != ptrtype)
7508 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7509 }
7510 else if (TREE_CODE (t) == MEM_REF
7511 && integer_zerop (TREE_OPERAND (t, 1)))
7512 return TREE_OPERAND (t, 0);
7513 else if (TREE_CODE (t) == MEM_REF
7514 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7515 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7516 TREE_OPERAND (t, 0),
7517 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7518 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7519 {
7520 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7521
7522 if (TREE_TYPE (t) != ptrtype)
7523 t = fold_convert_loc (loc, ptrtype, t);
7524 }
7525 else
7526 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7527
7528 return t;
7529 }
7530
7531 /* Build an expression for the address of T. */
7532
7533 tree
7534 build_fold_addr_expr_loc (location_t loc, tree t)
7535 {
7536 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7537
7538 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7539 }
7540
7541 /* Fold a unary expression of code CODE and type TYPE with operand
7542 OP0. Return the folded expression if folding is successful.
7543 Otherwise, return NULL_TREE. */
7544
7545 tree
7546 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7547 {
7548 tree tem;
7549 tree arg0;
7550 enum tree_code_class kind = TREE_CODE_CLASS (code);
7551
7552 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7553 && TREE_CODE_LENGTH (code) == 1);
7554
7555 arg0 = op0;
7556 if (arg0)
7557 {
7558 if (CONVERT_EXPR_CODE_P (code)
7559 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7560 {
7561 /* Don't use STRIP_NOPS, because signedness of argument type
7562 matters. */
7563 STRIP_SIGN_NOPS (arg0);
7564 }
7565 else
7566 {
7567 /* Strip any conversions that don't change the mode. This
7568 is safe for every expression, except for a comparison
7569 expression because its signedness is derived from its
7570 operands.
7571
7572 Note that this is done as an internal manipulation within
7573 the constant folder, in order to find the simplest
7574 representation of the arguments so that their form can be
7575 studied. In any case, the appropriate type conversions
7576 should be put back in the tree that will get out of the
7577 constant folder. */
7578 STRIP_NOPS (arg0);
7579 }
7580
7581 if (CONSTANT_CLASS_P (arg0))
7582 {
7583 tree tem = const_unop (code, type, arg0);
7584 if (tem)
7585 {
7586 if (TREE_TYPE (tem) != type)
7587 tem = fold_convert_loc (loc, type, tem);
7588 return tem;
7589 }
7590 }
7591 }
7592
7593 tem = generic_simplify (loc, code, type, op0);
7594 if (tem)
7595 return tem;
7596
7597 if (TREE_CODE_CLASS (code) == tcc_unary)
7598 {
7599 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7600 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7601 fold_build1_loc (loc, code, type,
7602 fold_convert_loc (loc, TREE_TYPE (op0),
7603 TREE_OPERAND (arg0, 1))));
7604 else if (TREE_CODE (arg0) == COND_EXPR)
7605 {
7606 tree arg01 = TREE_OPERAND (arg0, 1);
7607 tree arg02 = TREE_OPERAND (arg0, 2);
7608 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7609 arg01 = fold_build1_loc (loc, code, type,
7610 fold_convert_loc (loc,
7611 TREE_TYPE (op0), arg01));
7612 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7613 arg02 = fold_build1_loc (loc, code, type,
7614 fold_convert_loc (loc,
7615 TREE_TYPE (op0), arg02));
7616 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7617 arg01, arg02);
7618
7619 /* If this was a conversion, and all we did was to move it
7620 inside the COND_EXPR, bring it back out. But leave it if
7621 it is a conversion from integer to integer and the
7622 result precision is no wider than a word since such a
7623 conversion is cheap and may be optimized away by combine,
7624 while it couldn't if it were outside the COND_EXPR. Then return
7625 so we don't get into an infinite recursion loop taking the
7626 conversion out and then back in. */
7627
7628 if ((CONVERT_EXPR_CODE_P (code)
7629 || code == NON_LVALUE_EXPR)
7630 && TREE_CODE (tem) == COND_EXPR
7631 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7632 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7633 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7634 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7635 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7636 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7637 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7638 && (INTEGRAL_TYPE_P
7639 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7640 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7641 || flag_syntax_only))
7642 tem = build1_loc (loc, code, type,
7643 build3 (COND_EXPR,
7644 TREE_TYPE (TREE_OPERAND
7645 (TREE_OPERAND (tem, 1), 0)),
7646 TREE_OPERAND (tem, 0),
7647 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7648 TREE_OPERAND (TREE_OPERAND (tem, 2),
7649 0)));
7650 return tem;
7651 }
7652 }
7653
7654 switch (code)
7655 {
7656 case NON_LVALUE_EXPR:
7657 if (!maybe_lvalue_p (op0))
7658 return fold_convert_loc (loc, type, op0);
7659 return NULL_TREE;
7660
7661 CASE_CONVERT:
7662 case FLOAT_EXPR:
7663 case FIX_TRUNC_EXPR:
7664 if (COMPARISON_CLASS_P (op0))
7665 {
7666 /* If we have (type) (a CMP b) and type is an integral type, return
7667 new expression involving the new type. Canonicalize
7668 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7669 non-integral type.
7670 Do not fold the result as that would not simplify further, also
7671 folding again results in recursions. */
7672 if (TREE_CODE (type) == BOOLEAN_TYPE)
7673 return build2_loc (loc, TREE_CODE (op0), type,
7674 TREE_OPERAND (op0, 0),
7675 TREE_OPERAND (op0, 1));
7676 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7677 && TREE_CODE (type) != VECTOR_TYPE)
7678 return build3_loc (loc, COND_EXPR, type, op0,
7679 constant_boolean_node (true, type),
7680 constant_boolean_node (false, type));
7681 }
7682
7683 /* Handle (T *)&A.B.C for A being of type T and B and C
7684 living at offset zero. This occurs frequently in
7685 C++ upcasting and then accessing the base. */
7686 if (TREE_CODE (op0) == ADDR_EXPR
7687 && POINTER_TYPE_P (type)
7688 && handled_component_p (TREE_OPERAND (op0, 0)))
7689 {
7690 HOST_WIDE_INT bitsize, bitpos;
7691 tree offset;
7692 machine_mode mode;
7693 int unsignedp, reversep, volatilep;
7694 tree base
7695 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7696 &offset, &mode, &unsignedp, &reversep,
7697 &volatilep);
7698 /* If the reference was to a (constant) zero offset, we can use
7699 the address of the base if it has the same base type
7700 as the result type and the pointer type is unqualified. */
7701 if (! offset && bitpos == 0
7702 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7703 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7704 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7705 return fold_convert_loc (loc, type,
7706 build_fold_addr_expr_loc (loc, base));
7707 }
7708
7709 if (TREE_CODE (op0) == MODIFY_EXPR
7710 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7711 /* Detect assigning a bitfield. */
7712 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7713 && DECL_BIT_FIELD
7714 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7715 {
7716 /* Don't leave an assignment inside a conversion
7717 unless assigning a bitfield. */
7718 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7719 /* First do the assignment, then return converted constant. */
7720 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7721 TREE_NO_WARNING (tem) = 1;
7722 TREE_USED (tem) = 1;
7723 return tem;
7724 }
7725
7726 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7727 constants (if x has signed type, the sign bit cannot be set
7728 in c). This folds extension into the BIT_AND_EXPR.
7729 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7730 very likely don't have maximal range for their precision and this
7731 transformation effectively doesn't preserve non-maximal ranges. */
7732 if (TREE_CODE (type) == INTEGER_TYPE
7733 && TREE_CODE (op0) == BIT_AND_EXPR
7734 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7735 {
7736 tree and_expr = op0;
7737 tree and0 = TREE_OPERAND (and_expr, 0);
7738 tree and1 = TREE_OPERAND (and_expr, 1);
7739 int change = 0;
7740
7741 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7742 || (TYPE_PRECISION (type)
7743 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7744 change = 1;
7745 else if (TYPE_PRECISION (TREE_TYPE (and1))
7746 <= HOST_BITS_PER_WIDE_INT
7747 && tree_fits_uhwi_p (and1))
7748 {
7749 unsigned HOST_WIDE_INT cst;
7750
7751 cst = tree_to_uhwi (and1);
7752 cst &= HOST_WIDE_INT_M1U
7753 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7754 change = (cst == 0);
7755 if (change
7756 && !flag_syntax_only
7757 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7758 == ZERO_EXTEND))
7759 {
7760 tree uns = unsigned_type_for (TREE_TYPE (and0));
7761 and0 = fold_convert_loc (loc, uns, and0);
7762 and1 = fold_convert_loc (loc, uns, and1);
7763 }
7764 }
7765 if (change)
7766 {
7767 tem = force_fit_type (type, wi::to_widest (and1), 0,
7768 TREE_OVERFLOW (and1));
7769 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7770 fold_convert_loc (loc, type, and0), tem);
7771 }
7772 }
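	  /* Worked example: with 32-bit int x, (long)(x & 0x7f) becomes
	     (long)x & 0x7fL on typical targets, since 0x7f leaves the
	     sign bit of the narrower type clear and the extension can
	     therefore be hoisted into the BIT_AND_EXPR.  */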
7773
7774 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7775 cast (T1)X will fold away. We assume that this happens when X itself
7776 is a cast. */
7777 if (POINTER_TYPE_P (type)
7778 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7779 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7780 {
7781 tree arg00 = TREE_OPERAND (arg0, 0);
7782 tree arg01 = TREE_OPERAND (arg0, 1);
7783
7784 return fold_build_pointer_plus_loc
7785 (loc, fold_convert_loc (loc, type, arg00), arg01);
7786 }
7787
7788 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7789 of the same precision, and X is an integer type not narrower than
7790 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7791 if (INTEGRAL_TYPE_P (type)
7792 && TREE_CODE (op0) == BIT_NOT_EXPR
7793 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7794 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7795 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7796 {
7797 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7798 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7799 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7800 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7801 fold_convert_loc (loc, type, tem));
7802 }
7803
7804 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7805 type of X and Y (integer types only). */
7806 if (INTEGRAL_TYPE_P (type)
7807 && TREE_CODE (op0) == MULT_EXPR
7808 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7809 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7810 {
7811 /* Be careful not to introduce new overflows. */
7812 tree mult_type;
7813 if (TYPE_OVERFLOW_WRAPS (type))
7814 mult_type = type;
7815 else
7816 mult_type = unsigned_type_for (type);
7817
7818 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7819 {
7820 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7821 fold_convert_loc (loc, mult_type,
7822 TREE_OPERAND (op0, 0)),
7823 fold_convert_loc (loc, mult_type,
7824 TREE_OPERAND (op0, 1)));
7825 return fold_convert_loc (loc, type, tem);
7826 }
7827 }
7828
7829 return NULL_TREE;
7830
7831 case VIEW_CONVERT_EXPR:
7832 if (TREE_CODE (op0) == MEM_REF)
7833 {
7834 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7835 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7836 tem = fold_build2_loc (loc, MEM_REF, type,
7837 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7838 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7839 return tem;
7840 }
7841
7842 return NULL_TREE;
7843
7844 case NEGATE_EXPR:
7845 tem = fold_negate_expr (loc, arg0);
7846 if (tem)
7847 return fold_convert_loc (loc, type, tem);
7848 return NULL_TREE;
7849
7850 case ABS_EXPR:
7851 /* Convert fabs((double)float) into (double)fabsf(float). */
7852 if (TREE_CODE (arg0) == NOP_EXPR
7853 && TREE_CODE (type) == REAL_TYPE)
7854 {
7855 tree targ0 = strip_float_extensions (arg0);
7856 if (targ0 != arg0)
7857 return fold_convert_loc (loc, type,
7858 fold_build1_loc (loc, ABS_EXPR,
7859 TREE_TYPE (targ0),
7860 targ0));
7861 }
7862 return NULL_TREE;
7863
7864 case BIT_NOT_EXPR:
7865 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7866 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7867 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7868 fold_convert_loc (loc, type,
7869 TREE_OPERAND (arg0, 0)))))
7870 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7871 fold_convert_loc (loc, type,
7872 TREE_OPERAND (arg0, 1)));
7873 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7874 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7875 fold_convert_loc (loc, type,
7876 TREE_OPERAND (arg0, 1)))))
7877 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7878 fold_convert_loc (loc, type,
7879 TREE_OPERAND (arg0, 0)), tem);
7880
7881 return NULL_TREE;
7882
7883 case TRUTH_NOT_EXPR:
7884 /* Note that the operand of this must be an int
7885 and its values must be 0 or 1.
7886 ("true" is a fixed value perhaps depending on the language,
7887 but we don't handle values other than 1 correctly yet.) */
7888 tem = fold_truth_not_expr (loc, arg0);
7889 if (!tem)
7890 return NULL_TREE;
7891 return fold_convert_loc (loc, type, tem);
7892
7893 case INDIRECT_REF:
7894 /* Fold *&X to X if X is an lvalue. */
7895 if (TREE_CODE (op0) == ADDR_EXPR)
7896 {
7897 tree op00 = TREE_OPERAND (op0, 0);
7898 if ((VAR_P (op00)
7899 || TREE_CODE (op00) == PARM_DECL
7900 || TREE_CODE (op00) == RESULT_DECL)
7901 && !TREE_READONLY (op00))
7902 return op00;
7903 }
7904 return NULL_TREE;
7905
7906 default:
7907 return NULL_TREE;
7908 } /* switch (code) */
7909 }
7910
7911
7912 /* If the operation was a conversion do _not_ mark a resulting constant
7913 with TREE_OVERFLOW if the original constant was not. These conversions
7914 have implementation defined behavior and retaining the TREE_OVERFLOW
7915 flag here would confuse later passes such as VRP. */
7916 tree
7917 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7918 tree type, tree op0)
7919 {
7920 tree res = fold_unary_loc (loc, code, type, op0);
7921 if (res
7922 && TREE_CODE (res) == INTEGER_CST
7923 && TREE_CODE (op0) == INTEGER_CST
7924 && CONVERT_EXPR_CODE_P (code))
7925 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7926
7927 return res;
7928 }
7929
7930 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7931 operands OP0 and OP1. LOC is the location of the resulting expression.
7932 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7933 Return the folded expression if folding is successful. Otherwise,
7934 return NULL_TREE. */
7935 static tree
7936 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7937 tree arg0, tree arg1, tree op0, tree op1)
7938 {
7939 tree tem;
7940
7941 /* We only do these simplifications if we are optimizing. */
7942 if (!optimize)
7943 return NULL_TREE;
7944
7945 /* Check for things like (A || B) && (A || C). We can convert this
7946 to A || (B && C). Note that either operator can be any of the four
7947 truth and/or operations and the transformation will still be
7948 valid. Also note that we only care about order for the
7949 ANDIF and ORIF operators. If B contains side effects, this
7950 might change the truth-value of A. */
7951 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7952 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7953 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7954 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7955 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7956 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7957 {
7958 tree a00 = TREE_OPERAND (arg0, 0);
7959 tree a01 = TREE_OPERAND (arg0, 1);
7960 tree a10 = TREE_OPERAND (arg1, 0);
7961 tree a11 = TREE_OPERAND (arg1, 1);
7962 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7963 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7964 && (code == TRUTH_AND_EXPR
7965 || code == TRUTH_OR_EXPR));
7966
7967 if (operand_equal_p (a00, a10, 0))
7968 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7969 fold_build2_loc (loc, code, type, a01, a11));
7970 else if (commutative && operand_equal_p (a00, a11, 0))
7971 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7972 fold_build2_loc (loc, code, type, a01, a10));
7973 else if (commutative && operand_equal_p (a01, a10, 0))
7974 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7975 fold_build2_loc (loc, code, type, a00, a11));
7976
7977 /* This case is tricky because we must either have commutative
7978 operators or else A10 must not have side-effects. */
7979
7980 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7981 && operand_equal_p (a01, a11, 0))
7982 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7983 fold_build2_loc (loc, code, type, a00, a10),
7984 a01);
7985 }
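  /* E.g. (a || b) && (a || c) folds to a || (b && c) here, provided b
     has no side effects; the same shape works for the other three truth
     operators.  */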
7986
7987 /* See if we can build a range comparison. */
7988 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
7989 return tem;
7990
7991 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
7992 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
7993 {
7994 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
7995 if (tem)
7996 return fold_build2_loc (loc, code, type, tem, arg1);
7997 }
7998
7999 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8000 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8001 {
8002 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8003 if (tem)
8004 return fold_build2_loc (loc, code, type, arg0, tem);
8005 }
8006
8007 /* Check for the possibility of merging component references. If our
8008 lhs is another similar operation, try to merge its rhs with our
8009 rhs. Then try to merge our lhs and rhs. */
8010 if (TREE_CODE (arg0) == code
8011 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8012 TREE_OPERAND (arg0, 1), arg1)))
8013 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8014
8015 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8016 return tem;
8017
8018 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8019 && !flag_sanitize_coverage
8020 && (code == TRUTH_AND_EXPR
8021 || code == TRUTH_ANDIF_EXPR
8022 || code == TRUTH_OR_EXPR
8023 || code == TRUTH_ORIF_EXPR))
8024 {
8025 enum tree_code ncode, icode;
8026
8027 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8028 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8029 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8030
8031 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8032 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8033 We don't want to pack more than two leaves into a non-IF AND/OR
8034 expression.
8035 If the tree code of the left-hand operand isn't an AND/OR-IF code
8036 and isn't equal to IF-CODE, then we don't want to add the
8037 right-hand operand.
8038 If the inner right-hand side of the left-hand operand has
8039 side-effects, or isn't simple, then we can't add to it, as
8040 otherwise we might destroy the if-sequence. */
8040 if (TREE_CODE (arg0) == icode
8041 && simple_operand_p_2 (arg1)
8042 /* Needed for sequence points to handle trapping and
8043 side-effects. */
8044 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8045 {
8046 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8047 arg1);
8048 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8049 tem);
8050 }
8051 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8052 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8053 else if (TREE_CODE (arg1) == icode
8054 && simple_operand_p_2 (arg0)
8055 /* Needed for sequence points to handle trapping and
8056 side-effects. */
8057 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8058 {
8059 tem = fold_build2_loc (loc, ncode, type,
8060 arg0, TREE_OPERAND (arg1, 0));
8061 return fold_build2_loc (loc, icode, type, tem,
8062 TREE_OPERAND (arg1, 1));
8063 }
8064 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8065 into (A OR B).
8066 For sequence point consistency, we need to check for trapping,
8067 and side-effects. */
8068 else if (code == icode && simple_operand_p_2 (arg0)
8069 && simple_operand_p_2 (arg1))
8070 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8071 }
8072
8073 return NULL_TREE;
8074 }
8075
8076 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8077 by changing CODE to reduce the magnitude of constants involved in
8078 ARG0 of the comparison.
8079 Returns a canonicalized comparison tree if a simplification was
8080 possible, otherwise returns NULL_TREE.
8081 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8082 valid if signed overflow is undefined. */
8083
8084 static tree
8085 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8086 tree arg0, tree arg1,
8087 bool *strict_overflow_p)
8088 {
8089 enum tree_code code0 = TREE_CODE (arg0);
8090 tree t, cst0 = NULL_TREE;
8091 int sgn0;
8092
8093 /* Match A +- CST code arg1. We can change this only if overflow
8094 is undefined. */
8095 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8096 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8097 /* In principle pointers also have undefined overflow behavior,
8098 but that causes problems elsewhere. */
8099 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8100 && (code0 == MINUS_EXPR
8101 || code0 == PLUS_EXPR)
8102 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8103 return NULL_TREE;
8104
8105 /* Identify the constant in arg0 and its sign. */
8106 cst0 = TREE_OPERAND (arg0, 1);
8107 sgn0 = tree_int_cst_sgn (cst0);
8108
8109 /* Overflowed constants and zero will cause problems. */
8110 if (integer_zerop (cst0)
8111 || TREE_OVERFLOW (cst0))
8112 return NULL_TREE;
8113
8114 /* See if we can reduce the magnitude of the constant in
8115 arg0 by changing the comparison code. */
8116 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8117 if (code == LT_EXPR
8118 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8119 code = LE_EXPR;
8120 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8121 else if (code == GT_EXPR
8122 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8123 code = GE_EXPR;
8124 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8125 else if (code == LE_EXPR
8126 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8127 code = LT_EXPR;
8128 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8129 else if (code == GE_EXPR
8130 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8131 code = GT_EXPR;
8132 else
8133 return NULL_TREE;
8134 *strict_overflow_p = true;
8135
8136 /* Now build the constant reduced in magnitude. But not if that
8137 would produce one outside of its type's range. */
8138 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8139 && ((sgn0 == 1
8140 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8141 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8142 || (sgn0 == -1
8143 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8144 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8145 return NULL_TREE;
8146
8147 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8148 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8149 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8150 t = fold_convert (TREE_TYPE (arg1), t);
8151
8152 return fold_build2_loc (loc, code, type, t, arg1);
8153 }
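/* Worked example (valid only because signed overflow is assumed
   undefined): a - 1 < b is canonicalized to a - 0 <= b, and the
   subtraction of zero then folds away, leaving a <= b with a smaller
   constant in play.  */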
8154
8155 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8156 overflow further. Try to decrease the magnitude of constants involved
8157 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8158 and put sole constants at the second argument position.
8159 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8160
8161 static tree
8162 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8163 tree arg0, tree arg1)
8164 {
8165 tree t;
8166 bool strict_overflow_p;
8167 const char * const warnmsg = G_("assuming signed overflow does not occur "
8168 "when reducing constant in comparison");
8169
8170 /* Try canonicalization by simplifying arg0. */
8171 strict_overflow_p = false;
8172 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8173 &strict_overflow_p);
8174 if (t)
8175 {
8176 if (strict_overflow_p)
8177 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8178 return t;
8179 }
8180
8181 /* Try canonicalization by simplifying arg1 using the swapped
8182 comparison. */
8183 code = swap_tree_comparison (code);
8184 strict_overflow_p = false;
8185 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8186 &strict_overflow_p);
8187 if (t && strict_overflow_p)
8188 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8189 return t;
8190 }
8191
8192 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8193 space. This is used to avoid issuing overflow warnings for
8194 expressions like &p->x which cannot wrap. */
8195
8196 static bool
8197 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8198 {
8199 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8200 return true;
8201
8202 if (bitpos < 0)
8203 return true;
8204
8205 wide_int wi_offset;
8206 int precision = TYPE_PRECISION (TREE_TYPE (base));
8207 if (offset == NULL_TREE)
8208 wi_offset = wi::zero (precision);
8209 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8210 return true;
8211 else
8212 wi_offset = wi::to_wide (offset);
8213
8214 bool overflow;
8215 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8216 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8217 if (overflow)
8218 return true;
8219
8220 if (!wi::fits_uhwi_p (total))
8221 return true;
8222
8223 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8224 if (size <= 0)
8225 return true;
8226
8227 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8228 array. */
8229 if (TREE_CODE (base) == ADDR_EXPR)
8230 {
8231 HOST_WIDE_INT base_size;
8232
8233 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8234 if (base_size > 0 && size < base_size)
8235 size = base_size;
8236 }
8237
8238 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8239 }
8240
8241 /* Return a positive integer when the symbol DECL is known to have
8242 a nonzero address, zero when it's known not to (e.g., it's a weak
8243 symbol), and a negative integer when the symbol is not yet in the
8244 symbol table and so whether or not its address is zero is unknown.
8245 For function-local objects, always return a positive integer. */
8246 static int
8247 maybe_nonzero_address (tree decl)
8248 {
8249 if (DECL_P (decl) && decl_in_symtab_p (decl))
8250 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8251 return symbol->nonzero_address ();
8252
8253 /* Function local objects are never NULL. */
8254 if (DECL_P (decl)
8255 && (DECL_CONTEXT (decl)
8256 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8257 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8258 return 1;
8259
8260 return -1;
8261 }
8262
8263 /* Subroutine of fold_binary. This routine performs all of the
8264 transformations that are common to the equality/inequality
8265 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8266 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8267 fold_binary should call fold_binary. Fold a comparison with
8268 tree code CODE and type TYPE with operands OP0 and OP1. Return
8269 the folded comparison or NULL_TREE. */
8270
8271 static tree
8272 fold_comparison (location_t loc, enum tree_code code, tree type,
8273 tree op0, tree op1)
8274 {
8275 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8276 tree arg0, arg1, tem;
8277
8278 arg0 = op0;
8279 arg1 = op1;
8280
8281 STRIP_SIGN_NOPS (arg0);
8282 STRIP_SIGN_NOPS (arg1);
8283
8284 /* For comparisons of pointers we can decompose it to a compile time
8285 comparison of the base objects and the offsets into the object.
8286 This requires at least one operand being an ADDR_EXPR or a
8287 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8288 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8289 && (TREE_CODE (arg0) == ADDR_EXPR
8290 || TREE_CODE (arg1) == ADDR_EXPR
8291 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8292 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8293 {
8294 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8295 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8296 machine_mode mode;
8297 int volatilep, reversep, unsignedp;
8298 bool indirect_base0 = false, indirect_base1 = false;
8299
8300 /* Get base and offset for the access. Strip ADDR_EXPR for
8301 get_inner_reference, but put it back by stripping INDIRECT_REF
8302 off the base object if possible. indirect_baseN will be true
8303 if baseN is not an address but refers to the object itself. */
8304 base0 = arg0;
8305 if (TREE_CODE (arg0) == ADDR_EXPR)
8306 {
8307 base0
8308 = get_inner_reference (TREE_OPERAND (arg0, 0),
8309 &bitsize, &bitpos0, &offset0, &mode,
8310 &unsignedp, &reversep, &volatilep);
8311 if (TREE_CODE (base0) == INDIRECT_REF)
8312 base0 = TREE_OPERAND (base0, 0);
8313 else
8314 indirect_base0 = true;
8315 }
8316 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8317 {
8318 base0 = TREE_OPERAND (arg0, 0);
8319 STRIP_SIGN_NOPS (base0);
8320 if (TREE_CODE (base0) == ADDR_EXPR)
8321 {
8322 base0
8323 = get_inner_reference (TREE_OPERAND (base0, 0),
8324 &bitsize, &bitpos0, &offset0, &mode,
8325 &unsignedp, &reversep, &volatilep);
8326 if (TREE_CODE (base0) == INDIRECT_REF)
8327 base0 = TREE_OPERAND (base0, 0);
8328 else
8329 indirect_base0 = true;
8330 }
8331 if (offset0 == NULL_TREE || integer_zerop (offset0))
8332 offset0 = TREE_OPERAND (arg0, 1);
8333 else
8334 offset0 = size_binop (PLUS_EXPR, offset0,
8335 TREE_OPERAND (arg0, 1));
8336 if (TREE_CODE (offset0) == INTEGER_CST)
8337 {
8338 offset_int tem = wi::sext (wi::to_offset (offset0),
8339 TYPE_PRECISION (sizetype));
8340 tem <<= LOG2_BITS_PER_UNIT;
8341 tem += bitpos0;
8342 if (wi::fits_shwi_p (tem))
8343 {
8344 bitpos0 = tem.to_shwi ();
8345 offset0 = NULL_TREE;
8346 }
8347 }
8348 }
8349
8350 base1 = arg1;
8351 if (TREE_CODE (arg1) == ADDR_EXPR)
8352 {
8353 base1
8354 = get_inner_reference (TREE_OPERAND (arg1, 0),
8355 &bitsize, &bitpos1, &offset1, &mode,
8356 &unsignedp, &reversep, &volatilep);
8357 if (TREE_CODE (base1) == INDIRECT_REF)
8358 base1 = TREE_OPERAND (base1, 0);
8359 else
8360 indirect_base1 = true;
8361 }
8362 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8363 {
8364 base1 = TREE_OPERAND (arg1, 0);
8365 STRIP_SIGN_NOPS (base1);
8366 if (TREE_CODE (base1) == ADDR_EXPR)
8367 {
8368 base1
8369 = get_inner_reference (TREE_OPERAND (base1, 0),
8370 &bitsize, &bitpos1, &offset1, &mode,
8371 &unsignedp, &reversep, &volatilep);
8372 if (TREE_CODE (base1) == INDIRECT_REF)
8373 base1 = TREE_OPERAND (base1, 0);
8374 else
8375 indirect_base1 = true;
8376 }
8377 if (offset1 == NULL_TREE || integer_zerop (offset1))
8378 offset1 = TREE_OPERAND (arg1, 1);
8379 else
8380 offset1 = size_binop (PLUS_EXPR, offset1,
8381 TREE_OPERAND (arg1, 1));
8382 if (TREE_CODE (offset1) == INTEGER_CST)
8383 {
8384 offset_int tem = wi::sext (wi::to_offset (offset1),
8385 TYPE_PRECISION (sizetype));
8386 tem <<= LOG2_BITS_PER_UNIT;
8387 tem += bitpos1;
8388 if (wi::fits_shwi_p (tem))
8389 {
8390 bitpos1 = tem.to_shwi ();
8391 offset1 = NULL_TREE;
8392 }
8393 }
8394 }
8395
8396 /* If we have equivalent bases we might be able to simplify. */
8397 if (indirect_base0 == indirect_base1
8398 && operand_equal_p (base0, base1,
8399 indirect_base0 ? OEP_ADDRESS_OF : 0))
8400 {
8401 /* We can fold this expression to a constant if the non-constant
8402 offset parts are equal. */
8403 if (offset0 == offset1
8404 || (offset0 && offset1
8405 && operand_equal_p (offset0, offset1, 0)))
8406 {
8407 if (!equality_code
8408 && bitpos0 != bitpos1
8409 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8410 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8411 fold_overflow_warning (("assuming pointer wraparound does not "
8412 "occur when comparing P +- C1 with "
8413 "P +- C2"),
8414 WARN_STRICT_OVERFLOW_CONDITIONAL);
8415
8416 switch (code)
8417 {
8418 case EQ_EXPR:
8419 return constant_boolean_node (bitpos0 == bitpos1, type);
8420 case NE_EXPR:
8421 return constant_boolean_node (bitpos0 != bitpos1, type);
8422 case LT_EXPR:
8423 return constant_boolean_node (bitpos0 < bitpos1, type);
8424 case LE_EXPR:
8425 return constant_boolean_node (bitpos0 <= bitpos1, type);
8426 case GE_EXPR:
8427 return constant_boolean_node (bitpos0 >= bitpos1, type);
8428 case GT_EXPR:
8429 return constant_boolean_node (bitpos0 > bitpos1, type);
8430 default:;
8431 }
8432 }
8433 /* We can simplify the comparison to a comparison of the variable
8434 offset parts if the constant offset parts are equal.
8435 Be careful to use signed sizetype here because otherwise we
8436 mess with array offsets in the wrong way. This is possible
8437 because pointer arithmetic is restricted to remain within an
8438 object and overflow on pointer differences is undefined as of
8439 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8440 else if (bitpos0 == bitpos1)
8441 {
8442 /* By converting to signed sizetype we cover middle-end pointer
8443 arithmetic which operates on unsigned pointer types of size
8444 type size and ARRAY_REF offsets which are properly sign or
8445 zero extended from their type in case it is narrower than
8446 sizetype. */
8447 if (offset0 == NULL_TREE)
8448 offset0 = build_int_cst (ssizetype, 0);
8449 else
8450 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8451 if (offset1 == NULL_TREE)
8452 offset1 = build_int_cst (ssizetype, 0);
8453 else
8454 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8455
8456 if (!equality_code
8457 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8458 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8459 fold_overflow_warning (("assuming pointer wraparound does not "
8460 "occur when comparing P +- C1 with "
8461 "P +- C2"),
8462 WARN_STRICT_OVERFLOW_COMPARISON);
8463
8464 return fold_build2_loc (loc, code, type, offset0, offset1);
8465 }
8466 }
8467 /* For equal offsets we can simplify to a comparison of the
8468 base addresses. */
8469 else if (bitpos0 == bitpos1
8470 && (indirect_base0
8471 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8472 && (indirect_base1
8473 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8474 && ((offset0 == offset1)
8475 || (offset0 && offset1
8476 && operand_equal_p (offset0, offset1, 0))))
8477 {
8478 if (indirect_base0)
8479 base0 = build_fold_addr_expr_loc (loc, base0);
8480 if (indirect_base1)
8481 base1 = build_fold_addr_expr_loc (loc, base1);
8482 return fold_build2_loc (loc, code, type, base0, base1);
8483 }
8484 /* Comparison between an ordinary (non-weak) symbol and a null
8485 pointer can be eliminated since such symbols must have a
8486 non-null address. In C, relational expressions between pointers
8487 to objects and null pointers are undefined. The results
8488 below follow the C++ rules with the additional property that
8489 every object pointer compares greater than a null pointer.
8490 */
8491 else if (((DECL_P (base0)
8492 && maybe_nonzero_address (base0) > 0
8493 /* Avoid folding references to struct members at offset 0 to
8494 prevent tests like '&ptr->firstmember == 0' from getting
8495 eliminated. When ptr is null, although the -> expression
8496 is strictly speaking invalid, GCC retains it as a matter
8497 of QoI. See PR c/44555. */
8498 && (offset0 == NULL_TREE && bitpos0 != 0))
8499 || CONSTANT_CLASS_P (base0))
8500 && indirect_base0
8501 /* The caller guarantees that when one of the arguments is
8502 constant (i.e., null in this case) it is second. */
8503 && integer_zerop (arg1))
8504 {
8505 switch (code)
8506 {
8507 case EQ_EXPR:
8508 case LE_EXPR:
8509 case LT_EXPR:
8510 return constant_boolean_node (false, type);
8511 case GE_EXPR:
8512 case GT_EXPR:
8513 case NE_EXPR:
8514 return constant_boolean_node (true, type);
8515 default:
8516 gcc_unreachable ();
8517 }
8518 }
8519 }
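  /* Illustrative example: for struct s { int f0; int f4; } x, the
     comparison &x.f0 < &x.f4 decomposes to equal bases with bit
     positions 0 and 32, so it folds to true via the switch above
     (assuming 32-bit int).  */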
8520
8521 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8522 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8523 the resulting offset is smaller in absolute value than the
8524 original one and has the same sign. */
8525 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8526 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8527 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8528 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8529 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8530 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8531 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8532 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8533 {
8534 tree const1 = TREE_OPERAND (arg0, 1);
8535 tree const2 = TREE_OPERAND (arg1, 1);
8536 tree variable1 = TREE_OPERAND (arg0, 0);
8537 tree variable2 = TREE_OPERAND (arg1, 0);
8538 tree cst;
8539 const char * const warnmsg = G_("assuming signed overflow does not "
8540 "occur when combining constants around "
8541 "a comparison");
8542
8543 /* Put the constant on the side where it doesn't overflow and is
8544 of lower absolute value and of the same sign as before. */
8545 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8546 ? MINUS_EXPR : PLUS_EXPR,
8547 const2, const1);
8548 if (!TREE_OVERFLOW (cst)
8549 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8550 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8551 {
8552 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8553 return fold_build2_loc (loc, code, type,
8554 variable1,
8555 fold_build2_loc (loc, TREE_CODE (arg1),
8556 TREE_TYPE (arg1),
8557 variable2, cst));
8558 }
8559
8560 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8561 ? MINUS_EXPR : PLUS_EXPR,
8562 const1, const2);
8563 if (!TREE_OVERFLOW (cst)
8564 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8565 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8566 {
8567 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8568 return fold_build2_loc (loc, code, type,
8569 fold_build2_loc (loc, TREE_CODE (arg0),
8570 TREE_TYPE (arg0),
8571 variable1, cst),
8572 variable2);
8573 }
8574 }
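  /* Worked example (signed, overflow assumed undefined): for
     x + 2 < y + 1 the second attempt computes cst = 2 - 1 = 1, which is
     smaller in magnitude and of the same sign, so the comparison is
     rewritten as x + 1 < y.  */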
8575
8576 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8577 if (tem)
8578 return tem;
8579
8580 /* If we are comparing an expression that just has comparisons
8581 of two integer values, arithmetic expressions of those comparisons,
8582 and constants, we can simplify it. There are only three cases
8583 to check: the two values can either be equal, the first can be
8584 greater, or the second can be greater. Fold the expression for
8585 those three values. Since each value must be 0 or 1, we have
8586 eight possibilities, each of which corresponds to the constant 0
8587 or 1 or one of the six possible comparisons.
8588
8589 This handles common cases like (a > b) == 0 but also handles
8590 expressions like ((x > y) - (y > x)) > 0, which supposedly
8591 occur in macroized code. */
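  /* Illustrative walk-through: for ((x > y) - (y > x)) > 0 the three
     evaluations yield 1, 0 and 0 for the greater, equal and less
     orderings, i.e. mask 4 below, so the whole expression folds to
     x > y.  */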
8592
8593 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8594 {
8595 tree cval1 = 0, cval2 = 0;
8596 int save_p = 0;
8597
8598 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8599 /* Don't handle degenerate cases here; they should already
8600 have been handled anyway. */
8601 && cval1 != 0 && cval2 != 0
8602 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8603 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8604 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8605 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8606 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8607 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8608 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8609 {
8610 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8611 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8612
8613 /* We can't just pass T to eval_subst in case cval1 or cval2
8614 was the same as ARG1. */
8615
8616 tree high_result
8617 = fold_build2_loc (loc, code, type,
8618 eval_subst (loc, arg0, cval1, maxval,
8619 cval2, minval),
8620 arg1);
8621 tree equal_result
8622 = fold_build2_loc (loc, code, type,
8623 eval_subst (loc, arg0, cval1, maxval,
8624 cval2, maxval),
8625 arg1);
8626 tree low_result
8627 = fold_build2_loc (loc, code, type,
8628 eval_subst (loc, arg0, cval1, minval,
8629 cval2, maxval),
8630 arg1);
8631
8632 /* All three of these results should be 0 or 1. Confirm they are.
8633 Then use those values to select the proper code to use. */
8634
8635 if (TREE_CODE (high_result) == INTEGER_CST
8636 && TREE_CODE (equal_result) == INTEGER_CST
8637 && TREE_CODE (low_result) == INTEGER_CST)
8638 {
8639 /* Make a 3-bit mask with the high-order bit being the
8640 value for '>', the next for '=', and the low for '<'. */
8641 switch ((integer_onep (high_result) * 4)
8642 + (integer_onep (equal_result) * 2)
8643 + integer_onep (low_result))
8644 {
8645 case 0:
8646 /* Always false. */
8647 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8648 case 1:
8649 code = LT_EXPR;
8650 break;
8651 case 2:
8652 code = EQ_EXPR;
8653 break;
8654 case 3:
8655 code = LE_EXPR;
8656 break;
8657 case 4:
8658 code = GT_EXPR;
8659 break;
8660 case 5:
8661 code = NE_EXPR;
8662 break;
8663 case 6:
8664 code = GE_EXPR;
8665 break;
8666 case 7:
8667 /* Always true. */
8668 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8669 }
8670
8671 if (save_p)
8672 {
8673 tem = save_expr (build2 (code, type, cval1, cval2));
8674 protected_set_expr_location (tem, loc);
8675 return tem;
8676 }
8677 return fold_build2_loc (loc, code, type, cval1, cval2);
8678 }
8679 }
8680 }
8681
8682 return NULL_TREE;
8683 }
8684
8685
8686 /* Subroutine of fold_binary. Optimize complex multiplications of the
8687 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8688 argument EXPR represents the expression "z" of type TYPE. */
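/* For example, with z = a + b*i, z * conj(z) is (a + b*i) * (a - b*i)
   = a*a + b*b, i.e. a complex value with a zero imaginary part.  */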
8689
8690 static tree
8691 fold_mult_zconjz (location_t loc, tree type, tree expr)
8692 {
8693 tree itype = TREE_TYPE (type);
8694 tree rpart, ipart, tem;
8695
8696 if (TREE_CODE (expr) == COMPLEX_EXPR)
8697 {
8698 rpart = TREE_OPERAND (expr, 0);
8699 ipart = TREE_OPERAND (expr, 1);
8700 }
8701 else if (TREE_CODE (expr) == COMPLEX_CST)
8702 {
8703 rpart = TREE_REALPART (expr);
8704 ipart = TREE_IMAGPART (expr);
8705 }
8706 else
8707 {
8708 expr = save_expr (expr);
8709 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8710 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8711 }
8712
8713 rpart = save_expr (rpart);
8714 ipart = save_expr (ipart);
8715 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8716 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8717 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8718 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8719 build_zero_cst (itype));
8720 }
8721
8722
8723 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8724 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
8725 true if successful. */
8726
8727 static bool
8728 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
8729 {
8730 unsigned int i;
8731
8732 if (TREE_CODE (arg) == VECTOR_CST)
8733 {
8734 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8735 elts[i] = VECTOR_CST_ELT (arg, i);
8736 }
8737 else if (TREE_CODE (arg) == CONSTRUCTOR)
8738 {
8739 constructor_elt *elt;
8740
8741 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8742 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8743 return false;
8744 else
8745 elts[i] = elt->value;
8746 }
8747 else
8748 return false;
8749 for (; i < nelts; i++)
8750 elts[i]
8751 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8752 return true;
8753 }
8754
8755 /* Attempt to fold a vector permutation of vectors ARG0 and ARG1 using the
8756 selector SEL. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8757 NULL_TREE otherwise. */
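/* For example, with nelts == 2, arg0 = { a, b }, arg1 = { c, d } and
   sel = { 0, 3 }, the selector indexes the concatenation { a, b, c, d }
   and the folded result is { a, d }.  */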
8758
8759 static tree
8760 fold_vec_perm (tree type, tree arg0, tree arg1, vec_perm_indices sel)
8761 {
8762 unsigned int i;
8763 bool need_ctor = false;
8764
8765 unsigned int nelts = sel.length ();
8766 gcc_assert (TYPE_VECTOR_SUBPARTS (type) == nelts
8767 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8768 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8769 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8770 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8771 return NULL_TREE;
8772
8773 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
8774 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
8775 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
8776 return NULL_TREE;
8777
8778 auto_vec<tree, 32> out_elts (nelts);
8779 for (i = 0; i < nelts; i++)
8780 {
8781 if (!CONSTANT_CLASS_P (in_elts[sel[i]]))
8782 need_ctor = true;
8783 out_elts.quick_push (unshare_expr (in_elts[sel[i]]));
8784 }
8785
8786 if (need_ctor)
8787 {
8788 vec<constructor_elt, va_gc> *v;
8789 vec_alloc (v, nelts);
8790 for (i = 0; i < nelts; i++)
8791 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
8792 return build_constructor (type, v);
8793 }
8794 else
8795 return build_vector (type, out_elts);
8796 }
8797
8798 /* Try to fold a pointer difference of type TYPE between two address
8799 expressions of array references AREF0 and AREF1 using location LOC. Return a
8800 simplified expression for the difference or NULL_TREE. */
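/* For example, &a[i] - &a[j] becomes (i - j) * sizeof (a[0]), and
   &a[i][k] - &a[j][l] recurses on the outer bases to yield
   (i - j) * sizeof (a[0]) + (k - l) * sizeof (a[0][0]).  */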
8801
8802 static tree
8803 fold_addr_of_array_ref_difference (location_t loc, tree type,
8804 tree aref0, tree aref1)
8805 {
8806 tree base0 = TREE_OPERAND (aref0, 0);
8807 tree base1 = TREE_OPERAND (aref1, 0);
8808 tree base_offset = build_int_cst (type, 0);
8809
8810 /* If the bases are array references as well, recurse. If the bases
8811 are pointer indirections, compute the difference of the pointers.
8812 If the bases are equal, we are set. */
8813 if ((TREE_CODE (base0) == ARRAY_REF
8814 && TREE_CODE (base1) == ARRAY_REF
8815 && (base_offset
8816 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8817 || (INDIRECT_REF_P (base0)
8818 && INDIRECT_REF_P (base1)
8819 && (base_offset
8820 = fold_binary_loc (loc, MINUS_EXPR, type,
8821 fold_convert (type, TREE_OPERAND (base0, 0)),
8822 fold_convert (type,
8823 TREE_OPERAND (base1, 0)))))
8824 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8825 {
8826 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8827 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8828 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8829 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
8830 return fold_build2_loc (loc, PLUS_EXPR, type,
8831 base_offset,
8832 fold_build2_loc (loc, MULT_EXPR, type,
8833 diff, esz));
8834 }
8835 return NULL_TREE;
8836 }
8837
8838 /* If the real or vector real constant CST of type TYPE has an exact
8839 inverse, return it, else return NULL. */
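/* For example, 4.0 has the exact inverse 0.25, while 3.0 has none
   (1/3 is not exactly representable in binary floating point), so
   NULL is returned for the latter.  */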
8840
8841 tree
8842 exact_inverse (tree type, tree cst)
8843 {
8844 REAL_VALUE_TYPE r;
8845 tree unit_type;
8846 machine_mode mode;
8847 unsigned vec_nelts, i;
8848
8849 switch (TREE_CODE (cst))
8850 {
8851 case REAL_CST:
8852 r = TREE_REAL_CST (cst);
8853
8854 if (exact_real_inverse (TYPE_MODE (type), &r))
8855 return build_real (type, r);
8856
8857 return NULL_TREE;
8858
8859 case VECTOR_CST:
8860 {
8861 vec_nelts = VECTOR_CST_NELTS (cst);
8862 unit_type = TREE_TYPE (type);
8863 mode = TYPE_MODE (unit_type);
8864
8865 auto_vec<tree, 32> elts (vec_nelts);
8866 for (i = 0; i < vec_nelts; i++)
8867 {
8868 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8869 if (!exact_real_inverse (mode, &r))
8870 return NULL_TREE;
8871 elts.quick_push (build_real (unit_type, r));
8872 }
8873
8874 return build_vector (type, elts);
8875 }
8876
8877 default:
8878 return NULL_TREE;
8879 }
8880 }
8881
8882 /* Mask out the tz least significant bits of X of type TYPE where
8883 tz is the number of trailing zeroes in Y. */
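/* For example, with Y = 0b0100 (two trailing zeroes) and X = 0b0111,
   the two least significant bits of X are cleared, giving 0b0100.  */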
8884 static wide_int
8885 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8886 {
8887 int tz = wi::ctz (y);
8888 if (tz > 0)
8889 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8890 return x;
8891 }
8892
8893 /* Return true when T is an expression known to be nonzero.
8894 For floating point we further ensure that T is not denormal.
8895 Similar logic is present in nonzero_address in rtlanal.h.
8896
8897 If the return value is based on the assumption that signed overflow
8898 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8899 change *STRICT_OVERFLOW_P. */
8900
8901 static bool
8902 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8903 {
8904 tree type = TREE_TYPE (t);
8905 enum tree_code code;
8906
8907 /* Doing something useful for floating point would need more work. */
8908 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8909 return false;
8910
8911 code = TREE_CODE (t);
8912 switch (TREE_CODE_CLASS (code))
8913 {
8914 case tcc_unary:
8915 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8916 strict_overflow_p);
8917 case tcc_binary:
8918 case tcc_comparison:
8919 return tree_binary_nonzero_warnv_p (code, type,
8920 TREE_OPERAND (t, 0),
8921 TREE_OPERAND (t, 1),
8922 strict_overflow_p);
8923 case tcc_constant:
8924 case tcc_declaration:
8925 case tcc_reference:
8926 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8927
8928 default:
8929 break;
8930 }
8931
8932 switch (code)
8933 {
8934 case TRUTH_NOT_EXPR:
8935 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8936 strict_overflow_p);
8937
8938 case TRUTH_AND_EXPR:
8939 case TRUTH_OR_EXPR:
8940 case TRUTH_XOR_EXPR:
8941 return tree_binary_nonzero_warnv_p (code, type,
8942 TREE_OPERAND (t, 0),
8943 TREE_OPERAND (t, 1),
8944 strict_overflow_p);
8945
8946 case COND_EXPR:
8947 case CONSTRUCTOR:
8948 case OBJ_TYPE_REF:
8949 case ASSERT_EXPR:
8950 case ADDR_EXPR:
8951 case WITH_SIZE_EXPR:
8952 case SSA_NAME:
8953 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8954
8955 case COMPOUND_EXPR:
8956 case MODIFY_EXPR:
8957 case BIND_EXPR:
8958 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
8959 strict_overflow_p);
8960
8961 case SAVE_EXPR:
8962 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
8963 strict_overflow_p);
8964
8965 case CALL_EXPR:
8966 {
8967 tree fndecl = get_callee_fndecl (t);
8968 if (!fndecl) return false;
8969 if (flag_delete_null_pointer_checks && !flag_check_new
8970 && DECL_IS_OPERATOR_NEW (fndecl)
8971 && !TREE_NOTHROW (fndecl))
8972 return true;
8973 if (flag_delete_null_pointer_checks
8974 && lookup_attribute ("returns_nonnull",
8975 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
8976 return true;
8977 return alloca_call_p (t);
8978 }
8979
8980 default:
8981 break;
8982 }
8983 return false;
8984 }
8985
8986 /* Return true when T is an expression known to be nonzero.
8987 Handle warnings about undefined signed overflow. */
8988
8989 bool
8990 tree_expr_nonzero_p (tree t)
8991 {
8992 bool ret, strict_overflow_p;
8993
8994 strict_overflow_p = false;
8995 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
8996 if (strict_overflow_p)
8997 fold_overflow_warning (("assuming signed overflow does not occur when "
8998 "determining that expression is always "
8999 "non-zero"),
9000 WARN_STRICT_OVERFLOW_MISC);
9001 return ret;
9002 }
9003
9004 /* Return true if T is known not to be equal to an integer W. */
9005
9006 bool
9007 expr_not_equal_to (tree t, const wide_int &w)
9008 {
9009 wide_int min, max, nz;
9010 value_range_type rtype;
9011 switch (TREE_CODE (t))
9012 {
9013 case INTEGER_CST:
9014 return wi::to_wide (t) != w;
9015
9016 case SSA_NAME:
9017 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9018 return false;
9019 rtype = get_range_info (t, &min, &max);
9020 if (rtype == VR_RANGE)
9021 {
9022 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9023 return true;
9024 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9025 return true;
9026 }
9027 else if (rtype == VR_ANTI_RANGE
9028 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9029 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9030 return true;
9031 /* If T has some known zero bits and W has any of those bits set,
9032 then T is known not to be equal to W. */
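/* For example, if the nonzero bits of T are known to fit in 0x0f
   and W is 0x10, then W sets a bit that is known to be zero in T.  */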
9033 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9034 TYPE_PRECISION (TREE_TYPE (t))), 0))
9035 return true;
9036 return false;
9037
9038 default:
9039 return false;
9040 }
9041 }
9042
9043 /* Fold a binary expression of code CODE and type TYPE with operands
9044 OP0 and OP1. LOC is the location of the resulting expression.
9045 Return the folded expression if folding is successful. Otherwise,
9046 return NULL_TREE. */
9047
9048 tree
9049 fold_binary_loc (location_t loc,
9050 enum tree_code code, tree type, tree op0, tree op1)
9051 {
9052 enum tree_code_class kind = TREE_CODE_CLASS (code);
9053 tree arg0, arg1, tem;
9054 tree t1 = NULL_TREE;
9055 bool strict_overflow_p;
9056 unsigned int prec;
9057
9058 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9059 && TREE_CODE_LENGTH (code) == 2
9060 && op0 != NULL_TREE
9061 && op1 != NULL_TREE);
9062
9063 arg0 = op0;
9064 arg1 = op1;
9065
9066 /* Strip any conversions that don't change the mode. This is
9067 safe for every expression, except for a comparison expression
9068 because its signedness is derived from its operands. So, in
9069 the latter case, only strip conversions that don't change the
9070 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9071 preserved.
9072
9073 Note that this is done as an internal manipulation within the
9074 constant folder, in order to find the simplest representation
9075 of the arguments so that their form can be studied. In any
9076 case, the appropriate type conversions should be put back in
9077 the tree that will get out of the constant folder. */
9078
9079 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9080 {
9081 STRIP_SIGN_NOPS (arg0);
9082 STRIP_SIGN_NOPS (arg1);
9083 }
9084 else
9085 {
9086 STRIP_NOPS (arg0);
9087 STRIP_NOPS (arg1);
9088 }
9089
9090 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9091 constant but we can't do arithmetic on them. */
9092 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9093 {
9094 tem = const_binop (code, type, arg0, arg1);
9095 if (tem != NULL_TREE)
9096 {
9097 if (TREE_TYPE (tem) != type)
9098 tem = fold_convert_loc (loc, type, tem);
9099 return tem;
9100 }
9101 }
9102
9103 /* If this is a commutative operation, and ARG0 is a constant, move it
9104 to ARG1 to reduce the number of tests below. */
9105 if (commutative_tree_code (code)
9106 && tree_swap_operands_p (arg0, arg1))
9107 return fold_build2_loc (loc, code, type, op1, op0);
9108
9109 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9110 to ARG1 to reduce the number of tests below. */
9111 if (kind == tcc_comparison
9112 && tree_swap_operands_p (arg0, arg1))
9113 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9114
9115 tem = generic_simplify (loc, code, type, op0, op1);
9116 if (tem)
9117 return tem;
9118
9119 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9120
9121 First check for cases where an arithmetic operation is applied to a
9122 compound, conditional, or comparison operation. Push the arithmetic
9123 operation inside the compound or conditional to see if any folding
9124 can then be done. Convert comparison to conditional for this purpose.
9125 This also optimizes non-constant cases that used to be done in
9126 expand_expr.
9127
9128 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9129 one of the operands is a comparison and the other is a comparison, a
9130 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9131 code below would make the expression more complex. Change it to a
9132 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9133 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9134
9135 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9136 || code == EQ_EXPR || code == NE_EXPR)
9137 && TREE_CODE (type) != VECTOR_TYPE
9138 && ((truth_value_p (TREE_CODE (arg0))
9139 && (truth_value_p (TREE_CODE (arg1))
9140 || (TREE_CODE (arg1) == BIT_AND_EXPR
9141 && integer_onep (TREE_OPERAND (arg1, 1)))))
9142 || (truth_value_p (TREE_CODE (arg1))
9143 && (truth_value_p (TREE_CODE (arg0))
9144 || (TREE_CODE (arg0) == BIT_AND_EXPR
9145 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9146 {
9147 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9148 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9149 : TRUTH_XOR_EXPR,
9150 boolean_type_node,
9151 fold_convert_loc (loc, boolean_type_node, arg0),
9152 fold_convert_loc (loc, boolean_type_node, arg1));
9153
9154 if (code == EQ_EXPR)
9155 tem = invert_truthvalue_loc (loc, tem);
9156
9157 return fold_convert_loc (loc, type, tem);
9158 }
9159
9160 if (TREE_CODE_CLASS (code) == tcc_binary
9161 || TREE_CODE_CLASS (code) == tcc_comparison)
9162 {
9163 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9164 {
9165 tem = fold_build2_loc (loc, code, type,
9166 fold_convert_loc (loc, TREE_TYPE (op0),
9167 TREE_OPERAND (arg0, 1)), op1);
9168 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9169 tem);
9170 }
9171 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9172 {
9173 tem = fold_build2_loc (loc, code, type, op0,
9174 fold_convert_loc (loc, TREE_TYPE (op1),
9175 TREE_OPERAND (arg1, 1)));
9176 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9177 tem);
9178 }
9179
9180 if (TREE_CODE (arg0) == COND_EXPR
9181 || TREE_CODE (arg0) == VEC_COND_EXPR
9182 || COMPARISON_CLASS_P (arg0))
9183 {
9184 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9185 arg0, arg1,
9186 /*cond_first_p=*/1);
9187 if (tem != NULL_TREE)
9188 return tem;
9189 }
9190
9191 if (TREE_CODE (arg1) == COND_EXPR
9192 || TREE_CODE (arg1) == VEC_COND_EXPR
9193 || COMPARISON_CLASS_P (arg1))
9194 {
9195 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9196 arg1, arg0,
9197 /*cond_first_p=*/0);
9198 if (tem != NULL_TREE)
9199 return tem;
9200 }
9201 }
9202
9203 switch (code)
9204 {
9205 case MEM_REF:
9206 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9207 if (TREE_CODE (arg0) == ADDR_EXPR
9208 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9209 {
9210 tree iref = TREE_OPERAND (arg0, 0);
9211 return fold_build2 (MEM_REF, type,
9212 TREE_OPERAND (iref, 0),
9213 int_const_binop (PLUS_EXPR, arg1,
9214 TREE_OPERAND (iref, 1)));
9215 }
9216
9217 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9218 if (TREE_CODE (arg0) == ADDR_EXPR
9219 && handled_component_p (TREE_OPERAND (arg0, 0)))
9220 {
9221 tree base;
9222 HOST_WIDE_INT coffset;
9223 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9224 &coffset);
9225 if (!base)
9226 return NULL_TREE;
9227 return fold_build2 (MEM_REF, type,
9228 build_fold_addr_expr (base),
9229 int_const_binop (PLUS_EXPR, arg1,
9230 size_int (coffset)));
9231 }
9232
9233 return NULL_TREE;
9234
9235 case POINTER_PLUS_EXPR:
9236 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9237 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9238 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9239 return fold_convert_loc (loc, type,
9240 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9241 fold_convert_loc (loc, sizetype,
9242 arg1),
9243 fold_convert_loc (loc, sizetype,
9244 arg0)));
9245
9246 return NULL_TREE;
9247
9248 case PLUS_EXPR:
9249 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9250 {
9251 /* X + (X / CST) * -CST is X % CST. */
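/* For example, X + (X / 16) * -16 computes X % 16, matching the
   truncating-division identity X % 16 == X - (X / 16) * 16.  */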
9252 if (TREE_CODE (arg1) == MULT_EXPR
9253 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9254 && operand_equal_p (arg0,
9255 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9256 {
9257 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9258 tree cst1 = TREE_OPERAND (arg1, 1);
9259 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9260 cst1, cst0);
9261 if (sum && integer_zerop (sum))
9262 return fold_convert_loc (loc, type,
9263 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9264 TREE_TYPE (arg0), arg0,
9265 cst0));
9266 }
9267 }
9268
9269 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9270 one. Make sure the type is not saturating and has the signedness of
9271 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9272 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
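/* For example, A*3 + A*5 can become A*8, and A*3 + A is treated as
   A*3 + A*1 and becomes A*4 (hence "the same or one" above).  */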
9273 if ((TREE_CODE (arg0) == MULT_EXPR
9274 || TREE_CODE (arg1) == MULT_EXPR)
9275 && !TYPE_SATURATING (type)
9276 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9277 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9278 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9279 {
9280 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9281 if (tem)
9282 return tem;
9283 }
9284
9285 if (! FLOAT_TYPE_P (type))
9286 {
9287 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9288 (plus (plus (mult) (mult)) (foo)) so that we can
9289 take advantage of the factoring cases below. */
9290 if (ANY_INTEGRAL_TYPE_P (type)
9291 && TYPE_OVERFLOW_WRAPS (type)
9292 && (((TREE_CODE (arg0) == PLUS_EXPR
9293 || TREE_CODE (arg0) == MINUS_EXPR)
9294 && TREE_CODE (arg1) == MULT_EXPR)
9295 || ((TREE_CODE (arg1) == PLUS_EXPR
9296 || TREE_CODE (arg1) == MINUS_EXPR)
9297 && TREE_CODE (arg0) == MULT_EXPR)))
9298 {
9299 tree parg0, parg1, parg, marg;
9300 enum tree_code pcode;
9301
9302 if (TREE_CODE (arg1) == MULT_EXPR)
9303 parg = arg0, marg = arg1;
9304 else
9305 parg = arg1, marg = arg0;
9306 pcode = TREE_CODE (parg);
9307 parg0 = TREE_OPERAND (parg, 0);
9308 parg1 = TREE_OPERAND (parg, 1);
9309 STRIP_NOPS (parg0);
9310 STRIP_NOPS (parg1);
9311
9312 if (TREE_CODE (parg0) == MULT_EXPR
9313 && TREE_CODE (parg1) != MULT_EXPR)
9314 return fold_build2_loc (loc, pcode, type,
9315 fold_build2_loc (loc, PLUS_EXPR, type,
9316 fold_convert_loc (loc, type,
9317 parg0),
9318 fold_convert_loc (loc, type,
9319 marg)),
9320 fold_convert_loc (loc, type, parg1));
9321 if (TREE_CODE (parg0) != MULT_EXPR
9322 && TREE_CODE (parg1) == MULT_EXPR)
9323 return
9324 fold_build2_loc (loc, PLUS_EXPR, type,
9325 fold_convert_loc (loc, type, parg0),
9326 fold_build2_loc (loc, pcode, type,
9327 fold_convert_loc (loc, type, marg),
9328 fold_convert_loc (loc, type,
9329 parg1)));
9330 }
9331 }
9332 else
9333 {
9334 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9335 to __complex__ ( x, y ). This is not the same for SNaNs or
9336 if signed zeros are involved. */
9337 if (!HONOR_SNANS (element_mode (arg0))
9338 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9339 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9340 {
9341 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9342 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9343 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9344 bool arg0rz = false, arg0iz = false;
9345 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9346 || (arg0i && (arg0iz = real_zerop (arg0i))))
9347 {
9348 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9349 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9350 if (arg0rz && arg1i && real_zerop (arg1i))
9351 {
9352 tree rp = arg1r ? arg1r
9353 : build1 (REALPART_EXPR, rtype, arg1);
9354 tree ip = arg0i ? arg0i
9355 : build1 (IMAGPART_EXPR, rtype, arg0);
9356 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9357 }
9358 else if (arg0iz && arg1r && real_zerop (arg1r))
9359 {
9360 tree rp = arg0r ? arg0r
9361 : build1 (REALPART_EXPR, rtype, arg0);
9362 tree ip = arg1i ? arg1i
9363 : build1 (IMAGPART_EXPR, rtype, arg1);
9364 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9365 }
9366 }
9367 }
9368
9369 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9370 We associate floats only if the user has specified
9371 -fassociative-math. */
9372 if (flag_associative_math
9373 && TREE_CODE (arg1) == PLUS_EXPR
9374 && TREE_CODE (arg0) != MULT_EXPR)
9375 {
9376 tree tree10 = TREE_OPERAND (arg1, 0);
9377 tree tree11 = TREE_OPERAND (arg1, 1);
9378 if (TREE_CODE (tree11) == MULT_EXPR
9379 && TREE_CODE (tree10) == MULT_EXPR)
9380 {
9381 tree tree0;
9382 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9383 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9384 }
9385 }
9386 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9387 We associate floats only if the user has specified
9388 -fassociative-math. */
9389 if (flag_associative_math
9390 && TREE_CODE (arg0) == PLUS_EXPR
9391 && TREE_CODE (arg1) != MULT_EXPR)
9392 {
9393 tree tree00 = TREE_OPERAND (arg0, 0);
9394 tree tree01 = TREE_OPERAND (arg0, 1);
9395 if (TREE_CODE (tree01) == MULT_EXPR
9396 && TREE_CODE (tree00) == MULT_EXPR)
9397 {
9398 tree tree0;
9399 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9400 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9401 }
9402 }
9403 }
9404
9405 bit_rotate:
9406 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9407 is a rotate of A by C1 bits. */
9408 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9409 is a rotate of A by B bits.
9410 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
9411 though in this case CODE must be | and not + or ^, otherwise
9412 it doesn't return A when B is 0. */
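/* For example, with 32-bit unsigned A, (A << 3) + (A >> 29) is a left
   rotate of A by 3 bits, and (A << B) | (A >> (-B & 31)) is a left
   rotate by B bits that also yields A when B is 0.  */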
9413 {
9414 enum tree_code code0, code1;
9415 tree rtype;
9416 code0 = TREE_CODE (arg0);
9417 code1 = TREE_CODE (arg1);
9418 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9419 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9420 && operand_equal_p (TREE_OPERAND (arg0, 0),
9421 TREE_OPERAND (arg1, 0), 0)
9422 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9423 TYPE_UNSIGNED (rtype))
9424 /* Only create rotates in complete modes. Other cases are not
9425 expanded properly. */
9426 && (element_precision (rtype)
9427 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9428 {
9429 tree tree01, tree11;
9430 tree orig_tree01, orig_tree11;
9431 enum tree_code code01, code11;
9432
9433 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
9434 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
9435 STRIP_NOPS (tree01);
9436 STRIP_NOPS (tree11);
9437 code01 = TREE_CODE (tree01);
9438 code11 = TREE_CODE (tree11);
9439 if (code11 != MINUS_EXPR
9440 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
9441 {
9442 std::swap (code0, code1);
9443 std::swap (code01, code11);
9444 std::swap (tree01, tree11);
9445 std::swap (orig_tree01, orig_tree11);
9446 }
9447 if (code01 == INTEGER_CST
9448 && code11 == INTEGER_CST
9449 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9450 == element_precision (rtype)))
9451 {
9452 tem = build2_loc (loc, LROTATE_EXPR,
9453 rtype, TREE_OPERAND (arg0, 0),
9454 code0 == LSHIFT_EXPR
9455 ? orig_tree01 : orig_tree11);
9456 return fold_convert_loc (loc, type, tem);
9457 }
9458 else if (code11 == MINUS_EXPR)
9459 {
9460 tree tree110, tree111;
9461 tree110 = TREE_OPERAND (tree11, 0);
9462 tree111 = TREE_OPERAND (tree11, 1);
9463 STRIP_NOPS (tree110);
9464 STRIP_NOPS (tree111);
9465 if (TREE_CODE (tree110) == INTEGER_CST
9466 && 0 == compare_tree_int (tree110,
9467 element_precision (rtype))
9468 && operand_equal_p (tree01, tree111, 0))
9469 {
9470 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9471 ? LROTATE_EXPR : RROTATE_EXPR),
9472 rtype, TREE_OPERAND (arg0, 0),
9473 orig_tree01);
9474 return fold_convert_loc (loc, type, tem);
9475 }
9476 }
9477 else if (code == BIT_IOR_EXPR
9478 && code11 == BIT_AND_EXPR
9479 && pow2p_hwi (element_precision (rtype)))
9480 {
9481 tree tree110, tree111;
9482 tree110 = TREE_OPERAND (tree11, 0);
9483 tree111 = TREE_OPERAND (tree11, 1);
9484 STRIP_NOPS (tree110);
9485 STRIP_NOPS (tree111);
9486 if (TREE_CODE (tree110) == NEGATE_EXPR
9487 && TREE_CODE (tree111) == INTEGER_CST
9488 && 0 == compare_tree_int (tree111,
9489 element_precision (rtype) - 1)
9490 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9491 {
9492 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9493 ? LROTATE_EXPR : RROTATE_EXPR),
9494 rtype, TREE_OPERAND (arg0, 0),
9495 orig_tree01);
9496 return fold_convert_loc (loc, type, tem);
9497 }
9498 }
9499 }
9500 }
9501
9502 associate:
9503 /* In most languages, we can't associate operations on floats through
9504 parentheses. Rather than remember where the parentheses were, we
9505 don't associate floats at all, unless the user has specified
9506 -fassociative-math.
9507 And, we need to make sure type is not saturating. */
9508
9509 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9510 && !TYPE_SATURATING (type))
9511 {
9512 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9513 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9514 tree atype = type;
9515 bool ok = true;
9516
9517 /* Split both trees into variables, constants, and literals. Then
9518 associate each group together, the constants with literals,
9519 then the result with variables. This increases the chances of
9520 literals being recombined later and of generating relocatable
9521 expressions for the sum of a constant and literal. */
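/* For example, in (x + 1) + (y + 2) the variables are x and y and the
   literals are 1 and 2, so the sum can be rebuilt as (x + y) + 3 with
   the literals combined.  */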
9522 var0 = split_tree (arg0, type, code,
9523 &minus_var0, &con0, &minus_con0,
9524 &lit0, &minus_lit0, 0);
9525 var1 = split_tree (arg1, type, code,
9526 &minus_var1, &con1, &minus_con1,
9527 &lit1, &minus_lit1, code == MINUS_EXPR);
9528
9529 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9530 if (code == MINUS_EXPR)
9531 code = PLUS_EXPR;
9532
9533 /* With undefined overflow prefer doing association in a type
9534 which wraps on overflow, if that is one of the operand types. */
9535 if (POINTER_TYPE_P (type)
9536 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9537 {
9538 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9539 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9540 atype = TREE_TYPE (arg0);
9541 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9542 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9543 atype = TREE_TYPE (arg1);
9544 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9545 }
9546
9547 /* With undefined overflow we can only associate constants with one
9548 variable, and constants whose association doesn't overflow. */
9549 if (POINTER_TYPE_P (atype)
9550 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9551 {
9552 if ((var0 && var1) || (minus_var0 && minus_var1))
9553 {
9554 /* ??? If split_tree would handle NEGATE_EXPR we could
9555 simply reject these cases and the allowed cases would
9556 be the var0/minus_var1 ones. */
9557 tree tmp0 = var0 ? var0 : minus_var0;
9558 tree tmp1 = var1 ? var1 : minus_var1;
9559 bool one_neg = false;
9560
9561 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9562 {
9563 tmp0 = TREE_OPERAND (tmp0, 0);
9564 one_neg = !one_neg;
9565 }
9566 if (CONVERT_EXPR_P (tmp0)
9567 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9568 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9569 <= TYPE_PRECISION (atype)))
9570 tmp0 = TREE_OPERAND (tmp0, 0);
9571 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9572 {
9573 tmp1 = TREE_OPERAND (tmp1, 0);
9574 one_neg = !one_neg;
9575 }
9576 if (CONVERT_EXPR_P (tmp1)
9577 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9578 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9579 <= TYPE_PRECISION (atype)))
9580 tmp1 = TREE_OPERAND (tmp1, 0);
9581 /* The only case we can still associate with two variables
9582 is if they cancel out. */
9583 if (!one_neg
9584 || !operand_equal_p (tmp0, tmp1, 0))
9585 ok = false;
9586 }
9587 else if ((var0 && minus_var1
9588 && ! operand_equal_p (var0, minus_var1, 0))
9589 || (minus_var0 && var1
9590 && ! operand_equal_p (minus_var0, var1, 0)))
9591 ok = false;
9592 }
9593
9594 /* Only do something if we found more than two objects. Otherwise,
9595 nothing has changed and we risk infinite recursion. */
9596 if (ok
9597 && (2 < ((var0 != 0) + (var1 != 0)
9598 + (minus_var0 != 0) + (minus_var1 != 0)
9599 + (con0 != 0) + (con1 != 0)
9600 + (minus_con0 != 0) + (minus_con1 != 0)
9601 + (lit0 != 0) + (lit1 != 0)
9602 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9603 {
9604 var0 = associate_trees (loc, var0, var1, code, atype);
9605 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9606 code, atype);
9607 con0 = associate_trees (loc, con0, con1, code, atype);
9608 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9609 code, atype);
9610 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9611 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9612 code, atype);
9613
9614 if (minus_var0 && var0)
9615 {
9616 var0 = associate_trees (loc, var0, minus_var0,
9617 MINUS_EXPR, atype);
9618 minus_var0 = 0;
9619 }
9620 if (minus_con0 && con0)
9621 {
9622 con0 = associate_trees (loc, con0, minus_con0,
9623 MINUS_EXPR, atype);
9624 minus_con0 = 0;
9625 }
9626
9627 /* Preserve the MINUS_EXPR if the negative part of the literal is
9628 greater than the positive part. Otherwise, the multiplicative
9629 folding code (i.e. extract_muldiv) may be fooled in case
9630 unsigned constants are subtracted, like in the following
9631 example: ((X*2 + 4) - 8U)/2. */
9632 if (minus_lit0 && lit0)
9633 {
9634 if (TREE_CODE (lit0) == INTEGER_CST
9635 && TREE_CODE (minus_lit0) == INTEGER_CST
9636 && tree_int_cst_lt (lit0, minus_lit0)
9637 /* But avoid ending up with only negated parts. */
9638 && (var0 || con0))
9639 {
9640 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9641 MINUS_EXPR, atype);
9642 lit0 = 0;
9643 }
9644 else
9645 {
9646 lit0 = associate_trees (loc, lit0, minus_lit0,
9647 MINUS_EXPR, atype);
9648 minus_lit0 = 0;
9649 }
9650 }
9651
9652 /* Don't introduce overflows through reassociation. */
9653 if ((lit0 && TREE_OVERFLOW_P (lit0))
9654 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9655 return NULL_TREE;
9656
9657 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9658 con0 = associate_trees (loc, con0, lit0, code, atype);
9659 lit0 = 0;
9660 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9661 code, atype);
9662 minus_lit0 = 0;
9663
9664 /* Eliminate minus_con0. */
9665 if (minus_con0)
9666 {
9667 if (con0)
9668 con0 = associate_trees (loc, con0, minus_con0,
9669 MINUS_EXPR, atype);
9670 else if (var0)
9671 var0 = associate_trees (loc, var0, minus_con0,
9672 MINUS_EXPR, atype);
9673 else
9674 gcc_unreachable ();
9675 minus_con0 = 0;
9676 }
9677
9678 /* Eliminate minus_var0. */
9679 if (minus_var0)
9680 {
9681 if (con0)
9682 con0 = associate_trees (loc, con0, minus_var0,
9683 MINUS_EXPR, atype);
9684 else
9685 gcc_unreachable ();
9686 minus_var0 = 0;
9687 }
9688
9689 return
9690 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9691 code, atype));
9692 }
9693 }
9694
9695 return NULL_TREE;
9696
9697 case MINUS_EXPR:
9698 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9699 if (TREE_CODE (arg0) == NEGATE_EXPR
9700 && negate_expr_p (op1))
9701 return fold_build2_loc (loc, MINUS_EXPR, type,
9702 negate_expr (op1),
9703 fold_convert_loc (loc, type,
9704 TREE_OPERAND (arg0, 0)));
9705
9706 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9707 __complex__ ( x, -y ). This is not the same for SNaNs or if
9708 signed zeros are involved. */
9709 if (!HONOR_SNANS (element_mode (arg0))
9710 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9711 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9712 {
9713 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9714 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9715 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9716 bool arg0rz = false, arg0iz = false;
9717 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9718 || (arg0i && (arg0iz = real_zerop (arg0i))))
9719 {
9720 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9721 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9722 if (arg0rz && arg1i && real_zerop (arg1i))
9723 {
9724 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9725 arg1r ? arg1r
9726 : build1 (REALPART_EXPR, rtype, arg1));
9727 tree ip = arg0i ? arg0i
9728 : build1 (IMAGPART_EXPR, rtype, arg0);
9729 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9730 }
9731 else if (arg0iz && arg1r && real_zerop (arg1r))
9732 {
9733 tree rp = arg0r ? arg0r
9734 : build1 (REALPART_EXPR, rtype, arg0);
9735 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9736 arg1i ? arg1i
9737 : build1 (IMAGPART_EXPR, rtype, arg1));
9738 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9739 }
9740 }
9741 }
9742
9743 /* A - B -> A + (-B) if B is easily negatable. */
9744 if (negate_expr_p (op1)
9745 && ! TYPE_OVERFLOW_SANITIZED (type)
9746 && ((FLOAT_TYPE_P (type)
9747 /* Avoid this transformation if B is a positive REAL_CST. */
9748 && (TREE_CODE (op1) != REAL_CST
9749 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9750 || INTEGRAL_TYPE_P (type)))
9751 return fold_build2_loc (loc, PLUS_EXPR, type,
9752 fold_convert_loc (loc, type, arg0),
9753 negate_expr (op1));
9754
9755 /* Fold &a[i] - &a[j] to i-j. */
9756 if (TREE_CODE (arg0) == ADDR_EXPR
9757 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9758 && TREE_CODE (arg1) == ADDR_EXPR
9759 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9760 {
9761 tree tem = fold_addr_of_array_ref_difference (loc, type,
9762 TREE_OPERAND (arg0, 0),
9763 TREE_OPERAND (arg1, 0));
9764 if (tem)
9765 return tem;
9766 }
9767
9768 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9769 one. Make sure the type is not saturating and has the signedness of
9770 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9771 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9772 if ((TREE_CODE (arg0) == MULT_EXPR
9773 || TREE_CODE (arg1) == MULT_EXPR)
9774 && !TYPE_SATURATING (type)
9775 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9776 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9777 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9778 {
9779 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9780 if (tem)
9781 return tem;
9782 }
9783
9784 goto associate;
9785
9786 case MULT_EXPR:
9787 if (! FLOAT_TYPE_P (type))
9788 {
9789 /* Transform x * -C into -x * C if x is easily negatable. */
9790 if (TREE_CODE (op1) == INTEGER_CST
9791 && tree_int_cst_sgn (op1) == -1
9792 && negate_expr_p (op0)
9793 && negate_expr_p (op1)
9794 && (tem = negate_expr (op1)) != op1
9795 && ! TREE_OVERFLOW (tem))
9796 return fold_build2_loc (loc, MULT_EXPR, type,
9797 fold_convert_loc (loc, type,
9798 negate_expr (op0)), tem);
9799
9800 strict_overflow_p = false;
9801 if (TREE_CODE (arg1) == INTEGER_CST
9802 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9803 &strict_overflow_p)))
9804 {
9805 if (strict_overflow_p)
9806 fold_overflow_warning (("assuming signed overflow does not "
9807 "occur when simplifying "
9808 "multiplication"),
9809 WARN_STRICT_OVERFLOW_MISC);
9810 return fold_convert_loc (loc, type, tem);
9811 }
9812
9813 /* Optimize z * conj(z) for integer complex numbers. */
9814 if (TREE_CODE (arg0) == CONJ_EXPR
9815 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9816 return fold_mult_zconjz (loc, type, arg1);
9817 if (TREE_CODE (arg1) == CONJ_EXPR
9818 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9819 return fold_mult_zconjz (loc, type, arg0);
9820 }
9821 else
9822 {
9823 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9824 This is not the same for NaNs or if signed zeros are
9825 involved. */
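/* For example, (a + b*i) * i is -b + a*i, so only a swap and a
   negation are needed instead of a full complex multiplication.  */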
9826 if (!HONOR_NANS (arg0)
9827 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9828 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9829 && TREE_CODE (arg1) == COMPLEX_CST
9830 && real_zerop (TREE_REALPART (arg1)))
9831 {
9832 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9833 if (real_onep (TREE_IMAGPART (arg1)))
9834 return
9835 fold_build2_loc (loc, COMPLEX_EXPR, type,
9836 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9837 rtype, arg0)),
9838 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9839 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9840 return
9841 fold_build2_loc (loc, COMPLEX_EXPR, type,
9842 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9843 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9844 rtype, arg0)));
9845 }
9846
9847 /* Optimize z * conj(z) for floating point complex numbers.
9848 Guarded by flag_unsafe_math_optimizations as non-finite
9849 imaginary components don't produce scalar results. */
9850 if (flag_unsafe_math_optimizations
9851 && TREE_CODE (arg0) == CONJ_EXPR
9852 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9853 return fold_mult_zconjz (loc, type, arg1);
9854 if (flag_unsafe_math_optimizations
9855 && TREE_CODE (arg1) == CONJ_EXPR
9856 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9857 return fold_mult_zconjz (loc, type, arg0);
9858 }
9859 goto associate;
9860
9861 case BIT_IOR_EXPR:
9862 /* Canonicalize (X & C1) | C2. */
9863 if (TREE_CODE (arg0) == BIT_AND_EXPR
9864 && TREE_CODE (arg1) == INTEGER_CST
9865 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9866 {
9867 int width = TYPE_PRECISION (type), w;
9868 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
9869 wide_int c2 = wi::to_wide (arg1);
9870
9871 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9872 if ((c1 & c2) == c1)
9873 return omit_one_operand_loc (loc, type, arg1,
9874 TREE_OPERAND (arg0, 0));
9875
9876 wide_int msk = wi::mask (width, false,
9877 TYPE_PRECISION (TREE_TYPE (arg1)));
9878
9879 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9880 if (wi::bit_and_not (msk, c1 | c2) == 0)
9881 {
9882 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9883 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9884 }
9885
9886 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9887 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9888 mode which allows further optimizations. */
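/* For example, in (X & 0x10f) | 0xff, C1 can shrink to 0x100 because
   the low byte is already covered by C2. But in (X & 0xff) | 0xf0 the
   loop below keeps C1 as the byte mask 0xff instead of shrinking it
   to 0x0f, since the byte mask may enable other folds.  */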
9889 c1 &= msk;
9890 c2 &= msk;
9891 wide_int c3 = wi::bit_and_not (c1, c2);
9892 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9893 {
9894 wide_int mask = wi::mask (w, false,
9895 TYPE_PRECISION (type));
9896 if (((c1 | c2) & mask) == mask
9897 && wi::bit_and_not (c1, mask) == 0)
9898 {
9899 c3 = mask;
9900 break;
9901 }
9902 }
9903
9904 if (c3 != c1)
9905 {
9906 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9907 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
9908 wide_int_to_tree (type, c3));
9909 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9910 }
9911 }
9912
9913 /* See if this can be simplified into a rotate first. If that
9914 is unsuccessful continue in the association code. */
9915 goto bit_rotate;
9916
9917 case BIT_XOR_EXPR:
9918 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9919 if (TREE_CODE (arg0) == BIT_AND_EXPR
9920 && INTEGRAL_TYPE_P (type)
9921 && integer_onep (TREE_OPERAND (arg0, 1))
9922 && integer_onep (arg1))
9923 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9924 build_zero_cst (TREE_TYPE (arg0)));
9925
9926 /* See if this can be simplified into a rotate first. If that
9927 is unsuccessful continue in the association code. */
9928 goto bit_rotate;
9929
9930 case BIT_AND_EXPR:
9931 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9932 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9933 && INTEGRAL_TYPE_P (type)
9934 && integer_onep (TREE_OPERAND (arg0, 1))
9935 && integer_onep (arg1))
9936 {
9937 tree tem2;
9938 tem = TREE_OPERAND (arg0, 0);
9939 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9940 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9941 tem, tem2);
9942 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9943 build_zero_cst (TREE_TYPE (tem)));
9944 }
9945 /* Fold ~X & 1 as (X & 1) == 0. */
9946 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9947 && INTEGRAL_TYPE_P (type)
9948 && integer_onep (arg1))
9949 {
9950 tree tem2;
9951 tem = TREE_OPERAND (arg0, 0);
9952 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9953 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9954 tem, tem2);
9955 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9956 build_zero_cst (TREE_TYPE (tem)));
9957 }
9958 /* Fold !X & 1 as X == 0. */
9959 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9960 && integer_onep (arg1))
9961 {
9962 tem = TREE_OPERAND (arg0, 0);
9963 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9964 build_zero_cst (TREE_TYPE (tem)));
9965 }
9966
9967 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
9968 multiple of 1 << CST. */
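/* For example, (X * 4) & -4 is just X * 4: the product is always a
   multiple of 4, so masking off the two low bits changes nothing.  */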
9969 if (TREE_CODE (arg1) == INTEGER_CST)
9970 {
9971 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
9972 wide_int ncst1 = -cst1;
9973 if ((cst1 & ncst1) == ncst1
9974 && multiple_of_p (type, arg0,
9975 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
9976 return fold_convert_loc (loc, type, arg0);
9977 }
9978
9979 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
9980 bits from CST2. */
9981 if (TREE_CODE (arg1) == INTEGER_CST
9982 && TREE_CODE (arg0) == MULT_EXPR
9983 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9984 {
9985 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
9986 wide_int masked
9987 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
9988
9989 if (masked == 0)
9990 return omit_two_operands_loc (loc, type, build_zero_cst (type),
9991 arg0, arg1);
9992 else if (masked != warg1)
9993 {
9994 /* Avoid the transform if arg1 is a mask of some
9995 mode which allows further optimizations. */
9996 int pop = wi::popcount (warg1);
9997 if (!(pop >= BITS_PER_UNIT
9998 && pow2p_hwi (pop)
9999 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10000 return fold_build2_loc (loc, code, type, op0,
10001 wide_int_to_tree (type, masked));
10002 }
10003 }
10004
10005 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10006 ((A & N) + B) & M -> (A + B) & M
10007 Similarly if (N & M) == 0,
10008 ((A | N) + B) & M -> (A + B) & M
10009 and for - instead of + (or unary - instead of +)
10010 and/or ^ instead of |.
10011 If B is constant and (B & M) == 0, fold into A & M. */
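/* For example, with M == 0xff, ((A & 0x1ff) + B) & 0xff can become
   (A + B) & 0xff, since bits of A above the mask cannot affect the
   masked sum.  */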
10012 if (TREE_CODE (arg1) == INTEGER_CST)
10013 {
10014 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10015 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10016 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10017 && (TREE_CODE (arg0) == PLUS_EXPR
10018 || TREE_CODE (arg0) == MINUS_EXPR
10019 || TREE_CODE (arg0) == NEGATE_EXPR)
10020 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10021 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10022 {
10023 tree pmop[2];
10024 int which = 0;
10025 wide_int cst0;
10026
10027 /* Now we know that arg0 is (C + D) or (C - D) or
10028 -C and arg1 (M) is == (1LL << cst) - 1.
10029 Store C into PMOP[0] and D into PMOP[1]. */
10030 pmop[0] = TREE_OPERAND (arg0, 0);
10031 pmop[1] = NULL;
10032 if (TREE_CODE (arg0) != NEGATE_EXPR)
10033 {
10034 pmop[1] = TREE_OPERAND (arg0, 1);
10035 which = 1;
10036 }
10037
10038 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10039 which = -1;
10040
10041 for (; which >= 0; which--)
10042 switch (TREE_CODE (pmop[which]))
10043 {
10044 case BIT_AND_EXPR:
10045 case BIT_IOR_EXPR:
10046 case BIT_XOR_EXPR:
10047 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10048 != INTEGER_CST)
10049 break;
10050 cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
10051 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10052 {
10053 if (cst0 != cst1)
10054 break;
10055 }
10056 else if (cst0 != 0)
10057 break;
10058 /* If C or D is of the form (A & N) where
10059 (N & M) == M, or of the form (A | N) or
10060 (A ^ N) where (N & M) == 0, replace it with A. */
10061 pmop[which] = TREE_OPERAND (pmop[which], 0);
10062 break;
10063 case INTEGER_CST:
10064 /* If C or D is an N where (N & M) == 0, it can be
10065 omitted (assumed 0). */
10066 if ((TREE_CODE (arg0) == PLUS_EXPR
10067 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10068 && (cst1 & wi::to_wide (pmop[which])) == 0)
10069 pmop[which] = NULL;
10070 break;
10071 default:
10072 break;
10073 }
10074
10075 /* Only build anything new if we optimized one or both arguments
10076 above. */
10077 if (pmop[0] != TREE_OPERAND (arg0, 0)
10078 || (TREE_CODE (arg0) != NEGATE_EXPR
10079 && pmop[1] != TREE_OPERAND (arg0, 1)))
10080 {
10081 tree utype = TREE_TYPE (arg0);
10082 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10083 {
10084 /* Perform the operations in a type that has defined
10085 overflow behavior. */
10086 utype = unsigned_type_for (TREE_TYPE (arg0));
10087 if (pmop[0] != NULL)
10088 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10089 if (pmop[1] != NULL)
10090 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10091 }
10092
10093 if (TREE_CODE (arg0) == NEGATE_EXPR)
10094 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10095 else if (TREE_CODE (arg0) == PLUS_EXPR)
10096 {
10097 if (pmop[0] != NULL && pmop[1] != NULL)
10098 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10099 pmop[0], pmop[1]);
10100 else if (pmop[0] != NULL)
10101 tem = pmop[0];
10102 else if (pmop[1] != NULL)
10103 tem = pmop[1];
10104 else
10105 return build_int_cst (type, 0);
10106 }
10107 else if (pmop[0] == NULL)
10108 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10109 else
10110 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10111 pmop[0], pmop[1]);
10112 /* TEM is now the new binary +, - or unary - replacement. */
10113 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10114 fold_convert_loc (loc, utype, arg1));
10115 return fold_convert_loc (loc, type, tem);
10116 }
10117 }
10118 }
10119
10120 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10121 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10122 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10123 {
10124 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10125
10126 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10127 if (mask == -1)
10128 return
10129 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10130 }
10131
10132 goto associate;
10133
10134 case RDIV_EXPR:
10135 /* Don't touch a floating-point divide by zero unless the mode
10136 of the constant can represent infinity. */
10137 if (TREE_CODE (arg1) == REAL_CST
10138 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10139 && real_zerop (arg1))
10140 return NULL_TREE;
10141
10142 /* (-A) / (-B) -> A / B */
10143 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10144 return fold_build2_loc (loc, RDIV_EXPR, type,
10145 TREE_OPERAND (arg0, 0),
10146 negate_expr (arg1));
10147 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10148 return fold_build2_loc (loc, RDIV_EXPR, type,
10149 negate_expr (arg0),
10150 TREE_OPERAND (arg1, 0));
10151 return NULL_TREE;
10152
10153 case TRUNC_DIV_EXPR:
10154 /* Fall through */
10155
10156 case FLOOR_DIV_EXPR:
10157 /* Simplify A / (B << N) where A and B are positive and B is
10158 a power of 2, to A >> (N + log2(B)). */
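/* For example, for unsigned A, A / (2 << N) becomes A >> (N + 1).  */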
10159 strict_overflow_p = false;
10160 if (TREE_CODE (arg1) == LSHIFT_EXPR
10161 && (TYPE_UNSIGNED (type)
10162 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10163 {
10164 tree sval = TREE_OPERAND (arg1, 0);
10165 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10166 {
10167 tree sh_cnt = TREE_OPERAND (arg1, 1);
10168 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10169 wi::exact_log2 (wi::to_wide (sval)));
10170
10171 if (strict_overflow_p)
10172 fold_overflow_warning (("assuming signed overflow does not "
10173 "occur when simplifying A / (B << N)"),
10174 WARN_STRICT_OVERFLOW_MISC);
10175
10176 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10177 sh_cnt, pow2);
10178 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10179 fold_convert_loc (loc, type, arg0), sh_cnt);
10180 }
10181 }
10182
10183 /* Fall through */
10184
10185 case ROUND_DIV_EXPR:
10186 case CEIL_DIV_EXPR:
10187 case EXACT_DIV_EXPR:
10188 if (integer_zerop (arg1))
10189 return NULL_TREE;
10190
10191 /* Convert -A / -B to A / B when the type is signed and overflow is
10192 undefined. */
10193 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10194 && TREE_CODE (op0) == NEGATE_EXPR
10195 && negate_expr_p (op1))
10196 {
10197 if (INTEGRAL_TYPE_P (type))
10198 fold_overflow_warning (("assuming signed overflow does not occur "
10199 "when distributing negation across "
10200 "division"),
10201 WARN_STRICT_OVERFLOW_MISC);
10202 return fold_build2_loc (loc, code, type,
10203 fold_convert_loc (loc, type,
10204 TREE_OPERAND (arg0, 0)),
10205 negate_expr (op1));
10206 }
10207 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10208 && TREE_CODE (arg1) == NEGATE_EXPR
10209 && negate_expr_p (op0))
10210 {
10211 if (INTEGRAL_TYPE_P (type))
10212 fold_overflow_warning (("assuming signed overflow does not occur "
10213 "when distributing negation across "
10214 "division"),
10215 WARN_STRICT_OVERFLOW_MISC);
10216 return fold_build2_loc (loc, code, type,
10217 negate_expr (op0),
10218 fold_convert_loc (loc, type,
10219 TREE_OPERAND (arg1, 0)));
10220 }
10221
10222 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10223 operation, EXACT_DIV_EXPR.
10224
10225 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10226 At one time others generated faster code, but it's not clear if they do
10227 after the last round of changes to the DIV code in expmed.c. */
10228 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10229 && multiple_of_p (type, arg0, arg1))
10230 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10231 fold_convert (type, arg0),
10232 fold_convert (type, arg1));
10233
10234 strict_overflow_p = false;
10235 if (TREE_CODE (arg1) == INTEGER_CST
10236 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10237 &strict_overflow_p)))
10238 {
10239 if (strict_overflow_p)
10240 fold_overflow_warning (("assuming signed overflow does not occur "
10241 "when simplifying division"),
10242 WARN_STRICT_OVERFLOW_MISC);
10243 return fold_convert_loc (loc, type, tem);
10244 }
10245
10246 return NULL_TREE;
10247
10248 case CEIL_MOD_EXPR:
10249 case FLOOR_MOD_EXPR:
10250 case ROUND_MOD_EXPR:
10251 case TRUNC_MOD_EXPR:
10252 strict_overflow_p = false;
10253 if (TREE_CODE (arg1) == INTEGER_CST
10254 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10255 &strict_overflow_p)))
10256 {
10257 if (strict_overflow_p)
10258 fold_overflow_warning (("assuming signed overflow does not occur "
10259 "when simplifying modulus"),
10260 WARN_STRICT_OVERFLOW_MISC);
10261 return fold_convert_loc (loc, type, tem);
10262 }
10263
10264 return NULL_TREE;
10265
10266 case LROTATE_EXPR:
10267 case RROTATE_EXPR:
10268 case RSHIFT_EXPR:
10269 case LSHIFT_EXPR:
10270 /* Since a negative shift count is not well-defined,
10271 don't try to compute it in the compiler. */
10272 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10273 return NULL_TREE;
10274
10275 prec = element_precision (type);
10276
10277 /* If we have a rotate of a bit operation with the rotate count and
10278 the second operand of the bit operation both constant,
10279 permute the two operations. */
10280 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10281 && (TREE_CODE (arg0) == BIT_AND_EXPR
10282 || TREE_CODE (arg0) == BIT_IOR_EXPR
10283 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10284 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10285 {
10286 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10287 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10288 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10289 fold_build2_loc (loc, code, type,
10290 arg00, arg1),
10291 fold_build2_loc (loc, code, type,
10292 arg01, arg1));
10293 }
10294
10295 /* Two consecutive rotates adding up to some integer
10296 multiple of the precision of the type can be ignored. */
10297 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10298 && TREE_CODE (arg0) == RROTATE_EXPR
10299 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10300 && wi::umod_trunc (wi::to_wide (arg1)
10301 + wi::to_wide (TREE_OPERAND (arg0, 1)),
10302 prec) == 0)
10303 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10304
10305 return NULL_TREE;
10306
10307 case MIN_EXPR:
10308 case MAX_EXPR:
10309 goto associate;
10310
10311 case TRUTH_ANDIF_EXPR:
10312 /* Note that the operands of this must be ints
10313 and their values must be 0 or 1.
10314 ("true" is a fixed value perhaps depending on the language.) */
10315 /* If first arg is constant zero, return it. */
10316 if (integer_zerop (arg0))
10317 return fold_convert_loc (loc, type, arg0);
10318 /* FALLTHRU */
10319 case TRUTH_AND_EXPR:
10320 /* If either arg is constant true, drop it. */
10321 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10322 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10323 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10324 /* Preserve sequence points. */
10325 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10326 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10327 /* If second arg is constant zero, result is zero, but first arg
10328 must be evaluated. */
10329 if (integer_zerop (arg1))
10330 return omit_one_operand_loc (loc, type, arg1, arg0);
10331 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10332 case will be handled here. */
10333 if (integer_zerop (arg0))
10334 return omit_one_operand_loc (loc, type, arg0, arg1);
10335
10336 /* !X && X is always false. */
10337 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10338 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10339 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10340 /* X && !X is always false. */
10341 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10342 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10343 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10344
10345 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10346 means A >= Y && A != MAX, but in this case we know that
10347 A < X <= MAX. */
10348
10349 if (!TREE_SIDE_EFFECTS (arg0)
10350 && !TREE_SIDE_EFFECTS (arg1))
10351 {
10352 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10353 if (tem && !operand_equal_p (tem, arg0, 0))
10354 return fold_build2_loc (loc, code, type, tem, arg1);
10355
10356 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10357 if (tem && !operand_equal_p (tem, arg1, 0))
10358 return fold_build2_loc (loc, code, type, arg0, tem);
10359 }
10360
10361 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10362 != NULL_TREE)
10363 return tem;
10364
10365 return NULL_TREE;
10366
10367 case TRUTH_ORIF_EXPR:
10368 /* Note that the operands of this must be ints
10369 	 and their values must be 0 or 1.
10370 ("true" is a fixed value perhaps depending on the language.) */
10371 /* If first arg is constant true, return it. */
10372 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10373 return fold_convert_loc (loc, type, arg0);
10374 /* FALLTHRU */
10375 case TRUTH_OR_EXPR:
10376 /* If either arg is constant zero, drop it. */
10377 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10378 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10379 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10380 /* Preserve sequence points. */
10381 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10382 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10383 /* If second arg is constant true, result is true, but we must
10384 evaluate first arg. */
10385 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10386 return omit_one_operand_loc (loc, type, arg1, arg0);
10387 /* Likewise for first arg, but note this only occurs here for
10388 TRUTH_OR_EXPR. */
10389 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10390 return omit_one_operand_loc (loc, type, arg0, arg1);
10391
10392 /* !X || X is always true. */
10393 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10394 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10395 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10396 /* X || !X is always true. */
10397 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10398 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10399 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10400
10401 /* (X && !Y) || (!X && Y) is X ^ Y */
10402 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10403 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10404 {
10405 tree a0, a1, l0, l1, n0, n1;
10406
10407 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10408 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10409
10410 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10411 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10412
10413 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10414 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10415
10416 if ((operand_equal_p (n0, a0, 0)
10417 && operand_equal_p (n1, a1, 0))
10418 || (operand_equal_p (n0, a1, 0)
10419 && operand_equal_p (n1, a0, 0)))
10420 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10421 }
10422
10423 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10424 != NULL_TREE)
10425 return tem;
10426
10427 return NULL_TREE;
10428
10429 case TRUTH_XOR_EXPR:
10430 /* If the second arg is constant zero, drop it. */
10431 if (integer_zerop (arg1))
10432 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10433 /* If the second arg is constant true, this is a logical inversion. */
10434 if (integer_onep (arg1))
10435 {
10436 tem = invert_truthvalue_loc (loc, arg0);
10437 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10438 }
10439 /* Identical arguments cancel to zero. */
10440 if (operand_equal_p (arg0, arg1, 0))
10441 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10442
10443 /* !X ^ X is always true. */
10444 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10445 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10446 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10447
10448 /* X ^ !X is always true. */
10449 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10450 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10451 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10452
10453 return NULL_TREE;
10454
10455 case EQ_EXPR:
10456 case NE_EXPR:
10457 STRIP_NOPS (arg0);
10458 STRIP_NOPS (arg1);
10459
10460 tem = fold_comparison (loc, code, type, op0, op1);
10461 if (tem != NULL_TREE)
10462 return tem;
10463
10464 /* bool_var != 1 becomes !bool_var. */
10465 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10466 && code == NE_EXPR)
10467 return fold_convert_loc (loc, type,
10468 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10469 TREE_TYPE (arg0), arg0));
10470
10471 /* bool_var == 0 becomes !bool_var. */
10472 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10473 && code == EQ_EXPR)
10474 return fold_convert_loc (loc, type,
10475 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10476 TREE_TYPE (arg0), arg0));
10477
10478 /* !exp != 0 becomes !exp */
10479 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10480 && code == NE_EXPR)
10481 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10482
10483 /* If this is an EQ or NE comparison with zero and ARG0 is
10484 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10485 two operations, but the latter can be done in one less insn
10486 on machines that have only two-operand insns or on which a
10487 constant cannot be the first operand. */
10488 if (TREE_CODE (arg0) == BIT_AND_EXPR
10489 && integer_zerop (arg1))
10490 {
10491 tree arg00 = TREE_OPERAND (arg0, 0);
10492 tree arg01 = TREE_OPERAND (arg0, 1);
10493 if (TREE_CODE (arg00) == LSHIFT_EXPR
10494 && integer_onep (TREE_OPERAND (arg00, 0)))
10495 {
10496 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10497 arg01, TREE_OPERAND (arg00, 1));
10498 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10499 build_int_cst (TREE_TYPE (arg0), 1));
10500 return fold_build2_loc (loc, code, type,
10501 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10502 arg1);
10503 }
10504 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10505 && integer_onep (TREE_OPERAND (arg01, 0)))
10506 {
10507 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10508 arg00, TREE_OPERAND (arg01, 1));
10509 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10510 build_int_cst (TREE_TYPE (arg0), 1));
10511 return fold_build2_loc (loc, code, type,
10512 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10513 arg1);
10514 }
10515 }
10516
10517 /* If this is an NE or EQ comparison of zero against the result of a
10518 signed MOD operation whose second operand is a power of 2, make
10519 the MOD operation unsigned since it is simpler and equivalent. */
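	 /* A sketch of the effect at the source level: for int x,
	    x % 4 == 0 becomes (unsigned) x % 4u == 0, and the unsigned
	    modulus by a power of two can later be implemented as a mask,
	    (x & 3) == 0. */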
10520 if (integer_zerop (arg1)
10521 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10522 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10523 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10524 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10525 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10526 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10527 {
10528 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10529 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10530 fold_convert_loc (loc, newtype,
10531 TREE_OPERAND (arg0, 0)),
10532 fold_convert_loc (loc, newtype,
10533 TREE_OPERAND (arg0, 1)));
10534
10535 return fold_build2_loc (loc, code, type, newmod,
10536 fold_convert_loc (loc, newtype, arg1));
10537 }
10538
10539 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10540 C1 is a valid shift constant, and C2 is a power of two, i.e.
10541 a single bit. */
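	 /* For instance, with 32-bit x, ((x >> 3) & 8) != 0 becomes
	    (x & (8 << 3)) != 0 because 8 << 3 does not overflow; when the
	    selected bit would lie above the precision for a signed x, as
	    in ((x >> 31) & 2) != 0, the test reduces to x < 0. */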
10542 if (TREE_CODE (arg0) == BIT_AND_EXPR
10543 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10544 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10545 == INTEGER_CST
10546 && integer_pow2p (TREE_OPERAND (arg0, 1))
10547 && integer_zerop (arg1))
10548 {
10549 tree itype = TREE_TYPE (arg0);
10550 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10551 prec = TYPE_PRECISION (itype);
10552
10553 /* Check for a valid shift count. */
10554 if (wi::ltu_p (wi::to_wide (arg001), prec))
10555 {
10556 tree arg01 = TREE_OPERAND (arg0, 1);
10557 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10558 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10559 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10560 can be rewritten as (X & (C2 << C1)) != 0. */
10561 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10562 {
10563 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10564 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10565 return fold_build2_loc (loc, code, type, tem,
10566 fold_convert_loc (loc, itype, arg1));
10567 }
10568 /* Otherwise, for signed (arithmetic) shifts,
10569 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10570 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10571 else if (!TYPE_UNSIGNED (itype))
10572 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10573 arg000, build_int_cst (itype, 0));
10574 	 /* Otherwise, for unsigned (logical) shifts,
10575 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10576 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10577 else
10578 return omit_one_operand_loc (loc, type,
10579 code == EQ_EXPR ? integer_one_node
10580 : integer_zero_node,
10581 arg000);
10582 }
10583 }
10584
10585 /* If this is a comparison of a field, we may be able to simplify it. */
10586 if ((TREE_CODE (arg0) == COMPONENT_REF
10587 || TREE_CODE (arg0) == BIT_FIELD_REF)
10588 /* Handle the constant case even without -O
10589 to make sure the warnings are given. */
10590 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10591 {
10592 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10593 if (t1)
10594 return t1;
10595 }
10596
10597 /* Optimize comparisons of strlen vs zero to a compare of the
10598 first character of the string vs zero. To wit,
10599 strlen(ptr) == 0 => *ptr == 0
10600 strlen(ptr) != 0 => *ptr != 0
10601 Other cases should reduce to one of these two (or a constant)
10602 due to the return value of strlen being unsigned. */
10603 if (TREE_CODE (arg0) == CALL_EXPR
10604 && integer_zerop (arg1))
10605 {
10606 tree fndecl = get_callee_fndecl (arg0);
10607
10608 if (fndecl
10609 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10610 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10611 && call_expr_nargs (arg0) == 1
10612 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10613 {
10614 tree iref = build_fold_indirect_ref_loc (loc,
10615 CALL_EXPR_ARG (arg0, 0));
10616 return fold_build2_loc (loc, code, type, iref,
10617 build_int_cst (TREE_TYPE (iref), 0));
10618 }
10619 }
10620
10621 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10622 of X. Similarly fold (X >> C) == 0 into X >= 0. */
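	 /* E.g. for 32-bit int x, (x >> 31) != 0 becomes x < 0, since only
	    the sign bit can survive the shift; an unsigned x is first
	    converted to the corresponding signed type. */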
10623 if (TREE_CODE (arg0) == RSHIFT_EXPR
10624 && integer_zerop (arg1)
10625 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10626 {
10627 tree arg00 = TREE_OPERAND (arg0, 0);
10628 tree arg01 = TREE_OPERAND (arg0, 1);
10629 tree itype = TREE_TYPE (arg00);
10630 if (wi::to_wide (arg01) == element_precision (itype) - 1)
10631 {
10632 if (TYPE_UNSIGNED (itype))
10633 {
10634 itype = signed_type_for (itype);
10635 arg00 = fold_convert_loc (loc, itype, arg00);
10636 }
10637 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10638 type, arg00, build_zero_cst (itype));
10639 }
10640 }
10641
10642 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10643 (X & C) == 0 when C is a single bit. */
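	 /* For example, (~x & 4) == 0 tests that bit 2 of ~x is clear,
	    i.e. that bit 2 of x is set, so it becomes (x & 4) != 0. */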
10644 if (TREE_CODE (arg0) == BIT_AND_EXPR
10645 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10646 && integer_zerop (arg1)
10647 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10648 {
10649 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10650 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10651 TREE_OPERAND (arg0, 1));
10652 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10653 type, tem,
10654 fold_convert_loc (loc, TREE_TYPE (arg0),
10655 arg1));
10656 }
10657
10658 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10659 constant C is a power of two, i.e. a single bit. */
10660 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10661 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10662 && integer_zerop (arg1)
10663 && integer_pow2p (TREE_OPERAND (arg0, 1))
10664 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10665 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10666 {
10667 tree arg00 = TREE_OPERAND (arg0, 0);
10668 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10669 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10670 }
10671
10672 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10673 	 when C is a power of two, i.e. a single bit. */
10674 if (TREE_CODE (arg0) == BIT_AND_EXPR
10675 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10676 && integer_zerop (arg1)
10677 && integer_pow2p (TREE_OPERAND (arg0, 1))
10678 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10679 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10680 {
10681 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10682 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10683 arg000, TREE_OPERAND (arg0, 1));
10684 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10685 tem, build_int_cst (TREE_TYPE (tem), 0));
10686 }
10687
10688 if (integer_zerop (arg1)
10689 && tree_expr_nonzero_p (arg0))
10690 {
10691 	 tree res = constant_boolean_node (code == NE_EXPR, type);
10692 return omit_one_operand_loc (loc, type, res, arg0);
10693 }
10694
10695 	 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
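	 /* E.g. (x & m) == (y & m) becomes ((x ^ y) & m) == 0: the masked
	    values agree exactly when x and y do not differ in any bit
	    selected by m. */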
10696 if (TREE_CODE (arg0) == BIT_AND_EXPR
10697 && TREE_CODE (arg1) == BIT_AND_EXPR)
10698 {
10699 tree arg00 = TREE_OPERAND (arg0, 0);
10700 tree arg01 = TREE_OPERAND (arg0, 1);
10701 tree arg10 = TREE_OPERAND (arg1, 0);
10702 tree arg11 = TREE_OPERAND (arg1, 1);
10703 tree itype = TREE_TYPE (arg0);
10704
10705 if (operand_equal_p (arg01, arg11, 0))
10706 {
10707 tem = fold_convert_loc (loc, itype, arg10);
10708 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10709 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10710 return fold_build2_loc (loc, code, type, tem,
10711 build_zero_cst (itype));
10712 }
10713 if (operand_equal_p (arg01, arg10, 0))
10714 {
10715 tem = fold_convert_loc (loc, itype, arg11);
10716 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10717 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10718 return fold_build2_loc (loc, code, type, tem,
10719 build_zero_cst (itype));
10720 }
10721 if (operand_equal_p (arg00, arg11, 0))
10722 {
10723 tem = fold_convert_loc (loc, itype, arg10);
10724 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10725 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10726 return fold_build2_loc (loc, code, type, tem,
10727 build_zero_cst (itype));
10728 }
10729 if (operand_equal_p (arg00, arg10, 0))
10730 {
10731 tem = fold_convert_loc (loc, itype, arg11);
10732 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10733 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10734 return fold_build2_loc (loc, code, type, tem,
10735 build_zero_cst (itype));
10736 }
10737 }
10738
10739 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10740 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10741 {
10742 tree arg00 = TREE_OPERAND (arg0, 0);
10743 tree arg01 = TREE_OPERAND (arg0, 1);
10744 tree arg10 = TREE_OPERAND (arg1, 0);
10745 tree arg11 = TREE_OPERAND (arg1, 1);
10746 tree itype = TREE_TYPE (arg0);
10747
10748 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10749 operand_equal_p guarantees no side-effects so we don't need
10750 to use omit_one_operand on Z. */
10751 if (operand_equal_p (arg01, arg11, 0))
10752 return fold_build2_loc (loc, code, type, arg00,
10753 fold_convert_loc (loc, TREE_TYPE (arg00),
10754 arg10));
10755 if (operand_equal_p (arg01, arg10, 0))
10756 return fold_build2_loc (loc, code, type, arg00,
10757 fold_convert_loc (loc, TREE_TYPE (arg00),
10758 arg11));
10759 if (operand_equal_p (arg00, arg11, 0))
10760 return fold_build2_loc (loc, code, type, arg01,
10761 fold_convert_loc (loc, TREE_TYPE (arg01),
10762 arg10));
10763 if (operand_equal_p (arg00, arg10, 0))
10764 return fold_build2_loc (loc, code, type, arg01,
10765 fold_convert_loc (loc, TREE_TYPE (arg01),
10766 arg11));
10767
10768 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
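	 /* For instance, (x ^ 5) == (y ^ 3) becomes (x ^ (5 ^ 3)) == y,
	    i.e. (x ^ 6) == y, folding the two constants together. */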
10769 if (TREE_CODE (arg01) == INTEGER_CST
10770 && TREE_CODE (arg11) == INTEGER_CST)
10771 {
10772 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10773 fold_convert_loc (loc, itype, arg11));
10774 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10775 return fold_build2_loc (loc, code, type, tem,
10776 fold_convert_loc (loc, itype, arg10));
10777 }
10778 }
10779
10780 /* Attempt to simplify equality/inequality comparisons of complex
10781 values. Only lower the comparison if the result is known or
10782 can be simplified to a single scalar comparison. */
10783 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10784 || TREE_CODE (arg0) == COMPLEX_CST)
10785 && (TREE_CODE (arg1) == COMPLEX_EXPR
10786 || TREE_CODE (arg1) == COMPLEX_CST))
10787 {
10788 tree real0, imag0, real1, imag1;
10789 tree rcond, icond;
10790
10791 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10792 {
10793 real0 = TREE_OPERAND (arg0, 0);
10794 imag0 = TREE_OPERAND (arg0, 1);
10795 }
10796 else
10797 {
10798 real0 = TREE_REALPART (arg0);
10799 imag0 = TREE_IMAGPART (arg0);
10800 }
10801
10802 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10803 {
10804 real1 = TREE_OPERAND (arg1, 0);
10805 imag1 = TREE_OPERAND (arg1, 1);
10806 }
10807 else
10808 {
10809 real1 = TREE_REALPART (arg1);
10810 imag1 = TREE_IMAGPART (arg1);
10811 }
10812
10813 rcond = fold_binary_loc (loc, code, type, real0, real1);
10814 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10815 {
10816 if (integer_zerop (rcond))
10817 {
10818 if (code == EQ_EXPR)
10819 return omit_two_operands_loc (loc, type, boolean_false_node,
10820 imag0, imag1);
10821 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10822 }
10823 else
10824 {
10825 if (code == NE_EXPR)
10826 return omit_two_operands_loc (loc, type, boolean_true_node,
10827 imag0, imag1);
10828 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10829 }
10830 }
10831
10832 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10833 if (icond && TREE_CODE (icond) == INTEGER_CST)
10834 {
10835 if (integer_zerop (icond))
10836 {
10837 if (code == EQ_EXPR)
10838 return omit_two_operands_loc (loc, type, boolean_false_node,
10839 real0, real1);
10840 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10841 }
10842 else
10843 {
10844 if (code == NE_EXPR)
10845 return omit_two_operands_loc (loc, type, boolean_true_node,
10846 real0, real1);
10847 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10848 }
10849 }
10850 }
10851
10852 return NULL_TREE;
10853
10854 case LT_EXPR:
10855 case GT_EXPR:
10856 case LE_EXPR:
10857 case GE_EXPR:
10858 tem = fold_comparison (loc, code, type, op0, op1);
10859 if (tem != NULL_TREE)
10860 return tem;
10861
10862 /* Transform comparisons of the form X +- C CMP X. */
10863 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10864 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10865 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10866 && !HONOR_SNANS (arg0))
10867 {
10868 tree arg01 = TREE_OPERAND (arg0, 1);
10869 enum tree_code code0 = TREE_CODE (arg0);
10870 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10871
10872 /* (X - c) > X becomes false. */
10873 if (code == GT_EXPR
10874 && ((code0 == MINUS_EXPR && is_positive >= 0)
10875 || (code0 == PLUS_EXPR && is_positive <= 0)))
10876 return constant_boolean_node (0, type);
10877
10878 /* Likewise (X + c) < X becomes false. */
10879 if (code == LT_EXPR
10880 && ((code0 == PLUS_EXPR && is_positive >= 0)
10881 || (code0 == MINUS_EXPR && is_positive <= 0)))
10882 return constant_boolean_node (0, type);
10883
10884 /* Convert (X - c) <= X to true. */
10885 if (!HONOR_NANS (arg1)
10886 && code == LE_EXPR
10887 && ((code0 == MINUS_EXPR && is_positive >= 0)
10888 || (code0 == PLUS_EXPR && is_positive <= 0)))
10889 return constant_boolean_node (1, type);
10890
10891 /* Convert (X + c) >= X to true. */
10892 if (!HONOR_NANS (arg1)
10893 && code == GE_EXPR
10894 && ((code0 == PLUS_EXPR && is_positive >= 0)
10895 || (code0 == MINUS_EXPR && is_positive <= 0)))
10896 return constant_boolean_node (1, type);
10897 }
10898
10899 /* If we are comparing an ABS_EXPR with a constant, we can
10900 convert all the cases into explicit comparisons, but they may
10901 well not be faster than doing the ABS and one comparison.
10902 But ABS (X) <= C is a range comparison, which becomes a subtraction
10903 and a comparison, and is probably faster. */
10904 if (code == LE_EXPR
10905 && TREE_CODE (arg1) == INTEGER_CST
10906 && TREE_CODE (arg0) == ABS_EXPR
10907 && ! TREE_SIDE_EFFECTS (arg0)
10908 && (0 != (tem = negate_expr (arg1)))
10909 && TREE_CODE (tem) == INTEGER_CST
10910 && !TREE_OVERFLOW (tem))
10911 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
10912 build2 (GE_EXPR, type,
10913 TREE_OPERAND (arg0, 0), tem),
10914 build2 (LE_EXPR, type,
10915 TREE_OPERAND (arg0, 0), arg1));
10916
10917 /* Convert ABS_EXPR<x> >= 0 to true. */
10918 strict_overflow_p = false;
10919 if (code == GE_EXPR
10920 && (integer_zerop (arg1)
10921 || (! HONOR_NANS (arg0)
10922 && real_zerop (arg1)))
10923 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
10924 {
10925 if (strict_overflow_p)
10926 fold_overflow_warning (("assuming signed overflow does not occur "
10927 "when simplifying comparison of "
10928 "absolute value and zero"),
10929 WARN_STRICT_OVERFLOW_CONDITIONAL);
10930 return omit_one_operand_loc (loc, type,
10931 constant_boolean_node (true, type),
10932 arg0);
10933 }
10934
10935 /* Convert ABS_EXPR<x> < 0 to false. */
10936 strict_overflow_p = false;
10937 if (code == LT_EXPR
10938 && (integer_zerop (arg1) || real_zerop (arg1))
10939 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
10940 {
10941 if (strict_overflow_p)
10942 fold_overflow_warning (("assuming signed overflow does not occur "
10943 "when simplifying comparison of "
10944 "absolute value and zero"),
10945 WARN_STRICT_OVERFLOW_CONDITIONAL);
10946 return omit_one_operand_loc (loc, type,
10947 constant_boolean_node (false, type),
10948 arg0);
10949 }
10950
10951 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10952 and similarly for >= into !=. */
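	 /* E.g. for unsigned x, x < (1U << y) becomes (x >> y) == 0:
	    x is below the single set bit exactly when no bit at position
	    y or above is set in x. */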
10953 if ((code == LT_EXPR || code == GE_EXPR)
10954 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10955 && TREE_CODE (arg1) == LSHIFT_EXPR
10956 && integer_onep (TREE_OPERAND (arg1, 0)))
10957 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10958 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10959 TREE_OPERAND (arg1, 1)),
10960 build_zero_cst (TREE_TYPE (arg0)));
10961
10962 	 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
10963 	 otherwise Y might be >= # of bits in X's type and thus e.g.
10964 	 (unsigned char) (1 << Y) for Y == 15 might be 0.
10965 	 If the cast is widening, then 1 << Y should have unsigned type,
10966 	 otherwise if Y is the number of bits in the signed shift type minus 1,
10967 	 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
10968 	 Y == 31 might be 0xffffffff80000000. */
10969 if ((code == LT_EXPR || code == GE_EXPR)
10970 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10971 && CONVERT_EXPR_P (arg1)
10972 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10973 && (element_precision (TREE_TYPE (arg1))
10974 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
10975 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
10976 || (element_precision (TREE_TYPE (arg1))
10977 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
10978 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
10979 {
10980 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10981 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
10982 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10983 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
10984 build_zero_cst (TREE_TYPE (arg0)));
10985 }
10986
10987 return NULL_TREE;
10988
10989 case UNORDERED_EXPR:
10990 case ORDERED_EXPR:
10991 case UNLT_EXPR:
10992 case UNLE_EXPR:
10993 case UNGT_EXPR:
10994 case UNGE_EXPR:
10995 case UNEQ_EXPR:
10996 case LTGT_EXPR:
10997 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
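	 /* E.g. with float f1 and f2, (double) f1 CMP (double) f2 gives the
	    same result as f1 CMP f2: the widening conversion is exact, so
	    nothing is lost by comparing in the narrower common type. */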
10998 {
10999 tree targ0 = strip_float_extensions (arg0);
11000 tree targ1 = strip_float_extensions (arg1);
11001 tree newtype = TREE_TYPE (targ0);
11002
11003 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11004 newtype = TREE_TYPE (targ1);
11005
11006 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11007 return fold_build2_loc (loc, code, type,
11008 fold_convert_loc (loc, newtype, targ0),
11009 fold_convert_loc (loc, newtype, targ1));
11010 }
11011
11012 return NULL_TREE;
11013
11014 case COMPOUND_EXPR:
11015 /* When pedantic, a compound expression can be neither an lvalue
11016 nor an integer constant expression. */
11017 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11018 return NULL_TREE;
11019 	 /* Don't let (0, 0) be a null pointer constant. */
11020 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11021 : fold_convert_loc (loc, type, arg1);
11022 return pedantic_non_lvalue_loc (loc, tem);
11023
11024 case ASSERT_EXPR:
11025 /* An ASSERT_EXPR should never be passed to fold_binary. */
11026 gcc_unreachable ();
11027
11028 default:
11029 return NULL_TREE;
11030 } /* switch (code) */
11031 }
11032
11033 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11034 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11035 of GOTO_EXPR. */
11036
11037 static tree
11038 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11039 {
11040 switch (TREE_CODE (*tp))
11041 {
11042 case LABEL_EXPR:
11043 return *tp;
11044
11045 case GOTO_EXPR:
11046 *walk_subtrees = 0;
11047
11048 /* fall through */
11049
11050 default:
11051 return NULL_TREE;
11052 }
11053 }
11054
11055 /* Return whether the sub-tree ST contains a label which is accessible from
11056 outside the sub-tree. */
11057
11058 static bool
11059 contains_label_p (tree st)
11060 {
11061 return
11062 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11063 }
11064
11065 /* Fold a ternary expression of code CODE and type TYPE with operands
11066 OP0, OP1, and OP2. Return the folded expression if folding is
11067 successful. Otherwise, return NULL_TREE. */
11068
11069 tree
11070 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11071 tree op0, tree op1, tree op2)
11072 {
11073 tree tem;
11074 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11075 enum tree_code_class kind = TREE_CODE_CLASS (code);
11076
11077 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11078 && TREE_CODE_LENGTH (code) == 3);
11079
11080 /* If this is a commutative operation, and OP0 is a constant, move it
11081 to OP1 to reduce the number of tests below. */
11082 if (commutative_ternary_tree_code (code)
11083 && tree_swap_operands_p (op0, op1))
11084 return fold_build3_loc (loc, code, type, op1, op0, op2);
11085
11086 tem = generic_simplify (loc, code, type, op0, op1, op2);
11087 if (tem)
11088 return tem;
11089
11090 /* Strip any conversions that don't change the mode. This is safe
11091 for every expression, except for a comparison expression because
11092 its signedness is derived from its operands. So, in the latter
11093 case, only strip conversions that don't change the signedness.
11094
11095 Note that this is done as an internal manipulation within the
11096 constant folder, in order to find the simplest representation of
11097 	 the arguments so that their form can be studied. In any case,
11098 the appropriate type conversions should be put back in the tree
11099 that will get out of the constant folder. */
11100 if (op0)
11101 {
11102 arg0 = op0;
11103 STRIP_NOPS (arg0);
11104 }
11105
11106 if (op1)
11107 {
11108 arg1 = op1;
11109 STRIP_NOPS (arg1);
11110 }
11111
11112 if (op2)
11113 {
11114 arg2 = op2;
11115 STRIP_NOPS (arg2);
11116 }
11117
11118 switch (code)
11119 {
11120 case COMPONENT_REF:
11121 if (TREE_CODE (arg0) == CONSTRUCTOR
11122 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11123 {
11124 unsigned HOST_WIDE_INT idx;
11125 tree field, value;
11126 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11127 if (field == arg1)
11128 return value;
11129 }
11130 return NULL_TREE;
11131
11132 case COND_EXPR:
11133 case VEC_COND_EXPR:
11134 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11135 so all simple results must be passed through pedantic_non_lvalue. */
11136 if (TREE_CODE (arg0) == INTEGER_CST)
11137 {
11138 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11139 tem = integer_zerop (arg0) ? op2 : op1;
11140 /* Only optimize constant conditions when the selected branch
11141 has the same type as the COND_EXPR. This avoids optimizing
11142 away "c ? x : throw", where the throw has a void type.
11143 	 Avoid throwing away an operand that contains a label. */
11144 if ((!TREE_SIDE_EFFECTS (unused_op)
11145 || !contains_label_p (unused_op))
11146 && (! VOID_TYPE_P (TREE_TYPE (tem))
11147 || VOID_TYPE_P (type)))
11148 return pedantic_non_lvalue_loc (loc, tem);
11149 return NULL_TREE;
11150 }
11151 else if (TREE_CODE (arg0) == VECTOR_CST)
11152 {
11153 if ((TREE_CODE (arg1) == VECTOR_CST
11154 || TREE_CODE (arg1) == CONSTRUCTOR)
11155 && (TREE_CODE (arg2) == VECTOR_CST
11156 || TREE_CODE (arg2) == CONSTRUCTOR))
11157 {
11158 unsigned int nelts = VECTOR_CST_NELTS (arg0), i;
11159 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11160 auto_vec_perm_indices sel (nelts);
11161 for (i = 0; i < nelts; i++)
11162 {
11163 tree val = VECTOR_CST_ELT (arg0, i);
11164 if (integer_all_onesp (val))
11165 sel.quick_push (i);
11166 else if (integer_zerop (val))
11167 sel.quick_push (nelts + i);
11168 else /* Currently unreachable. */
11169 return NULL_TREE;
11170 }
11171 tree t = fold_vec_perm (type, arg1, arg2, sel);
11172 if (t != NULL_TREE)
11173 return t;
11174 }
11175 }
11176
11177 /* If we have A op B ? A : C, we may be able to convert this to a
11178 simpler expression, depending on the operation and the values
11179 of B and C. Signed zeros prevent all of these transformations,
11180 for reasons given above each one.
11181
11182 Also try swapping the arguments and inverting the conditional. */
11183 if (COMPARISON_CLASS_P (arg0)
11184 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11185 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11186 {
11187 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11188 if (tem)
11189 return tem;
11190 }
11191
11192 if (COMPARISON_CLASS_P (arg0)
11193 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11194 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11195 {
11196 location_t loc0 = expr_location_or (arg0, loc);
11197 tem = fold_invert_truthvalue (loc0, arg0);
11198 if (tem && COMPARISON_CLASS_P (tem))
11199 {
11200 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11201 if (tem)
11202 return tem;
11203 }
11204 }
11205
11206 /* If the second operand is simpler than the third, swap them
11207 since that produces better jump optimization results. */
11208 if (truth_value_p (TREE_CODE (arg0))
11209 && tree_swap_operands_p (op1, op2))
11210 {
11211 location_t loc0 = expr_location_or (arg0, loc);
11212 /* See if this can be inverted. If it can't, possibly because
11213 it was a floating-point inequality comparison, don't do
11214 anything. */
11215 tem = fold_invert_truthvalue (loc0, arg0);
11216 if (tem)
11217 return fold_build3_loc (loc, code, type, tem, op2, op1);
11218 }
11219
11220 /* Convert A ? 1 : 0 to simply A. */
11221 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11222 : (integer_onep (op1)
11223 && !VECTOR_TYPE_P (type)))
11224 && integer_zerop (op2)
11225 /* If we try to convert OP0 to our type, the
11226 call to fold will try to move the conversion inside
11227 a COND, which will recurse. In that case, the COND_EXPR
11228 is probably the best choice, so leave it alone. */
11229 && type == TREE_TYPE (arg0))
11230 return pedantic_non_lvalue_loc (loc, arg0);
11231
11232 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11233 over COND_EXPR in cases such as floating point comparisons. */
11234 if (integer_zerop (op1)
11235 && code == COND_EXPR
11236 && integer_onep (op2)
11237 && !VECTOR_TYPE_P (type)
11238 && truth_value_p (TREE_CODE (arg0)))
11239 return pedantic_non_lvalue_loc (loc,
11240 fold_convert_loc (loc, type,
11241 invert_truthvalue_loc (loc,
11242 arg0)));
11243
11244 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
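	 /* E.g. for 32-bit int x, x < 0 ? 0x80000000 : 0 becomes
	    x & 0x80000000, since the sign bit of x is set exactly when
	    x < 0 (modulo the width checks performed below). */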
11245 if (TREE_CODE (arg0) == LT_EXPR
11246 && integer_zerop (TREE_OPERAND (arg0, 1))
11247 && integer_zerop (op2)
11248 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11249 {
11250 /* sign_bit_p looks through both zero and sign extensions,
11251 but for this optimization only sign extensions are
11252 usable. */
11253 tree tem2 = TREE_OPERAND (arg0, 0);
11254 while (tem != tem2)
11255 {
11256 if (TREE_CODE (tem2) != NOP_EXPR
11257 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11258 {
11259 tem = NULL_TREE;
11260 break;
11261 }
11262 tem2 = TREE_OPERAND (tem2, 0);
11263 }
11264 /* sign_bit_p only checks ARG1 bits within A's precision.
11265 If <sign bit of A> has wider type than A, bits outside
11266 of A's precision in <sign bit of A> need to be checked.
11267 	 If they are all 0, this optimization needs to be done
11268 	 in unsigned A's type; if they are all 1, in signed A's type;
11269 	 otherwise this can't be done. */
11270 if (tem
11271 && TYPE_PRECISION (TREE_TYPE (tem))
11272 < TYPE_PRECISION (TREE_TYPE (arg1))
11273 && TYPE_PRECISION (TREE_TYPE (tem))
11274 < TYPE_PRECISION (type))
11275 {
11276 int inner_width, outer_width;
11277 tree tem_type;
11278
11279 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11280 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11281 if (outer_width > TYPE_PRECISION (type))
11282 outer_width = TYPE_PRECISION (type);
11283
11284 wide_int mask = wi::shifted_mask
11285 (inner_width, outer_width - inner_width, false,
11286 TYPE_PRECISION (TREE_TYPE (arg1)));
11287
11288 wide_int common = mask & wi::to_wide (arg1);
11289 if (common == mask)
11290 {
11291 tem_type = signed_type_for (TREE_TYPE (tem));
11292 tem = fold_convert_loc (loc, tem_type, tem);
11293 }
11294 else if (common == 0)
11295 {
11296 tem_type = unsigned_type_for (TREE_TYPE (tem));
11297 tem = fold_convert_loc (loc, tem_type, tem);
11298 }
11299 else
11300 tem = NULL;
11301 }
11302
11303 if (tem)
11304 return
11305 fold_convert_loc (loc, type,
11306 fold_build2_loc (loc, BIT_AND_EXPR,
11307 TREE_TYPE (tem), tem,
11308 fold_convert_loc (loc,
11309 TREE_TYPE (tem),
11310 arg1)));
11311 }
11312
11313 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11314 already handled above. */
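	 /* E.g. (x >> 3) & 1 ? 8 : 0 becomes x & 8: the condition isolates
	    bit 3 of x, and the selected value is that same bit already
	    shifted back into place. */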
11315 if (TREE_CODE (arg0) == BIT_AND_EXPR
11316 && integer_onep (TREE_OPERAND (arg0, 1))
11317 && integer_zerop (op2)
11318 && integer_pow2p (arg1))
11319 {
11320 tree tem = TREE_OPERAND (arg0, 0);
11321 STRIP_NOPS (tem);
11322 if (TREE_CODE (tem) == RSHIFT_EXPR
11323 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11324 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11325 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11326 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11327 fold_convert_loc (loc, type,
11328 TREE_OPERAND (tem, 0)),
11329 op1);
11330 }
11331
11332 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11333 is probably obsolete because the first operand should be a
11334 truth value (that's why we have the two cases above), but let's
11335 leave it in until we can confirm this for all front-ends. */
11336 if (integer_zerop (op2)
11337 && TREE_CODE (arg0) == NE_EXPR
11338 && integer_zerop (TREE_OPERAND (arg0, 1))
11339 && integer_pow2p (arg1)
11340 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11341 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11342 arg1, OEP_ONLY_CONST))
11343 return pedantic_non_lvalue_loc (loc,
11344 fold_convert_loc (loc, type,
11345 TREE_OPERAND (arg0, 0)));
11346
11347 /* Disable the transformations below for vectors, since
11348 fold_binary_op_with_conditional_arg may undo them immediately,
11349 yielding an infinite loop. */
11350 if (code == VEC_COND_EXPR)
11351 return NULL_TREE;
11352
11353 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11354 if (integer_zerop (op2)
11355 && truth_value_p (TREE_CODE (arg0))
11356 && truth_value_p (TREE_CODE (arg1))
11357 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11358 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11359 : TRUTH_ANDIF_EXPR,
11360 type, fold_convert_loc (loc, type, arg0), op1);
11361
11362 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11363 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11364 && truth_value_p (TREE_CODE (arg0))
11365 && truth_value_p (TREE_CODE (arg1))
11366 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11367 {
11368 location_t loc0 = expr_location_or (arg0, loc);
11369 /* Only perform transformation if ARG0 is easily inverted. */
11370 tem = fold_invert_truthvalue (loc0, arg0);
11371 if (tem)
11372 return fold_build2_loc (loc, code == VEC_COND_EXPR
11373 ? BIT_IOR_EXPR
11374 : TRUTH_ORIF_EXPR,
11375 type, fold_convert_loc (loc, type, tem),
11376 op1);
11377 }
11378
11379 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11380 if (integer_zerop (arg1)
11381 && truth_value_p (TREE_CODE (arg0))
11382 && truth_value_p (TREE_CODE (op2))
11383 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11384 {
11385 location_t loc0 = expr_location_or (arg0, loc);
11386 /* Only perform transformation if ARG0 is easily inverted. */
11387 tem = fold_invert_truthvalue (loc0, arg0);
11388 if (tem)
11389 return fold_build2_loc (loc, code == VEC_COND_EXPR
11390 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11391 type, fold_convert_loc (loc, type, tem),
11392 op2);
11393 }
11394
11395 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11396 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11397 && truth_value_p (TREE_CODE (arg0))
11398 && truth_value_p (TREE_CODE (op2))
11399 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11400 return fold_build2_loc (loc, code == VEC_COND_EXPR
11401 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11402 type, fold_convert_loc (loc, type, arg0), op2);
11403
11404 return NULL_TREE;
11405
11406 case CALL_EXPR:
11407 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11408 of fold_ternary on them. */
11409 gcc_unreachable ();
11410
11411 case BIT_FIELD_REF:
11412 if (TREE_CODE (arg0) == VECTOR_CST
11413 && (type == TREE_TYPE (TREE_TYPE (arg0))
11414 || (TREE_CODE (type) == VECTOR_TYPE
11415 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11416 {
11417 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11418 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11419 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11420 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11421
11422 if (n != 0
11423 && (idx % width) == 0
11424 && (n % width) == 0
11425 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11426 {
11427 idx = idx / width;
11428 n = n / width;
11429
11430 if (TREE_CODE (arg0) == VECTOR_CST)
11431 {
11432 if (n == 1)
11433 return VECTOR_CST_ELT (arg0, idx);
11434
11435 auto_vec<tree, 32> vals (n);
11436 for (unsigned i = 0; i < n; ++i)
11437 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11438 return build_vector (type, vals);
11439 }
11440 }
11441 }
11442
11443 /* On constants we can use native encode/interpret to constant
11444 fold (nearly) all BIT_FIELD_REFs. */
11445 if (CONSTANT_CLASS_P (arg0)
11446 && can_native_interpret_type_p (type)
11447 && BITS_PER_UNIT == 8)
11448 {
11449 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11450 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11451 /* Limit us to a reasonable amount of work. To relax the
11452 other limitations we need bit-shifting of the buffer
11453 and rounding up the size. */
11454 if (bitpos % BITS_PER_UNIT == 0
11455 && bitsize % BITS_PER_UNIT == 0
11456 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11457 {
11458 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11459 unsigned HOST_WIDE_INT len
11460 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11461 bitpos / BITS_PER_UNIT);
11462 if (len > 0
11463 && len * BITS_PER_UNIT >= bitsize)
11464 {
11465 tree v = native_interpret_expr (type, b,
11466 bitsize / BITS_PER_UNIT);
11467 if (v)
11468 return v;
11469 }
11470 }
11471 }
11472
11473 return NULL_TREE;
11474
11475 case FMA_EXPR:
11476 /* For integers we can decompose the FMA if possible. */
11477 if (TREE_CODE (arg0) == INTEGER_CST
11478 && TREE_CODE (arg1) == INTEGER_CST)
11479 return fold_build2_loc (loc, PLUS_EXPR, type,
11480 const_binop (MULT_EXPR, arg0, arg1), arg2);
11481 if (integer_zerop (arg2))
11482 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11483
11484 return fold_fma (loc, type, arg0, arg1, arg2);
11485
11486 case VEC_PERM_EXPR:
11487 if (TREE_CODE (arg2) == VECTOR_CST)
11488 {
11489 unsigned int nelts = VECTOR_CST_NELTS (arg2), i, mask, mask2;
11490 bool need_mask_canon = false;
11491 bool need_mask_canon2 = false;
11492 bool all_in_vec0 = true;
11493 bool all_in_vec1 = true;
11494 bool maybe_identity = true;
11495 bool single_arg = (op0 == op1);
11496 bool changed = false;
11497
11498 mask2 = 2 * nelts - 1;
11499 mask = single_arg ? (nelts - 1) : mask2;
11500 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11501 auto_vec_perm_indices sel (nelts);
11502 auto_vec_perm_indices sel2 (nelts);
11503 for (i = 0; i < nelts; i++)
11504 {
11505 tree val = VECTOR_CST_ELT (arg2, i);
11506 if (TREE_CODE (val) != INTEGER_CST)
11507 return NULL_TREE;
11508
11509 /* Make sure that the perm value is in an acceptable
11510 range. */
11511 wi::tree_to_wide_ref t = wi::to_wide (val);
11512 need_mask_canon |= wi::gtu_p (t, mask);
11513 need_mask_canon2 |= wi::gtu_p (t, mask2);
11514 unsigned int elt = t.to_uhwi () & mask;
11515 unsigned int elt2 = t.to_uhwi () & mask2;
11516
11517 if (elt < nelts)
11518 all_in_vec1 = false;
11519 else
11520 all_in_vec0 = false;
11521
11522 if ((elt & (nelts - 1)) != i)
11523 maybe_identity = false;
11524
11525 sel.quick_push (elt);
11526 sel2.quick_push (elt2);
11527 }
11528
11529 if (maybe_identity)
11530 {
11531 if (all_in_vec0)
11532 return op0;
11533 if (all_in_vec1)
11534 return op1;
11535 }
11536
11537 if (all_in_vec0)
11538 op1 = op0;
11539 else if (all_in_vec1)
11540 {
11541 op0 = op1;
11542 for (i = 0; i < nelts; i++)
11543 sel[i] -= nelts;
11544 need_mask_canon = true;
11545 }
11546
11547 if ((TREE_CODE (op0) == VECTOR_CST
11548 || TREE_CODE (op0) == CONSTRUCTOR)
11549 && (TREE_CODE (op1) == VECTOR_CST
11550 || TREE_CODE (op1) == CONSTRUCTOR))
11551 {
11552 tree t = fold_vec_perm (type, op0, op1, sel);
11553 if (t != NULL_TREE)
11554 return t;
11555 }
11556
11557 if (op0 == op1 && !single_arg)
11558 changed = true;
11559
11560 /* Some targets are deficient and fail to expand a single
11561 argument permutation while still allowing an equivalent
11562 2-argument version. */
11563 if (need_mask_canon && arg2 == op2
11564 && !can_vec_perm_p (TYPE_MODE (type), false, &sel)
11565 && can_vec_perm_p (TYPE_MODE (type), false, &sel2))
11566 {
11567 need_mask_canon = need_mask_canon2;
11568 sel = sel2;
11569 }
11570
11571 if (need_mask_canon && arg2 == op2)
11572 {
11573 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11574 auto_vec<tree, 32> tsel (nelts);
11575 for (i = 0; i < nelts; i++)
11576 tsel.quick_push (build_int_cst (eltype, sel[i]));
11577 op2 = build_vector (TREE_TYPE (arg2), tsel);
11578 changed = true;
11579 }
11580
11581 if (changed)
11582 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11583 }
11584 return NULL_TREE;
11585
11586 case BIT_INSERT_EXPR:
11587 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
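	 /* As a worked example (assuming 32-bit precision), inserting the
	    2-bit value 3 at bit position 4 into 0xF0F yields
	    (0xF0F & ~0x30) | (3 << 4) == 0xF3F: the shifted mask clears
	    the destination field and the zero-extended value is shifted
	    into it. */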
11588 if (TREE_CODE (arg0) == INTEGER_CST
11589 && TREE_CODE (arg1) == INTEGER_CST)
11590 {
11591 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11592 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11593 wide_int tem = (wi::to_wide (arg0)
11594 & wi::shifted_mask (bitpos, bitsize, true,
11595 TYPE_PRECISION (type)));
11596 wide_int tem2
11597 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11598 bitsize), bitpos);
11599 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11600 }
11601 else if (TREE_CODE (arg0) == VECTOR_CST
11602 && CONSTANT_CLASS_P (arg1)
11603 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11604 TREE_TYPE (arg1)))
11605 {
11606 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11607 unsigned HOST_WIDE_INT elsize
11608 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11609 if (bitpos % elsize == 0)
11610 {
11611 unsigned k = bitpos / elsize;
11612 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11613 return arg0;
11614 else
11615 {
11616 unsigned int nelts = VECTOR_CST_NELTS (arg0);
11617 auto_vec<tree, 32> elts (nelts);
11618 elts.quick_grow (nelts);
11619 memcpy (&elts[0], VECTOR_CST_ELTS (arg0),
11620 sizeof (tree) * nelts);
11621 elts[k] = arg1;
11622 return build_vector (type, elts);
11623 }
11624 }
11625 }
11626 return NULL_TREE;
11627
11628 default:
11629 return NULL_TREE;
11630 } /* switch (code) */
11631 }
11632
11633 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11634 of an array (or vector). */
11635
11636 tree
11637 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11638 {
11639 tree index_type = NULL_TREE;
11640 offset_int low_bound = 0;
11641
11642 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11643 {
11644 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11645 if (domain_type && TYPE_MIN_VALUE (domain_type))
11646 {
11647 	 /* Static constructors for variably sized objects make no sense. */
11648 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11649 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11650 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11651 }
11652 }
11653
11654 if (index_type)
11655 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11656 TYPE_SIGN (index_type));
11657
11658 offset_int index = low_bound - 1;
11659 if (index_type)
11660 index = wi::ext (index, TYPE_PRECISION (index_type),
11661 TYPE_SIGN (index_type));
11662
11663 offset_int max_index;
11664 unsigned HOST_WIDE_INT cnt;
11665 tree cfield, cval;
11666
11667 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11668 {
11669 	 /* An array constructor may explicitly set an index, specify a range,
11670 	 or leave the index NULL, meaning it is the next index after the
11671 	 previous one. */
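	    /* E.g. in the GNU C initializer int a[6] = { [2 ... 4] = 7 },
	       the single constructor element carries a RANGE_EXPR for
	       indices 2 through 4, and a lookup of access_index 3
	       returns 7. */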
11672 if (cfield)
11673 {
11674 if (TREE_CODE (cfield) == INTEGER_CST)
11675 max_index = index = wi::to_offset (cfield);
11676 else
11677 {
11678 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11679 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11680 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11681 }
11682 }
11683 else
11684 {
11685 index += 1;
11686 if (index_type)
11687 index = wi::ext (index, TYPE_PRECISION (index_type),
11688 TYPE_SIGN (index_type));
11689 max_index = index;
11690 }
11691
11692 	 /* Do we have a match? */
11693 if (wi::cmpu (access_index, index) >= 0
11694 && wi::cmpu (access_index, max_index) <= 0)
11695 return cval;
11696 }
11697 return NULL_TREE;
11698 }
11699
11700 /* Perform constant folding and related simplification of EXPR.
11701 The related simplifications include x*1 => x, x*0 => 0, etc.,
11702 and application of the associative law.
11703 NOP_EXPR conversions may be removed freely (as long as we
11704 are careful not to change the type of the overall expression).
11705 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11706 but we can constant-fold them if they have constant operands. */
11707
11708 #ifdef ENABLE_FOLD_CHECKING
11709 # define fold(x) fold_1 (x)
11710 static tree fold_1 (tree);
11711 static
11712 #endif
11713 tree
11714 fold (tree expr)
11715 {
11716 const tree t = expr;
11717 enum tree_code code = TREE_CODE (t);
11718 enum tree_code_class kind = TREE_CODE_CLASS (code);
11719 tree tem;
11720 location_t loc = EXPR_LOCATION (expr);
11721
11722 /* Return right away if a constant. */
11723 if (kind == tcc_constant)
11724 return t;
11725
11726 /* CALL_EXPR-like objects with variable numbers of operands are
11727 treated specially. */
11728 if (kind == tcc_vl_exp)
11729 {
11730 if (code == CALL_EXPR)
11731 {
11732 tem = fold_call_expr (loc, expr, false);
11733 return tem ? tem : expr;
11734 }
11735 return expr;
11736 }
11737
11738 if (IS_EXPR_CODE_CLASS (kind))
11739 {
11740 tree type = TREE_TYPE (t);
11741 tree op0, op1, op2;
11742
11743 switch (TREE_CODE_LENGTH (code))
11744 {
11745 case 1:
11746 op0 = TREE_OPERAND (t, 0);
11747 tem = fold_unary_loc (loc, code, type, op0);
11748 return tem ? tem : expr;
11749 case 2:
11750 op0 = TREE_OPERAND (t, 0);
11751 op1 = TREE_OPERAND (t, 1);
11752 tem = fold_binary_loc (loc, code, type, op0, op1);
11753 return tem ? tem : expr;
11754 case 3:
11755 op0 = TREE_OPERAND (t, 0);
11756 op1 = TREE_OPERAND (t, 1);
11757 op2 = TREE_OPERAND (t, 2);
11758 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11759 return tem ? tem : expr;
11760 default:
11761 break;
11762 }
11763 }
11764
11765 switch (code)
11766 {
11767 case ARRAY_REF:
11768 {
11769 tree op0 = TREE_OPERAND (t, 0);
11770 tree op1 = TREE_OPERAND (t, 1);
11771
11772 if (TREE_CODE (op1) == INTEGER_CST
11773 && TREE_CODE (op0) == CONSTRUCTOR
11774 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11775 {
11776 tree val = get_array_ctor_element_at_index (op0,
11777 wi::to_offset (op1));
11778 if (val)
11779 return val;
11780 }
11781
11782 return t;
11783 }
11784
11785 /* Return a VECTOR_CST if possible. */
11786 case CONSTRUCTOR:
11787 {
11788 tree type = TREE_TYPE (t);
11789 if (TREE_CODE (type) != VECTOR_TYPE)
11790 return t;
11791
11792 unsigned i;
11793 tree val;
11794 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11795 if (! CONSTANT_CLASS_P (val))
11796 return t;
11797
11798 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11799 }
11800
11801 case CONST_DECL:
11802 return fold (DECL_INITIAL (t));
11803
11804 default:
11805 return t;
11806 } /* switch (code) */
11807 }
11808
11809 #ifdef ENABLE_FOLD_CHECKING
11810 #undef fold
11811
11812 static void fold_checksum_tree (const_tree, struct md5_ctx *,
11813 hash_table<nofree_ptr_hash<const tree_node> > *);
11814 static void fold_check_failed (const_tree, const_tree);
11815 void print_fold_checksum (const_tree);
11816
11817 	 /* When --enable-checking=fold, compute a digest of expr before
11818 	 and after the actual fold call to verify that fold did not
11819 	 accidentally change the original expr. */
11820
11821 tree
11822 fold (tree expr)
11823 {
11824 tree ret;
11825 struct md5_ctx ctx;
11826 unsigned char checksum_before[16], checksum_after[16];
11827 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11828
11829 md5_init_ctx (&ctx);
11830 fold_checksum_tree (expr, &ctx, &ht);
11831 md5_finish_ctx (&ctx, checksum_before);
11832 ht.empty ();
11833
11834 ret = fold_1 (expr);
11835
11836 md5_init_ctx (&ctx);
11837 fold_checksum_tree (expr, &ctx, &ht);
11838 md5_finish_ctx (&ctx, checksum_after);
11839
11840 if (memcmp (checksum_before, checksum_after, 16))
11841 fold_check_failed (expr, ret);
11842
11843 return ret;
11844 }
11845
11846 void
11847 print_fold_checksum (const_tree expr)
11848 {
11849 struct md5_ctx ctx;
11850 unsigned char checksum[16], cnt;
11851 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11852
11853 md5_init_ctx (&ctx);
11854 fold_checksum_tree (expr, &ctx, &ht);
11855 md5_finish_ctx (&ctx, checksum);
11856 for (cnt = 0; cnt < 16; ++cnt)
11857 fprintf (stderr, "%02x", checksum[cnt]);
11858 putc ('\n', stderr);
11859 }
11860
11861 static void
11862 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
11863 {
11864 internal_error ("fold check: original tree changed by fold");
11865 }
11866
11867 static void
11868 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
11869 hash_table<nofree_ptr_hash <const tree_node> > *ht)
11870 {
11871 const tree_node **slot;
11872 enum tree_code code;
11873 union tree_node buf;
11874 int i, len;
11875
11876 recursive_label:
11877 if (expr == NULL)
11878 return;
11879 slot = ht->find_slot (expr, INSERT);
11880 if (*slot != NULL)
11881 return;
11882 *slot = expr;
11883 code = TREE_CODE (expr);
11884 if (TREE_CODE_CLASS (code) == tcc_declaration
11885 && HAS_DECL_ASSEMBLER_NAME_P (expr))
11886 {
11887 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
11888 memcpy ((char *) &buf, expr, tree_size (expr));
11889 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
11890 buf.decl_with_vis.symtab_node = NULL;
11891 expr = (tree) &buf;
11892 }
11893 else if (TREE_CODE_CLASS (code) == tcc_type
11894 && (TYPE_POINTER_TO (expr)
11895 || TYPE_REFERENCE_TO (expr)
11896 || TYPE_CACHED_VALUES_P (expr)
11897 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
11898 || TYPE_NEXT_VARIANT (expr)
11899 || TYPE_ALIAS_SET_KNOWN_P (expr)))
11900 {
11901 /* Allow these fields to be modified. */
11902 tree tmp;
11903 memcpy ((char *) &buf, expr, tree_size (expr));
11904 expr = tmp = (tree) &buf;
11905 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
11906 TYPE_POINTER_TO (tmp) = NULL;
11907 TYPE_REFERENCE_TO (tmp) = NULL;
11908 TYPE_NEXT_VARIANT (tmp) = NULL;
11909 TYPE_ALIAS_SET (tmp) = -1;
11910 if (TYPE_CACHED_VALUES_P (tmp))
11911 {
11912 TYPE_CACHED_VALUES_P (tmp) = 0;
11913 TYPE_CACHED_VALUES (tmp) = NULL;
11914 }
11915 }
11916 md5_process_bytes (expr, tree_size (expr), ctx);
11917 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
11918 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11919 if (TREE_CODE_CLASS (code) != tcc_type
11920 && TREE_CODE_CLASS (code) != tcc_declaration
11921 && code != TREE_LIST
11922 && code != SSA_NAME
11923 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
11924 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11925 switch (TREE_CODE_CLASS (code))
11926 {
11927 case tcc_constant:
11928 switch (code)
11929 {
11930 case STRING_CST:
11931 md5_process_bytes (TREE_STRING_POINTER (expr),
11932 TREE_STRING_LENGTH (expr), ctx);
11933 break;
11934 case COMPLEX_CST:
11935 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11936 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11937 break;
11938 case VECTOR_CST:
11939 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
11940 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
11941 break;
11942 default:
11943 break;
11944 }
11945 break;
11946 case tcc_exceptional:
11947 switch (code)
11948 {
11949 case TREE_LIST:
11950 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11951 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11952 expr = TREE_CHAIN (expr);
11953 goto recursive_label;
11954 break;
11955 case TREE_VEC:
11956 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11957 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11958 break;
11959 default:
11960 break;
11961 }
11962 break;
11963 case tcc_expression:
11964 case tcc_reference:
11965 case tcc_comparison:
11966 case tcc_unary:
11967 case tcc_binary:
11968 case tcc_statement:
11969 case tcc_vl_exp:
11970 len = TREE_OPERAND_LENGTH (expr);
11971 for (i = 0; i < len; ++i)
11972 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11973 break;
11974 case tcc_declaration:
11975 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11976 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11977 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11978 {
11979 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11980 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11981 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11982 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11983 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11984 }
11985
11986 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11987 {
11988 if (TREE_CODE (expr) == FUNCTION_DECL)
11989 {
11990 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11991 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
11992 }
11993 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11994 }
11995 break;
11996 case tcc_type:
11997 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11998 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11999 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12000 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12001 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12002 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12003 if (INTEGRAL_TYPE_P (expr)
12004 || SCALAR_FLOAT_TYPE_P (expr))
12005 {
12006 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12007 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12008 }
12009 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12010 if (TREE_CODE (expr) == RECORD_TYPE
12011 || TREE_CODE (expr) == UNION_TYPE
12012 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12013 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12014 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12015 break;
12016 default:
12017 break;
12018 }
12019 }
12020
12021 /* Helper function for outputting the checksum of a tree T. When
12022 debugging with gdb, you can "define mynext" to be "next" followed
12023 by "call debug_fold_checksum (op0)", then just trace down till the
12024 outputs differ. */
12025
12026 DEBUG_FUNCTION void
12027 debug_fold_checksum (const_tree t)
12028 {
12029 int i;
12030 unsigned char checksum[16];
12031 struct md5_ctx ctx;
12032 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12033
12034 md5_init_ctx (&ctx);
12035 fold_checksum_tree (t, &ctx, &ht);
12036 md5_finish_ctx (&ctx, checksum);
12037 ht.empty ();
12038
12039 for (i = 0; i < 16; i++)
12040 fprintf (stderr, "%d ", checksum[i]);
12041
12042 fprintf (stderr, "\n");
12043 }
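
/* A sample session (illustrative; assumes ENABLE_FOLD_CHECKING and the
   gdb macro described in the comment above):

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end
     (gdb) mynext
     137 52 9 ...

   Stepping with "mynext" until the printed bytes change pinpoints the
   statement that mutated OP0. */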
12044
12045 #endif
12046
12047 /* Fold a unary tree expression with code CODE of type TYPE with an
12048 operand OP0. LOC is the location of the resulting expression.
12049 Return a folded expression if successful. Otherwise, return a tree
12050 expression with code CODE of type TYPE with an operand OP0. */
12051
12052 tree
12053 fold_build1_loc (location_t loc,
12054 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12055 {
12056 tree tem;
12057 #ifdef ENABLE_FOLD_CHECKING
12058 unsigned char checksum_before[16], checksum_after[16];
12059 struct md5_ctx ctx;
12060 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12061
12062 md5_init_ctx (&ctx);
12063 fold_checksum_tree (op0, &ctx, &ht);
12064 md5_finish_ctx (&ctx, checksum_before);
12065 ht.empty ();
12066 #endif
12067
12068 tem = fold_unary_loc (loc, code, type, op0);
12069 if (!tem)
12070 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12071
12072 #ifdef ENABLE_FOLD_CHECKING
12073 md5_init_ctx (&ctx);
12074 fold_checksum_tree (op0, &ctx, &ht);
12075 md5_finish_ctx (&ctx, checksum_after);
12076
12077 if (memcmp (checksum_before, checksum_after, 16))
12078 fold_check_failed (op0, tem);
12079 #endif
12080 return tem;
12081 }
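
/* Illustrative only: a typical call, for some previously built tree X,

     tree neg = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (x), x);

   either returns a simplified tree from fold_unary_loc (e.g. X itself
   when X is already a negation) or, failing that, a fresh NEGATE_EXPR
   node, so the caller always receives a usable tree. */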
12082
12083 /* Fold a binary tree expression with code CODE of type TYPE with
12084 operands OP0 and OP1. LOC is the location of the resulting
12085 expression. Return a folded expression if successful. Otherwise,
12086 return a tree expression with code CODE of type TYPE with operands
12087 OP0 and OP1. */
12088
12089 tree
12090 fold_build2_loc (location_t loc,
12091 enum tree_code code, tree type, tree op0, tree op1
12092 MEM_STAT_DECL)
12093 {
12094 tree tem;
12095 #ifdef ENABLE_FOLD_CHECKING
12096 unsigned char checksum_before_op0[16],
12097 checksum_before_op1[16],
12098 checksum_after_op0[16],
12099 checksum_after_op1[16];
12100 struct md5_ctx ctx;
12101 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12102
12103 md5_init_ctx (&ctx);
12104 fold_checksum_tree (op0, &ctx, &ht);
12105 md5_finish_ctx (&ctx, checksum_before_op0);
12106 ht.empty ();
12107
12108 md5_init_ctx (&ctx);
12109 fold_checksum_tree (op1, &ctx, &ht);
12110 md5_finish_ctx (&ctx, checksum_before_op1);
12111 ht.empty ();
12112 #endif
12113
12114 tem = fold_binary_loc (loc, code, type, op0, op1);
12115 if (!tem)
12116 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12117
12118 #ifdef ENABLE_FOLD_CHECKING
12119 md5_init_ctx (&ctx);
12120 fold_checksum_tree (op0, &ctx, &ht);
12121 md5_finish_ctx (&ctx, checksum_after_op0);
12122 ht.empty ();
12123
12124 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12125 fold_check_failed (op0, tem);
12126
12127 md5_init_ctx (&ctx);
12128 fold_checksum_tree (op1, &ctx, &ht);
12129 md5_finish_ctx (&ctx, checksum_after_op1);
12130
12131 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12132 fold_check_failed (op1, tem);
12133 #endif
12134 return tem;
12135 }
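
/* Illustrative only: with constant operands the fold succeeds outright,
   e.g.

     tree five = fold_build2_loc (loc, PLUS_EXPR, integer_type_node,
				  build_int_cst (integer_type_node, 2),
				  build_int_cst (integer_type_node, 3));

   yields the INTEGER_CST 5 rather than a PLUS_EXPR node. */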
12136
12137 /* Fold a ternary tree expression with code CODE of type TYPE with
12138 operands OP0, OP1, and OP2. Return a folded expression if
12139 successful. Otherwise, return a tree expression with code CODE of
12140 type TYPE with operands OP0, OP1, and OP2. */
12141
12142 tree
12143 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12144 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12145 {
12146 tree tem;
12147 #ifdef ENABLE_FOLD_CHECKING
12148 unsigned char checksum_before_op0[16],
12149 checksum_before_op1[16],
12150 checksum_before_op2[16],
12151 checksum_after_op0[16],
12152 checksum_after_op1[16],
12153 checksum_after_op2[16];
12154 struct md5_ctx ctx;
12155 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12156
12157 md5_init_ctx (&ctx);
12158 fold_checksum_tree (op0, &ctx, &ht);
12159 md5_finish_ctx (&ctx, checksum_before_op0);
12160 ht.empty ();
12161
12162 md5_init_ctx (&ctx);
12163 fold_checksum_tree (op1, &ctx, &ht);
12164 md5_finish_ctx (&ctx, checksum_before_op1);
12165 ht.empty ();
12166
12167 md5_init_ctx (&ctx);
12168 fold_checksum_tree (op2, &ctx, &ht);
12169 md5_finish_ctx (&ctx, checksum_before_op2);
12170 ht.empty ();
12171 #endif
12172
12173 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12174 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12175 if (!tem)
12176 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12177
12178 #ifdef ENABLE_FOLD_CHECKING
12179 md5_init_ctx (&ctx);
12180 fold_checksum_tree (op0, &ctx, &ht);
12181 md5_finish_ctx (&ctx, checksum_after_op0);
12182 ht.empty ();
12183
12184 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12185 fold_check_failed (op0, tem);
12186
12187 md5_init_ctx (&ctx);
12188 fold_checksum_tree (op1, &ctx, &ht);
12189 md5_finish_ctx (&ctx, checksum_after_op1);
12190 ht.empty ();
12191
12192 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12193 fold_check_failed (op1, tem);
12194
12195 md5_init_ctx (&ctx);
12196 fold_checksum_tree (op2, &ctx, &ht);
12197 md5_finish_ctx (&ctx, checksum_after_op2);
12198
12199 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12200 fold_check_failed (op2, tem);
12201 #endif
12202 return tem;
12203 }
12204
12205 /* Fold a CALL_EXPR expression of type TYPE with function FN and NARGS
12206 arguments in ARGARRAY, and a null static chain.
12207 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12208 of type TYPE from the given operands as constructed by build_call_array. */
12209
12210 tree
12211 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12212 int nargs, tree *argarray)
12213 {
12214 tree tem;
12215 #ifdef ENABLE_FOLD_CHECKING
12216 unsigned char checksum_before_fn[16],
12217 checksum_before_arglist[16],
12218 checksum_after_fn[16],
12219 checksum_after_arglist[16];
12220 struct md5_ctx ctx;
12221 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12222 int i;
12223
12224 md5_init_ctx (&ctx);
12225 fold_checksum_tree (fn, &ctx, &ht);
12226 md5_finish_ctx (&ctx, checksum_before_fn);
12227 ht.empty ();
12228
12229 md5_init_ctx (&ctx);
12230 for (i = 0; i < nargs; i++)
12231 fold_checksum_tree (argarray[i], &ctx, &ht);
12232 md5_finish_ctx (&ctx, checksum_before_arglist);
12233 ht.empty ();
12234 #endif
12235
12236 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12237 if (!tem)
12238 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12239
12240 #ifdef ENABLE_FOLD_CHECKING
12241 md5_init_ctx (&ctx);
12242 fold_checksum_tree (fn, &ctx, &ht);
12243 md5_finish_ctx (&ctx, checksum_after_fn);
12244 ht.empty ();
12245
12246 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12247 fold_check_failed (fn, tem);
12248
12249 md5_init_ctx (&ctx);
12250 for (i = 0; i < nargs; i++)
12251 fold_checksum_tree (argarray[i], &ctx, &ht);
12252 md5_finish_ctx (&ctx, checksum_after_arglist);
12253
12254 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12255 fold_check_failed (NULL_TREE, tem);
12256 #endif
12257 return tem;
12258 }
12259
12260 /* Perform constant folding and related simplification of initializer
12261 expression EXPR. These behave identically to "fold_buildN" but ignore
12262 potential run-time traps and exceptions that fold must preserve. */
12263
12264 #define START_FOLD_INIT \
12265 int saved_signaling_nans = flag_signaling_nans;\
12266 int saved_trapping_math = flag_trapping_math;\
12267 int saved_rounding_math = flag_rounding_math;\
12268 int saved_trapv = flag_trapv;\
12269 int saved_folding_initializer = folding_initializer;\
12270 flag_signaling_nans = 0;\
12271 flag_trapping_math = 0;\
12272 flag_rounding_math = 0;\
12273 flag_trapv = 0;\
12274 folding_initializer = 1;
12275
12276 #define END_FOLD_INIT \
12277 flag_signaling_nans = saved_signaling_nans;\
12278 flag_trapping_math = saved_trapping_math;\
12279 flag_rounding_math = saved_rounding_math;\
12280 flag_trapv = saved_trapv;\
12281 folding_initializer = saved_folding_initializer;
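
/* For example (an illustrative sketch; ZERO stands for a REAL_CST of
   0.0 built by the caller): with -ftrapping-math in effect, fold
   refuses to evaluate 0.0 / 0.0 because the operation should raise an
   invalid-operation exception at run time, but within

     START_FOLD_INIT;
     tree t = fold_build2 (RDIV_EXPR, double_type_node, zero, zero);
     END_FOLD_INIT;

   flag_trapping_math is cleared and the quotient folds to a NaN
   constant, which is what a static initializer needs. */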
12282
12283 tree
12284 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12285 tree type, tree op)
12286 {
12287 tree result;
12288 START_FOLD_INIT;
12289
12290 result = fold_build1_loc (loc, code, type, op);
12291
12292 END_FOLD_INIT;
12293 return result;
12294 }
12295
12296 tree
12297 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12298 tree type, tree op0, tree op1)
12299 {
12300 tree result;
12301 START_FOLD_INIT;
12302
12303 result = fold_build2_loc (loc, code, type, op0, op1);
12304
12305 END_FOLD_INIT;
12306 return result;
12307 }
12308
12309 tree
12310 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12311 int nargs, tree *argarray)
12312 {
12313 tree result;
12314 START_FOLD_INIT;
12315
12316 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12317
12318 END_FOLD_INIT;
12319 return result;
12320 }
12321
12322 #undef START_FOLD_INIT
12323 #undef END_FOLD_INIT
12324
12325 /* Determine if first argument is a multiple of second argument. Return 0 if
12326 it is not, or we cannot easily determine it to be.
12327
12328 An example of the sort of thing we care about (at this point; this routine
12329 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12330 fold cases do now) is discovering that
12331
12332 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12333
12334 is a multiple of
12335
12336 SAVE_EXPR (J * 8)
12337
12338 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12339
12340 This code also handles discovering that
12341
12342 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12343
12344 is a multiple of 8 so we don't have to worry about dealing with a
12345 possible remainder.
12346
12347 Note that we *look* inside a SAVE_EXPR only to determine how it was
12348 calculated; it is not safe for fold to do much of anything else with the
12349 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12350 at run time. For example, the latter example above *cannot* be implemented
12351 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12352 evaluation time of the original SAVE_EXPR is not necessarily the same at
12353 the time the new expression is evaluated. The only optimization of this
12354 sort that would be valid is changing
12355
12356 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12357
12358 divided by 8 to
12359
12360 SAVE_EXPR (I) * SAVE_EXPR (J)
12361
12362 (where the same SAVE_EXPR (J) is used in the original and the
12363 transformed version). */
12364
12365 int
12366 multiple_of_p (tree type, const_tree top, const_tree bottom)
12367 {
12368 gimple *stmt;
12369 tree t1, op1, op2;
12370
12371 if (operand_equal_p (top, bottom, 0))
12372 return 1;
12373
12374 if (TREE_CODE (type) != INTEGER_TYPE)
12375 return 0;
12376
12377 switch (TREE_CODE (top))
12378 {
12379 case BIT_AND_EXPR:
12380 /* Bitwise AND can only clear low bits, so if BOTTOM is a power of
12381 two and either operand is a multiple of BOTTOM, TOP is too. */
12382 if (!integer_pow2p (bottom))
12383 return 0;
12384 /* FALLTHRU */
12385
12386 case MULT_EXPR:
12387 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12388 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12389
12390 case MINUS_EXPR:
12391 /* We cannot prove in general whether op0 - op1 is a multiple of
12392 bottom, so be conservative and check that both op0 and op1 are
12393 multiples of bottom. Note we check the second operand first
12394 since it's usually simpler. */
12395 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12396 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12397
12398 case PLUS_EXPR:
12399 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12400 as op0 - 3 if the expression has unsigned type. For example,
12401 (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
12402 op1 = TREE_OPERAND (top, 1);
12403 if (TYPE_UNSIGNED (type)
12404 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12405 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12406 return (multiple_of_p (type, op1, bottom)
12407 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12408
12409 case LSHIFT_EXPR:
12410 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12411 {
12412 op1 = TREE_OPERAND (top, 1);
12413 /* const_binop may not detect overflow correctly,
12414 so check for it explicitly here. */
12415 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12416 wi::to_wide (op1))
12417 && 0 != (t1 = fold_convert (type,
12418 const_binop (LSHIFT_EXPR,
12419 size_one_node,
12420 op1)))
12421 && !TREE_OVERFLOW (t1))
12422 return multiple_of_p (type, t1, bottom);
12423 }
12424 return 0;
12425
12426 case NOP_EXPR:
12427 /* Can't handle conversions from non-integral or wider integral type. */
12428 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12429 || (TYPE_PRECISION (type)
12430 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12431 return 0;
12432
12433 /* fall through */
12434
12435 case SAVE_EXPR:
12436 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12437
12438 case COND_EXPR:
12439 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12440 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12441
12442 case INTEGER_CST:
12443 if (TREE_CODE (bottom) != INTEGER_CST
12444 || integer_zerop (bottom)
12445 || (TYPE_UNSIGNED (type)
12446 && (tree_int_cst_sgn (top) < 0
12447 || tree_int_cst_sgn (bottom) < 0)))
12448 return 0;
12449 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12450 SIGNED);
12451
12452 case SSA_NAME:
12453 if (TREE_CODE (bottom) == INTEGER_CST
12454 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12455 && gimple_code (stmt) == GIMPLE_ASSIGN)
12456 {
12457 enum tree_code code = gimple_assign_rhs_code (stmt);
12458
12459 /* Check for special cases to see if top is defined as a multiple
12460 of bottom:
12461
12462 top = (X & ~(bottom - 1)) ; bottom is a power of 2
12463
12464 or
12465
12466 Y = X % bottom
12467 top = X - Y. */
12468 if (code == BIT_AND_EXPR
12469 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12470 && TREE_CODE (op2) == INTEGER_CST
12471 && integer_pow2p (bottom)
12472 && wi::multiple_of_p (wi::to_widest (op2),
12473 wi::to_widest (bottom), UNSIGNED))
12474 return 1;
12475
12476 op1 = gimple_assign_rhs1 (stmt);
12477 if (code == MINUS_EXPR
12478 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12479 && TREE_CODE (op2) == SSA_NAME
12480 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12481 && gimple_code (stmt) == GIMPLE_ASSIGN
12482 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12483 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12484 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12485 return 1;
12486 }
12487
12488 /* fall through */
12489
12490 default:
12491 return 0;
12492 }
12493 }
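
/* A worked example (illustrative) of the PLUS_EXPR case above: with
   TYPE a 32-bit unsigned type,

     multiple_of_p (type, (X / 3) * 3 + 0xfffffffd, 3)

   returns 1, since 0xfffffffd is rewritten as -3 (a multiple of 3)
   and the MULT_EXPR case accepts (X / 3) * 3 via its literal factor
   of 3. */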
12494
12495 #define tree_expr_nonnegative_warnv_p(X, Y) \
12496 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12497
12498 #define RECURSE(X) \
12499 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
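
/* The #define above poisons direct calls: any plain spelling of
   tree_expr_nonnegative_warnv_p in this section expands to a "GCC
   error" pragma, so recursive queries must go through RECURSE, which
   increments DEPTH and keeps the recursion bounded.  The parentheses
   around the name in RECURSE suppress the function-like macro and
   reach the real function. */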
12500
12501 /* Return true if CODE or TYPE is known to be non-negative. */
12502
12503 static bool
12504 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12505 {
12506 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12507 && truth_value_p (code))
12508 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12509 have a signed:1 type (where the values are -1 and 0). */
12510 return true;
12511 return false;
12512 }
12513
12514 /* Return true if (CODE OP0) is known to be non-negative. If the return
12515 value is based on the assumption that signed overflow is undefined,
12516 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12517 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12518
12519 bool
12520 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12521 bool *strict_overflow_p, int depth)
12522 {
12523 if (TYPE_UNSIGNED (type))
12524 return true;
12525
12526 switch (code)
12527 {
12528 case ABS_EXPR:
12529 /* We can't return 1 if flag_wrapv is set because
12530 ABS_EXPR<INT_MIN> = INT_MIN. */
12531 if (!ANY_INTEGRAL_TYPE_P (type))
12532 return true;
12533 if (TYPE_OVERFLOW_UNDEFINED (type))
12534 {
12535 *strict_overflow_p = true;
12536 return true;
12537 }
12538 break;
12539
12540 case NON_LVALUE_EXPR:
12541 case FLOAT_EXPR:
12542 case FIX_TRUNC_EXPR:
12543 return RECURSE (op0);
12544
12545 CASE_CONVERT:
12546 {
12547 tree inner_type = TREE_TYPE (op0);
12548 tree outer_type = type;
12549
12550 if (TREE_CODE (outer_type) == REAL_TYPE)
12551 {
12552 if (TREE_CODE (inner_type) == REAL_TYPE)
12553 return RECURSE (op0);
12554 if (INTEGRAL_TYPE_P (inner_type))
12555 {
12556 if (TYPE_UNSIGNED (inner_type))
12557 return true;
12558 return RECURSE (op0);
12559 }
12560 }
12561 else if (INTEGRAL_TYPE_P (outer_type))
12562 {
12563 if (TREE_CODE (inner_type) == REAL_TYPE)
12564 return RECURSE (op0);
12565 if (INTEGRAL_TYPE_P (inner_type))
12566 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12567 && TYPE_UNSIGNED (inner_type);
12568 }
12569 }
12570 break;
12571
12572 default:
12573 return tree_simple_nonnegative_warnv_p (code, type);
12574 }
12575
12576 /* We don't know the sign of `t', so be conservative and return false. */
12577 return false;
12578 }
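
/* Illustrative instance of the CASE_CONVERT logic above: for

     (int) (unsigned short) x

   the inner type is unsigned with precision 16, which is narrower than
   the 32-bit signed result, so the value is known to be non-negative
   regardless of X. */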
12579
12580 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12581 value is based on the assumption that signed overflow is undefined,
12582 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12583 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12584
12585 bool
12586 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12587 tree op1, bool *strict_overflow_p,
12588 int depth)
12589 {
12590 if (TYPE_UNSIGNED (type))
12591 return true;
12592
12593 switch (code)
12594 {
12595 case POINTER_PLUS_EXPR:
12596 case PLUS_EXPR:
12597 if (FLOAT_TYPE_P (type))
12598 return RECURSE (op0) && RECURSE (op1);
12599
12600 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12601 both unsigned and at least 2 bits shorter than the result. */
12602 if (TREE_CODE (type) == INTEGER_TYPE
12603 && TREE_CODE (op0) == NOP_EXPR
12604 && TREE_CODE (op1) == NOP_EXPR)
12605 {
12606 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12607 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12608 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12609 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12610 {
12611 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12612 TYPE_PRECISION (inner2)) + 1;
12613 return prec < TYPE_PRECISION (type);
12614 }
12615 }
12616 break;
12617
12618 case MULT_EXPR:
12619 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12620 {
12621 /* x * x is always non-negative for floating point x,
12622 and for integer x when overflow is undefined. */
12623 if (operand_equal_p (op0, op1, 0)
12624 || (RECURSE (op0) && RECURSE (op1)))
12625 {
12626 if (ANY_INTEGRAL_TYPE_P (type)
12627 && TYPE_OVERFLOW_UNDEFINED (type))
12628 *strict_overflow_p = true;
12629 return true;
12630 }
12631 }
12632
12633 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12634 both unsigned and their combined precision is smaller than the result's. */
12635 if (TREE_CODE (type) == INTEGER_TYPE
12636 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12637 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12638 {
12639 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12640 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12641 : TREE_TYPE (op0);
12642 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12643 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12644 : TREE_TYPE (op1);
12645
12646 bool unsigned0 = TYPE_UNSIGNED (inner0);
12647 bool unsigned1 = TYPE_UNSIGNED (inner1);
12648
12649 if (TREE_CODE (op0) == INTEGER_CST)
12650 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12651
12652 if (TREE_CODE (op1) == INTEGER_CST)
12653 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12654
12655 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12656 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12657 {
12658 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12659 ? tree_int_cst_min_precision (op0, UNSIGNED)
12660 : TYPE_PRECISION (inner0);
12661
12662 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12663 ? tree_int_cst_min_precision (op1, UNSIGNED)
12664 : TYPE_PRECISION (inner1);
12665
12666 return precision0 + precision1 < TYPE_PRECISION (type);
12667 }
12668 }
12669 return false;
12670
12671 case BIT_AND_EXPR:
12672 case MAX_EXPR:
12673 return RECURSE (op0) || RECURSE (op1);
12674
12675 case BIT_IOR_EXPR:
12676 case BIT_XOR_EXPR:
12677 case MIN_EXPR:
12678 case RDIV_EXPR:
12679 case TRUNC_DIV_EXPR:
12680 case CEIL_DIV_EXPR:
12681 case FLOOR_DIV_EXPR:
12682 case ROUND_DIV_EXPR:
12683 return RECURSE (op0) && RECURSE (op1);
12684
12685 case TRUNC_MOD_EXPR:
12686 return RECURSE (op0);
12687
12688 case FLOOR_MOD_EXPR:
12689 return RECURSE (op1);
12690
12691 case CEIL_MOD_EXPR:
12692 case ROUND_MOD_EXPR:
12693 default:
12694 return tree_simple_nonnegative_warnv_p (code, type);
12695 }
12696
12697 /* We don't know the sign of `t', so be conservative and return false. */
12698 return false;
12699 }
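
/* Illustrative instance of the PLUS_EXPR logic above: for

     (int) (unsigned char) a + (int) (unsigned char) b

   both inner types have precision 8, and MAX (8, 8) + 1 = 9 is less
   than the 32-bit result precision, so the sum cannot wrap past
   INT_MAX and is known to be non-negative. */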
12700
12701 /* Return true if T is known to be non-negative. If the return
12702 value is based on the assumption that signed overflow is undefined,
12703 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12704 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12705
12706 bool
12707 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12708 {
12709 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12710 return true;
12711
12712 switch (TREE_CODE (t))
12713 {
12714 case INTEGER_CST:
12715 return tree_int_cst_sgn (t) >= 0;
12716
12717 case REAL_CST:
12718 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12719
12720 case FIXED_CST:
12721 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12722
12723 case COND_EXPR:
12724 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12725
12726 case SSA_NAME:
12727 /* Limit the depth of recursion to avoid quadratic behavior.
12728 This is expected to catch almost all occurrences in practice.
12729 If this code misses important cases that unbounded recursion
12730 would not, passes that need this information could be revised
12731 to provide it through dataflow propagation. */
12732 return (!name_registered_for_update_p (t)
12733 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12734 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12735 strict_overflow_p, depth));
12736
12737 default:
12738 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12739 }
12740 }
12741
12742 /* Return true if a call to FN with arguments ARG0 and ARG1, with result
12743 type TYPE, is known to be non-negative. If the return value is based
12744 on the assumption that signed overflow is undefined, set
12745 *STRICT_OVERFLOW_P to true. DEPTH is the current nesting depth. */
12746
12747 bool
12748 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12749 bool *strict_overflow_p, int depth)
12750 {
12751 switch (fn)
12752 {
12753 CASE_CFN_ACOS:
12754 CASE_CFN_ACOSH:
12755 CASE_CFN_CABS:
12756 CASE_CFN_COSH:
12757 CASE_CFN_ERFC:
12758 CASE_CFN_EXP:
12759 CASE_CFN_EXP10:
12760 CASE_CFN_EXP2:
12761 CASE_CFN_FABS:
12762 CASE_CFN_FDIM:
12763 CASE_CFN_HYPOT:
12764 CASE_CFN_POW10:
12765 CASE_CFN_FFS:
12766 CASE_CFN_PARITY:
12767 CASE_CFN_POPCOUNT:
12768 CASE_CFN_CLZ:
12769 CASE_CFN_CLRSB:
12770 case CFN_BUILT_IN_BSWAP32:
12771 case CFN_BUILT_IN_BSWAP64:
12772 /* Always true. */
12773 return true;
12774
12775 CASE_CFN_SQRT:
12776 /* sqrt(-0.0) is -0.0. */
12777 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12778 return true;
12779 return RECURSE (arg0);
12780
12781 CASE_CFN_ASINH:
12782 CASE_CFN_ATAN:
12783 CASE_CFN_ATANH:
12784 CASE_CFN_CBRT:
12785 CASE_CFN_CEIL:
12786 CASE_CFN_ERF:
12787 CASE_CFN_EXPM1:
12788 CASE_CFN_FLOOR:
12789 CASE_CFN_FMOD:
12790 CASE_CFN_FREXP:
12791 CASE_CFN_ICEIL:
12792 CASE_CFN_IFLOOR:
12793 CASE_CFN_IRINT:
12794 CASE_CFN_IROUND:
12795 CASE_CFN_LCEIL:
12796 CASE_CFN_LDEXP:
12797 CASE_CFN_LFLOOR:
12798 CASE_CFN_LLCEIL:
12799 CASE_CFN_LLFLOOR:
12800 CASE_CFN_LLRINT:
12801 CASE_CFN_LLROUND:
12802 CASE_CFN_LRINT:
12803 CASE_CFN_LROUND:
12804 CASE_CFN_MODF:
12805 CASE_CFN_NEARBYINT:
12806 CASE_CFN_RINT:
12807 CASE_CFN_ROUND:
12808 CASE_CFN_SCALB:
12809 CASE_CFN_SCALBLN:
12810 CASE_CFN_SCALBN:
12811 CASE_CFN_SIGNBIT:
12812 CASE_CFN_SIGNIFICAND:
12813 CASE_CFN_SINH:
12814 CASE_CFN_TANH:
12815 CASE_CFN_TRUNC:
12816 /* True if the 1st argument is nonnegative. */
12817 return RECURSE (arg0);
12818
12819 CASE_CFN_FMAX:
12820 /* True if the 1st OR 2nd arguments are nonnegative. */
12821 return RECURSE (arg0) || RECURSE (arg1);
12822
12823 CASE_CFN_FMIN:
12824 /* True if the 1st AND 2nd arguments are nonnegative. */
12825 return RECURSE (arg0) && RECURSE (arg1);
12826
12827 CASE_CFN_COPYSIGN:
12828 /* True if the 2nd argument is nonnegative. */
12829 return RECURSE (arg1);
12830
12831 CASE_CFN_POWI:
12832 /* True if the 1st argument is nonnegative or the second
12833 argument is an even integer. */
12834 if (TREE_CODE (arg1) == INTEGER_CST
12835 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
12836 return true;
12837 return RECURSE (arg0);
12838
12839 CASE_CFN_POW:
12840 /* True if the 1st argument is nonnegative or the second
12841 argument is an even integer valued real. */
12842 if (TREE_CODE (arg1) == REAL_CST)
12843 {
12844 REAL_VALUE_TYPE c;
12845 HOST_WIDE_INT n;
12846
12847 c = TREE_REAL_CST (arg1);
12848 n = real_to_integer (&c);
12849 if ((n & 1) == 0)
12850 {
12851 REAL_VALUE_TYPE cint;
12852 real_from_integer (&cint, VOIDmode, n, SIGNED);
12853 if (real_identical (&c, &cint))
12854 return true;
12855 }
12856 }
12857 return RECURSE (arg0);
12858
12859 default:
12860 break;
12861 }
12862 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
12863 }
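
/* Illustrative instance of the CASE_CFN_POW logic above: for
   pow (x, 2.0) the exponent is a REAL_CST whose integer value 2 is
   even and identical to the original constant, so the call is known
   to be non-negative whatever the sign of X. */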
12864
12865 /* Return true if T is known to be non-negative. If the return
12866 value is based on the assumption that signed overflow is undefined,
12867 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12868 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12869
12870 static bool
12871 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12872 {
12873 enum tree_code code = TREE_CODE (t);
12874 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12875 return true;
12876
12877 switch (code)
12878 {
12879 case TARGET_EXPR:
12880 {
12881 tree temp = TARGET_EXPR_SLOT (t);
12882 t = TARGET_EXPR_INITIAL (t);
12883
12884 /* If the initializer is non-void, then it's a normal expression
12885 that will be assigned to the slot. */
12886 if (!VOID_TYPE_P (t))
12887 return RECURSE (t);
12888
12889 /* Otherwise, the initializer sets the slot in some way. One common
12890 way is an assignment statement at the end of the initializer. */
12891 while (1)
12892 {
12893 if (TREE_CODE (t) == BIND_EXPR)
12894 t = expr_last (BIND_EXPR_BODY (t));
12895 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12896 || TREE_CODE (t) == TRY_CATCH_EXPR)
12897 t = expr_last (TREE_OPERAND (t, 0));
12898 else if (TREE_CODE (t) == STATEMENT_LIST)
12899 t = expr_last (t);
12900 else
12901 break;
12902 }
12903 if (TREE_CODE (t) == MODIFY_EXPR
12904 && TREE_OPERAND (t, 0) == temp)
12905 return RECURSE (TREE_OPERAND (t, 1));
12906
12907 return false;
12908 }
12909
12910 case CALL_EXPR:
12911 {
12912 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
12913 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
12914
12915 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
12916 get_call_combined_fn (t),
12917 arg0,
12918 arg1,
12919 strict_overflow_p, depth);
12920 }
12921 case COMPOUND_EXPR:
12922 case MODIFY_EXPR:
12923 return RECURSE (TREE_OPERAND (t, 1));
12924
12925 case BIND_EXPR:
12926 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
12927
12928 case SAVE_EXPR:
12929 return RECURSE (TREE_OPERAND (t, 0));
12930
12931 default:
12932 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12933 }
12934 }
12935
12936 #undef RECURSE
12937 #undef tree_expr_nonnegative_warnv_p
12938
12939 /* Return true if T is known to be non-negative. If the return
12940 value is based on the assumption that signed overflow is undefined,
12941 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12942 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12943
12944 bool
12945 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12946 {
12947 enum tree_code code;
12948 if (t == error_mark_node)
12949 return false;
12950
12951 code = TREE_CODE (t);
12952 switch (TREE_CODE_CLASS (code))
12953 {
12954 case tcc_binary:
12955 case tcc_comparison:
12956 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
12957 TREE_TYPE (t),
12958 TREE_OPERAND (t, 0),
12959 TREE_OPERAND (t, 1),
12960 strict_overflow_p, depth);
12961
12962 case tcc_unary:
12963 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
12964 TREE_TYPE (t),
12965 TREE_OPERAND (t, 0),
12966 strict_overflow_p, depth);
12967
12968 case tcc_constant:
12969 case tcc_declaration:
12970 case tcc_reference:
12971 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
12972
12973 default:
12974 break;
12975 }
12976
12977 switch (code)
12978 {
12979 case TRUTH_AND_EXPR:
12980 case TRUTH_OR_EXPR:
12981 case TRUTH_XOR_EXPR:
12982 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
12983 TREE_TYPE (t),
12984 TREE_OPERAND (t, 0),
12985 TREE_OPERAND (t, 1),
12986 strict_overflow_p, depth);
12987 case TRUTH_NOT_EXPR:
12988 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
12989 TREE_TYPE (t),
12990 TREE_OPERAND (t, 0),
12991 strict_overflow_p, depth);
12992
12993 case COND_EXPR:
12994 case CONSTRUCTOR:
12995 case OBJ_TYPE_REF:
12996 case ASSERT_EXPR:
12997 case ADDR_EXPR:
12998 case WITH_SIZE_EXPR:
12999 case SSA_NAME:
13000 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13001
13002 default:
13003 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13004 }
13005 }
13006
13007 /* Return true if `t' is known to be non-negative. Handle warnings
13008 about undefined signed overflow. */
13009
13010 bool
13011 tree_expr_nonnegative_p (tree t)
13012 {
13013 bool ret, strict_overflow_p;
13014
13015 strict_overflow_p = false;
13016 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13017 if (strict_overflow_p)
13018 fold_overflow_warning (("assuming signed overflow does not occur when "
13019 "determining that expression is always "
13020 "non-negative"),
13021 WARN_STRICT_OVERFLOW_MISC);
13022 return ret;
13023 }
13024
13025
13026 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13027 For floating point we further ensure that T is not denormal.
13028 Similar logic is present in nonzero_address_p in rtlanal.c.
13029
13030 If the return value is based on the assumption that signed overflow
13031 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13032 change *STRICT_OVERFLOW_P. */
13033
13034 bool
13035 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13036 bool *strict_overflow_p)
13037 {
13038 switch (code)
13039 {
13040 case ABS_EXPR:
13041 return tree_expr_nonzero_warnv_p (op0,
13042 strict_overflow_p);
13043
13044 case NOP_EXPR:
13045 {
13046 tree inner_type = TREE_TYPE (op0);
13047 tree outer_type = type;
13048
13049 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13050 && tree_expr_nonzero_warnv_p (op0,
13051 strict_overflow_p));
13052 }
13053 break;
13054
13055 case NON_LVALUE_EXPR:
13056 return tree_expr_nonzero_warnv_p (op0,
13057 strict_overflow_p);
13058
13059 default:
13060 break;
13061 }
13062
13063 return false;
13064 }
13065
13066 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13067 For floating point we further ensure that T is not denormal.
13068 Similar logic is present in nonzero_address_p in rtlanal.c.
13069
13070 If the return value is based on the assumption that signed overflow
13071 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13072 change *STRICT_OVERFLOW_P. */
13073
13074 bool
13075 tree_binary_nonzero_warnv_p (enum tree_code code,
13076 tree type,
13077 tree op0,
13078 tree op1, bool *strict_overflow_p)
13079 {
13080 bool sub_strict_overflow_p;
13081 switch (code)
13082 {
13083 case POINTER_PLUS_EXPR:
13084 case PLUS_EXPR:
13085 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13086 {
13087 /* In the presence of negative values it is hard
13088 to say anything. */
13089 sub_strict_overflow_p = false;
13090 if (!tree_expr_nonnegative_warnv_p (op0,
13091 &sub_strict_overflow_p)
13092 || !tree_expr_nonnegative_warnv_p (op1,
13093 &sub_strict_overflow_p))
13094 return false;
13095 /* One of the operands must be positive and the other non-negative. */
13096 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13097 overflows, on a twos-complement machine the sum of two
13098 nonnegative numbers can never be zero. */
13099 return (tree_expr_nonzero_warnv_p (op0,
13100 strict_overflow_p)
13101 || tree_expr_nonzero_warnv_p (op1,
13102 strict_overflow_p));
13103 }
13104 break;
13105
13106 case MULT_EXPR:
13107 if (TYPE_OVERFLOW_UNDEFINED (type))
13108 {
13109 if (tree_expr_nonzero_warnv_p (op0,
13110 strict_overflow_p)
13111 && tree_expr_nonzero_warnv_p (op1,
13112 strict_overflow_p))
13113 {
13114 *strict_overflow_p = true;
13115 return true;
13116 }
13117 }
13118 break;
13119
13120 case MIN_EXPR:
13121 sub_strict_overflow_p = false;
13122 if (tree_expr_nonzero_warnv_p (op0,
13123 &sub_strict_overflow_p)
13124 && tree_expr_nonzero_warnv_p (op1,
13125 &sub_strict_overflow_p))
13126 {
13127 if (sub_strict_overflow_p)
13128 *strict_overflow_p = true;
13129 }
13130 break;
13131
13132 case MAX_EXPR:
13133 sub_strict_overflow_p = false;
13134 if (tree_expr_nonzero_warnv_p (op0,
13135 &sub_strict_overflow_p))
13136 {
13137 if (sub_strict_overflow_p)
13138 *strict_overflow_p = true;
13139
13140 /* When both operands are nonzero, MAX must be too. */
13141 if (tree_expr_nonzero_warnv_p (op1,
13142 strict_overflow_p))
13143 return true;
13144
13145 /* MAX where operand 0 is positive is positive. */
13146 return tree_expr_nonnegative_warnv_p (op0,
13147 strict_overflow_p);
13148 }
13149 /* MAX where operand 1 is positive is positive. */
13150 else if (tree_expr_nonzero_warnv_p (op1,
13151 &sub_strict_overflow_p)
13152 && tree_expr_nonnegative_warnv_p (op1,
13153 &sub_strict_overflow_p))
13154 {
13155 if (sub_strict_overflow_p)
13156 *strict_overflow_p = true;
13157 return true;
13158 }
13159 break;
13160
13161 case BIT_IOR_EXPR:
13162 return (tree_expr_nonzero_warnv_p (op1,
13163 strict_overflow_p)
13164 || tree_expr_nonzero_warnv_p (op0,
13165 strict_overflow_p));
13166
13167 default:
13168 break;
13169 }
13170
13171 return false;
13172 }
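
/* Illustrative instance of the MULT_EXPR logic above: if X is known
   nonzero, then for signed arithmetic with undefined overflow

     x * x

   is reported nonzero, and *STRICT_OVERFLOW_P is set because the
   conclusion relies on the product not wrapping to zero. */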
13173
13174 /* Return true when T is an address and is known to be nonzero.
13175 For floating point we further ensure that T is not denormal.
13176 Similar logic is present in nonzero_address_p in rtlanal.c.
13177
13178 If the return value is based on the assumption that signed overflow
13179 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13180 change *STRICT_OVERFLOW_P. */
13181
13182 bool
13183 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13184 {
13185 bool sub_strict_overflow_p;
13186 switch (TREE_CODE (t))
13187 {
13188 case INTEGER_CST:
13189 return !integer_zerop (t);
13190
13191 case ADDR_EXPR:
13192 {
13193 tree base = TREE_OPERAND (t, 0);
13194
13195 if (!DECL_P (base))
13196 base = get_base_address (base);
13197
13198 if (base && TREE_CODE (base) == TARGET_EXPR)
13199 base = TARGET_EXPR_SLOT (base);
13200
13201 if (!base)
13202 return false;
13203
13204 /* For objects in the symbol table, check if we know they are non-zero.
13205 Don't do anything for variables and functions before symtab is built;
13206 it is quite possible that they will be declared weak later. */
13207 int nonzero_addr = maybe_nonzero_address (base);
13208 if (nonzero_addr >= 0)
13209 return nonzero_addr;
13210
13211 /* Constants are never weak. */
13212 if (CONSTANT_CLASS_P (base))
13213 return true;
13214
13215 return false;
13216 }
13217
13218 case COND_EXPR:
13219 sub_strict_overflow_p = false;
13220 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13221 &sub_strict_overflow_p)
13222 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13223 &sub_strict_overflow_p))
13224 {
13225 if (sub_strict_overflow_p)
13226 *strict_overflow_p = true;
13227 return true;
13228 }
13229 break;
13230
13231 case SSA_NAME:
13232 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13233 break;
13234 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13235
13236 default:
13237 break;
13238 }
13239 return false;
13240 }
13241
13242 #define integer_valued_real_p(X) \
13243 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13244
13245 #define RECURSE(X) \
13246 ((integer_valued_real_p) (X, depth + 1))
13247
13248 /* Return true if the floating point result of (CODE OP0) has an
13249 integer value. We also allow +Inf, -Inf and NaN to be considered
13250 integer values. Return false for signaling NaN.
13251
13252 DEPTH is the current nesting depth of the query. */
13253
13254 bool
13255 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13256 {
13257 switch (code)
13258 {
13259 case FLOAT_EXPR:
13260 return true;
13261
13262 case ABS_EXPR:
13263 return RECURSE (op0);
13264
13265 CASE_CONVERT:
13266 {
13267 tree type = TREE_TYPE (op0);
13268 if (TREE_CODE (type) == INTEGER_TYPE)
13269 return true;
13270 if (TREE_CODE (type) == REAL_TYPE)
13271 return RECURSE (op0);
13272 break;
13273 }
13274
13275 default:
13276 break;
13277 }
13278 return false;
13279 }
13280
13281 /* Return true if the floating point result of (CODE OP0 OP1) has an
13282 integer value. We also allow +Inf, -Inf and NaN to be considered
13283 integer values. Return false for signaling NaN.
13284
13285 DEPTH is the current nesting depth of the query. */
13286
13287 bool
13288 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13289 {
13290 switch (code)
13291 {
13292 case PLUS_EXPR:
13293 case MINUS_EXPR:
13294 case MULT_EXPR:
13295 case MIN_EXPR:
13296 case MAX_EXPR:
13297 return RECURSE (op0) && RECURSE (op1);
13298
13299 default:
13300 break;
13301 }
13302 return false;
13303 }
13304
13305 /* Return true if the floating point result of calling FN with arguments
13306 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13307 considered integer values. Return false for signaling NaN. If FN
13308 takes fewer than 2 arguments, the remaining ARGn are null.
13309
13310 DEPTH is the current nesting depth of the query. */
13311
13312 bool
13313 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13314 {
13315 switch (fn)
13316 {
13317 CASE_CFN_CEIL:
13318 CASE_CFN_FLOOR:
13319 CASE_CFN_NEARBYINT:
13320 CASE_CFN_RINT:
13321 CASE_CFN_ROUND:
13322 CASE_CFN_TRUNC:
13323 return true;
13324
13325 CASE_CFN_FMIN:
13326 CASE_CFN_FMAX:
13327 return RECURSE (arg0) && RECURSE (arg1);
13328
13329 default:
13330 break;
13331 }
13332 return false;
13333 }
13334
13335 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13336 has an integer value. We also allow +Inf, -Inf and NaN to be
13337 considered integer values. Return false for signaling NaN.
13338
13339 DEPTH is the current nesting depth of the query. */
13340
13341 bool
13342 integer_valued_real_single_p (tree t, int depth)
13343 {
13344 switch (TREE_CODE (t))
13345 {
13346 case REAL_CST:
13347 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13348
13349 case COND_EXPR:
13350 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13351
13352 case SSA_NAME:
13353 /* Limit the depth of recursion to avoid quadratic behavior.
13354 This is expected to catch almost all occurrences in practice.
13355 If this code misses important cases that unbounded recursion
13356 would not, passes that need this information could be revised
13357 to provide it through dataflow propagation. */
13358 return (!name_registered_for_update_p (t)
13359 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13360 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13361 depth));
13362
13363 default:
13364 break;
13365 }
13366 return false;
13367 }
13368
13369 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13370 has an integer value. We also allow +Inf, -Inf and NaN to be
13371 considered integer values. Return false for signaling NaN.
13372
13373 DEPTH is the current nesting depth of the query. */
13374
13375 static bool
13376 integer_valued_real_invalid_p (tree t, int depth)
13377 {
13378 switch (TREE_CODE (t))
13379 {
13380 case COMPOUND_EXPR:
13381 case MODIFY_EXPR:
13382 case BIND_EXPR:
13383 return RECURSE (TREE_OPERAND (t, 1));
13384
13385 case SAVE_EXPR:
13386 return RECURSE (TREE_OPERAND (t, 0));
13387
13388 default:
13389 break;
13390 }
13391 return false;
13392 }
13393
13394 #undef RECURSE
13395 #undef integer_valued_real_p
13396
13397 /* Return true if the floating point expression T has an integer value.
13398 We also allow +Inf, -Inf and NaN to be considered integer values.
13399 Return false for signaling NaN.
13400
13401 DEPTH is the current nesting depth of the query. */
13402
13403 bool
13404 integer_valued_real_p (tree t, int depth)
13405 {
13406 if (t == error_mark_node)
13407 return false;
13408
13409 tree_code code = TREE_CODE (t);
13410 switch (TREE_CODE_CLASS (code))
13411 {
13412 case tcc_binary:
13413 case tcc_comparison:
13414 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13415 TREE_OPERAND (t, 1), depth);
13416
13417 case tcc_unary:
13418 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13419
13420 case tcc_constant:
13421 case tcc_declaration:
13422 case tcc_reference:
13423 return integer_valued_real_single_p (t, depth);
13424
13425 default:
13426 break;
13427 }
13428
13429 switch (code)
13430 {
13431 case COND_EXPR:
13432 case SSA_NAME:
13433 return integer_valued_real_single_p (t, depth);
13434
13435 case CALL_EXPR:
13436 {
13437 tree arg0 = (call_expr_nargs (t) > 0
13438 ? CALL_EXPR_ARG (t, 0)
13439 : NULL_TREE);
13440 tree arg1 = (call_expr_nargs (t) > 1
13441 ? CALL_EXPR_ARG (t, 1)
13442 : NULL_TREE);
13443 return integer_valued_real_call_p (get_call_combined_fn (t),
13444 arg0, arg1, depth);
13445 }
13446
13447 default:
13448 return integer_valued_real_invalid_p (t, depth);
13449 }
13450 }
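
/* Illustrative combination of the cases above: for

     (double) i + floor (x)

   the PLUS_EXPR case recurses into both operands; the FLOAT_EXPR
   conversion of the integer I and the CASE_CFN_FLOOR call are each
   integer valued, so the whole sum is too. */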
13451
13452 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13453 attempt to fold the expression to a constant without modifying TYPE,
13454 OP0 or OP1.
13455
13456 If the expression could be simplified to a constant, then return
13457 the constant. If the expression would not be simplified to a
13458 constant, then return NULL_TREE. */
13459
13460 tree
13461 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13462 {
13463 tree tem = fold_binary (code, type, op0, op1);
13464 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13465 }
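
/* Illustrative only:

     fold_binary_to_constant (PLUS_EXPR, integer_type_node,
			      build_int_cst (integer_type_node, 2),
			      build_int_cst (integer_type_node, 3))

   returns the INTEGER_CST 5, while the same call with a VAR_DECL
   operand returns NULL_TREE because the folded result would not
   satisfy TREE_CONSTANT. */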
13466
13467 /* Given the components of a unary expression CODE, TYPE and OP0,
13468 attempt to fold the expression to a constant without modifying
13469 TYPE or OP0.
13470
13471 If the expression could be simplified to a constant, then return
13472 the constant. If the expression would not be simplified to a
13473 constant, then return NULL_TREE. */
13474
13475 tree
13476 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13477 {
13478 tree tem = fold_unary (code, type, op0);
13479 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13480 }
13481
13482 /* If EXP represents referencing an element in a constant string
13483 (either via pointer arithmetic or array indexing), return the
13484 tree representing the value accessed, otherwise return NULL. */
13485
13486 tree
13487 fold_read_from_constant_string (tree exp)
13488 {
13489 if ((TREE_CODE (exp) == INDIRECT_REF
13490 || TREE_CODE (exp) == ARRAY_REF)
13491 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13492 {
13493 tree exp1 = TREE_OPERAND (exp, 0);
13494 tree index;
13495 tree string;
13496 location_t loc = EXPR_LOCATION (exp);
13497
13498 if (TREE_CODE (exp) == INDIRECT_REF)
13499 string = string_constant (exp1, &index);
13500 else
13501 {
13502 tree low_bound = array_ref_low_bound (exp);
13503 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13504
13505 /* Optimize the special-case of a zero lower bound.
13506
13507 We convert the low_bound to sizetype to avoid some problems
13508 with constant folding. (E.g. suppose the lower bound is 1,
13509 and its mode is QI. Without the conversion, (ARRAY
13510 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13511 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13512 if (! integer_zerop (low_bound))
13513 index = size_diffop_loc (loc, index,
13514 fold_convert_loc (loc, sizetype, low_bound));
13515
13516 string = exp1;
13517 }
13518
13519 scalar_int_mode char_mode;
13520 if (string
13521 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13522 && TREE_CODE (string) == STRING_CST
13523 && TREE_CODE (index) == INTEGER_CST
13524 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13525 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13526 &char_mode)
13527 && GET_MODE_SIZE (char_mode) == 1)
13528 return build_int_cst_type (TREE_TYPE (exp),
13529 (TREE_STRING_POINTER (string)
13530 [TREE_INT_CST_LOW (index)]));
13531 }
13532 return NULL;
13533 }
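
/* Illustrative only: for the C expression "abc"[1], EXP is an
   ARRAY_REF of the STRING_CST "abc" with index 1, and the function
   returns the INTEGER_CST 'b' in the element type. */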
13534
13535 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13536 an integer constant, real, or fixed-point constant.
13537
13538 TYPE is the type of the result. */
13539
13540 static tree
13541 fold_negate_const (tree arg0, tree type)
13542 {
13543 tree t = NULL_TREE;
13544
13545 switch (TREE_CODE (arg0))
13546 {
13547 case INTEGER_CST:
13548 {
13549 bool overflow;
13550 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13551 t = force_fit_type (type, val, 1,
13552 (overflow && ! TYPE_UNSIGNED (type))
13553 || TREE_OVERFLOW (arg0));
13554 break;
13555 }
13556
13557 case REAL_CST:
13558 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13559 break;
13560
13561 case FIXED_CST:
13562 {
13563 FIXED_VALUE_TYPE f;
13564 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13565 &(TREE_FIXED_CST (arg0)), NULL,
13566 TYPE_SATURATING (type));
13567 t = build_fixed (type, f);
13568 /* Propagate overflow flags. */
13569 if (overflow_p | TREE_OVERFLOW (arg0))
13570 TREE_OVERFLOW (t) = 1;
13571 break;
13572 }
13573
13574 default:
13575 gcc_unreachable ();
13576 }
13577
13578 return t;
13579 }
13580
13581 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13582 an integer constant or real constant.
13583
13584 TYPE is the type of the result. */
13585
13586 tree
13587 fold_abs_const (tree arg0, tree type)
13588 {
13589 tree t = NULL_TREE;
13590
13591 switch (TREE_CODE (arg0))
13592 {
13593 case INTEGER_CST:
13594 {
13595 /* If the value is unsigned or non-negative, then the absolute value
13596 is the same as the ordinary value. */
13597 if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
13598 t = arg0;
13599
13600 /* If the value is negative, then the absolute value is
13601 its negation. */
13602 else
13603 {
13604 bool overflow;
13605 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13606 t = force_fit_type (type, val, -1,
13607 overflow | TREE_OVERFLOW (arg0));
13608 }
13609 }
13610 break;
13611
13612 case REAL_CST:
13613 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13614 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13615 else
13616 t = arg0;
13617 break;
13618
13619 default:
13620 gcc_unreachable ();
13621 }
13622
13623 return t;
13624 }
13625
13626 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13627 constant. TYPE is the type of the result. */
13628
13629 static tree
13630 fold_not_const (const_tree arg0, tree type)
13631 {
13632 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13633
13634 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13635 }
13636
13637 /* Given CODE, a relational operator, the target type, TYPE and two
13638 constant operands OP0 and OP1, return the result of the
13639 relational operation. If the result is not a compile time
13640 constant, then return NULL_TREE. */
13641
13642 static tree
13643 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13644 {
13645 int result, invert;
13646
13647 /* From here on, the only cases we handle are when the result is
13648 known to be a constant. */
13649
13650 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13651 {
13652 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13653 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13654
13655 /* Handle the cases where either operand is a NaN. */
13656 if (real_isnan (c0) || real_isnan (c1))
13657 {
13658 switch (code)
13659 {
13660 case EQ_EXPR:
13661 case ORDERED_EXPR:
13662 result = 0;
13663 break;
13664
13665 case NE_EXPR:
13666 case UNORDERED_EXPR:
13667 case UNLT_EXPR:
13668 case UNLE_EXPR:
13669 case UNGT_EXPR:
13670 case UNGE_EXPR:
13671 case UNEQ_EXPR:
13672 result = 1;
13673 break;
13674
13675 case LT_EXPR:
13676 case LE_EXPR:
13677 case GT_EXPR:
13678 case GE_EXPR:
13679 case LTGT_EXPR:
13680 if (flag_trapping_math)
13681 return NULL_TREE;
13682 result = 0;
13683 break;
13684
13685 default:
13686 gcc_unreachable ();
13687 }
13688
13689 return constant_boolean_node (result, type);
13690 }
13691
13692 return constant_boolean_node (real_compare (code, c0, c1), type);
13693 }
13694
13695 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13696 {
13697 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13698 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13699 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13700 }
13701
13702 /* Handle equality/inequality of complex constants. */
13703 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13704 {
13705 tree rcond = fold_relational_const (code, type,
13706 TREE_REALPART (op0),
13707 TREE_REALPART (op1));
13708 tree icond = fold_relational_const (code, type,
13709 TREE_IMAGPART (op0),
13710 TREE_IMAGPART (op1));
13711 if (code == EQ_EXPR)
13712 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13713 else if (code == NE_EXPR)
13714 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13715 else
13716 return NULL_TREE;
13717 }
13718
13719 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13720 {
13721 if (!VECTOR_TYPE_P (type))
13722 {
13723 /* Have vector comparison with scalar boolean result. */
13724 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13725 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13726 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13727 {
13728 tree elem0 = VECTOR_CST_ELT (op0, i);
13729 tree elem1 = VECTOR_CST_ELT (op1, i);
13730 tree tmp = fold_relational_const (code, type, elem0, elem1);
13731 if (tmp == NULL_TREE)
13732 return NULL_TREE;
13733 if (integer_zerop (tmp))
13734 return constant_boolean_node (false, type);
13735 }
13736 return constant_boolean_node (true, type);
13737 }
13738 unsigned count = VECTOR_CST_NELTS (op0);
13739 gcc_assert (VECTOR_CST_NELTS (op1) == count
13740 && TYPE_VECTOR_SUBPARTS (type) == count);
13741
13742 auto_vec<tree, 32> elts (count);
13743 for (unsigned i = 0; i < count; i++)
13744 {
13745 tree elem_type = TREE_TYPE (type);
13746 tree elem0 = VECTOR_CST_ELT (op0, i);
13747 tree elem1 = VECTOR_CST_ELT (op1, i);
13748
13749 tree tem = fold_relational_const (code, elem_type,
13750 elem0, elem1);
13751
13752 if (tem == NULL_TREE)
13753 return NULL_TREE;
13754
13755 elts.quick_push (build_int_cst (elem_type,
13756 integer_zerop (tem) ? 0 : -1));
13757 }
13758
13759 return build_vector (type, elts);
13760 }
13761
13762 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13763
13764 To compute GT, swap the arguments and do LT.
13765 To compute GE, do LT and invert the result.
13766 To compute LE, swap the arguments, do LT and invert the result.
13767 To compute NE, do EQ and invert the result.
13768
13769 Therefore, the code below must handle only EQ and LT. */
13770
13771 if (code == LE_EXPR || code == GT_EXPR)
13772 {
13773 std::swap (op0, op1);
13774 code = swap_tree_comparison (code);
13775 }
13776
13777 /* Note that it is safe to invert for real values here because we
13778 have already handled the one case where it matters. */
13779
13780 invert = 0;
13781 if (code == NE_EXPR || code == GE_EXPR)
13782 {
13783 invert = 1;
13784 code = invert_tree_comparison (code, false);
13785 }
13786
13787 /* Compute a result for LT or EQ if args permit;
13788 otherwise return NULL_TREE. */
13789 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13790 {
13791 if (code == EQ_EXPR)
13792 result = tree_int_cst_equal (op0, op1);
13793 else
13794 result = tree_int_cst_lt (op0, op1);
13795 }
13796 else
13797 return NULL_TREE;
13798
13799 if (invert)
13800 result ^= 1;
13801 return constant_boolean_node (result, type);
13802 }
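
/* Illustrative instances of the NaN handling above: NaN == NaN folds
   to 0 and NaN != NaN folds to 1, while an ordered comparison such as
   NaN < 1.0 folds to 0 only when !flag_trapping_math, since otherwise
   it must be left to raise an invalid-operation exception at run
   time. */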
13803
13804 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13805 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13806 itself. */
13807
13808 tree
13809 fold_build_cleanup_point_expr (tree type, tree expr)
13810 {
13811 /* If the expression does not have side effects then we don't have to wrap
13812 it with a cleanup point expression. */
13813 if (!TREE_SIDE_EFFECTS (expr))
13814 return expr;
13815
13816 /* If the expression is a return, check whether the operand of the return,
13817 or the right hand side of the modify expression inside the return, has
13818 side effects. If either does not, we don't need to wrap the expression
13819 in a cleanup point expression. Note we don't check the left hand side
13820 of the modify because it should always be a return decl. */
13821 if (TREE_CODE (expr) == RETURN_EXPR)
13822 {
13823 tree op = TREE_OPERAND (expr, 0);
13824 if (!op || !TREE_SIDE_EFFECTS (op))
13825 return expr;
13826 op = TREE_OPERAND (op, 1);
13827 if (!TREE_SIDE_EFFECTS (op))
13828 return expr;
13829 }
13830
13831 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
13832 }
13833
13834 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13835 of an indirection through OP0, or NULL_TREE if no simplification is
13836 possible. */
13837
13838 tree
13839 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13840 {
13841 tree sub = op0;
13842 tree subtype;
13843
13844 STRIP_NOPS (sub);
13845 subtype = TREE_TYPE (sub);
13846 if (!POINTER_TYPE_P (subtype)
13847 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
13848 return NULL_TREE;
13849
13850 if (TREE_CODE (sub) == ADDR_EXPR)
13851 {
13852 tree op = TREE_OPERAND (sub, 0);
13853 tree optype = TREE_TYPE (op);
13854 /* *&CONST_DECL -> the value of the const decl. */
13855 if (TREE_CODE (op) == CONST_DECL)
13856 return DECL_INITIAL (op);
13857 /* *&p => p; make sure to handle *&"str"[cst] here. */
13858 if (type == optype)
13859 {
13860 tree fop = fold_read_from_constant_string (op);
13861 if (fop)
13862 return fop;
13863 else
13864 return op;
13865 }
13866 /* *(foo *)&fooarray => fooarray[0] */
13867 else if (TREE_CODE (optype) == ARRAY_TYPE
13868 && type == TREE_TYPE (optype)
13869 && (!in_gimple_form
13870 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13871 {
13872 tree type_domain = TYPE_DOMAIN (optype);
13873 tree min_val = size_zero_node;
13874 if (type_domain && TYPE_MIN_VALUE (type_domain))
13875 min_val = TYPE_MIN_VALUE (type_domain);
13876 if (in_gimple_form
13877 && TREE_CODE (min_val) != INTEGER_CST)
13878 return NULL_TREE;
13879 return build4_loc (loc, ARRAY_REF, type, op, min_val,
13880 NULL_TREE, NULL_TREE);
13881 }
13882 /* *(foo *)&complexfoo => __real__ complexfoo */
13883 else if (TREE_CODE (optype) == COMPLEX_TYPE
13884 && type == TREE_TYPE (optype))
13885 return fold_build1_loc (loc, REALPART_EXPR, type, op);
13886 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13887 else if (TREE_CODE (optype) == VECTOR_TYPE
13888 && type == TREE_TYPE (optype))
13889 {
13890 tree part_width = TYPE_SIZE (type);
13891 tree index = bitsize_int (0);
13892 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
13893 }
13894 }
13895
13896 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
13897 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13898 {
13899 tree op00 = TREE_OPERAND (sub, 0);
13900 tree op01 = TREE_OPERAND (sub, 1);
13901
13902 STRIP_NOPS (op00);
13903 if (TREE_CODE (op00) == ADDR_EXPR)
13904 {
13905 tree op00type;
13906 op00 = TREE_OPERAND (op00, 0);
13907 op00type = TREE_TYPE (op00);
13908
13909 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
13910 if (TREE_CODE (op00type) == VECTOR_TYPE
13911 && type == TREE_TYPE (op00type))
13912 {
13913 tree part_width = TYPE_SIZE (type);
13914 unsigned HOST_WIDE_INT max_offset
13915 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
13916 * TYPE_VECTOR_SUBPARTS (op00type));
13917 if (tree_int_cst_sign_bit (op01) == 0
13918 && compare_tree_int (op01, max_offset) == -1)
13919 {
13920 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
13921 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
13922 tree index = bitsize_int (indexi);
13923 return fold_build3_loc (loc,
13924 BIT_FIELD_REF, type, op00,
13925 part_width, index);
13926 }
13927 }
13928 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13929 else if (TREE_CODE (op00type) == COMPLEX_TYPE
13930 && type == TREE_TYPE (op00type))
13931 {
13932 tree size = TYPE_SIZE_UNIT (type);
13933 if (tree_int_cst_equal (size, op01))
13934 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
13935 }
13936 /* ((foo *)&fooarray)[1] => fooarray[1] */
13937 else if (TREE_CODE (op00type) == ARRAY_TYPE
13938 && type == TREE_TYPE (op00type))
13939 {
13940 tree type_domain = TYPE_DOMAIN (op00type);
13941 tree min = size_zero_node;
13942 if (type_domain && TYPE_MIN_VALUE (type_domain))
13943 min = TYPE_MIN_VALUE (type_domain);
13944 offset_int off = wi::to_offset (op01);
13945 offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
13946 offset_int remainder;
13947 off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
13948 if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
13949 {
13950 off = off + wi::to_offset (min);
13951 op01 = wide_int_to_tree (sizetype, off);
13952 return build4_loc (loc, ARRAY_REF, type, op00, op01,
13953 NULL_TREE, NULL_TREE);
13954 }
13955 }
13956 }
13957 }
13958
13959 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13960 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13961 && type == TREE_TYPE (TREE_TYPE (subtype))
13962 && (!in_gimple_form
13963 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13964 {
13965 tree type_domain;
13966 tree min_val = size_zero_node;
13967 sub = build_fold_indirect_ref_loc (loc, sub);
13968 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13969 if (type_domain && TYPE_MIN_VALUE (type_domain))
13970 min_val = TYPE_MIN_VALUE (type_domain);
13971 if (in_gimple_form
13972 && TREE_CODE (min_val) != INTEGER_CST)
13973 return NULL_TREE;
13974 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
13975 NULL_TREE);
13976 }
13977
13978 return NULL_TREE;
13979 }
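/* Source-level view of the simplifications above (illustrative; at the
   tree level the POINTER_PLUS_EXPR offsets are byte offsets):

     int a[4];            *(int *) &a        =>  a[0]
                          ((int *) &a)[1]    =>  a[1]
     _Complex double c;   *(double *) &c     =>  __real__ c
                          ((double *) &c)[1] =>  __imag__ c
     v4si v;              *(int *) &v        =>  BIT_FIELD_REF <v, 32, 0>  */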
13980
13981 /* Builds an expression for an indirection through T, simplifying some
13982 cases. */
13983
13984 tree
13985 build_fold_indirect_ref_loc (location_t loc, tree t)
13986 {
13987 tree type = TREE_TYPE (TREE_TYPE (t));
13988 tree sub = fold_indirect_ref_1 (loc, type, t);
13989
13990 if (sub)
13991 return sub;
13992
13993 return build1_loc (loc, INDIRECT_REF, type, t);
13994 }
13995
13996 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13997
13998 tree
13999 fold_indirect_ref_loc (location_t loc, tree t)
14000 {
14001 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14002
14003 if (sub)
14004 return sub;
14005 else
14006 return t;
14007 }
14008
14009 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14010 whose result is ignored. The type of the returned tree need not be
14011 the same as that of the original expression. */
14012
14013 tree
14014 fold_ignored_result (tree t)
14015 {
14016 if (!TREE_SIDE_EFFECTS (t))
14017 return integer_zero_node;
14018
14019 for (;;)
14020 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14021 {
14022 case tcc_unary:
14023 t = TREE_OPERAND (t, 0);
14024 break;
14025
14026 case tcc_binary:
14027 case tcc_comparison:
14028 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14029 t = TREE_OPERAND (t, 0);
14030 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14031 t = TREE_OPERAND (t, 1);
14032 else
14033 return t;
14034 break;
14035
14036 case tcc_expression:
14037 switch (TREE_CODE (t))
14038 {
14039 case COMPOUND_EXPR:
14040 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14041 return t;
14042 t = TREE_OPERAND (t, 0);
14043 break;
14044
14045 case COND_EXPR:
14046 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14047 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14048 return t;
14049 t = TREE_OPERAND (t, 0);
14050 break;
14051
14052 default:
14053 return t;
14054 }
14055 break;
14056
14057 default:
14058 return t;
14059 }
14060 }
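/* Illustrative examples (source-level view, X and Y free of side
   effects, F and C with side effects):

     f () + x    =>  f ()    binary op, the clean operand is dropped
     (f (), x)   =>  f ()    COMPOUND_EXPR, the ignored tail is dropped
     c ? x : y   =>  c       COND_EXPR whose arms are both clean
     x           =>  0       no side effects at all to preserve  */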
14061
14062 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14063
14064 tree
14065 round_up_loc (location_t loc, tree value, unsigned int divisor)
14066 {
14067 tree div = NULL_TREE;
14068
14069 if (divisor == 1)
14070 return value;
14071
14072 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14073 have to do anything. Only do this check when VALUE is not a
14074 constant, because for a constant the check is more expensive than
14075 simply doing the rounding. */
14076 if (TREE_CODE (value) != INTEGER_CST)
14077 {
14078 div = build_int_cst (TREE_TYPE (value), divisor);
14079
14080 if (multiple_of_p (TREE_TYPE (value), value, div))
14081 return value;
14082 }
14083
14084 /* If divisor is a power of two, simplify this to bit manipulation. */
14085 if (pow2_or_zerop (divisor))
14086 {
14087 if (TREE_CODE (value) == INTEGER_CST)
14088 {
14089 wide_int val = wi::to_wide (value);
14090 bool overflow_p;
14091
14092 if ((val & (divisor - 1)) == 0)
14093 return value;
14094
14095 overflow_p = TREE_OVERFLOW (value);
14096 val += divisor - 1;
14097 val &= (int) -divisor;
14098 if (val == 0)
14099 overflow_p = true;
14100
14101 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14102 }
14103 else
14104 {
14105 tree t;
14106
14107 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14108 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14109 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14110 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14111 }
14112 }
14113 else
14114 {
14115 if (!div)
14116 div = build_int_cst (TREE_TYPE (value), divisor);
14117 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14118 value = size_binop_loc (loc, MULT_EXPR, value, div);
14119 }
14120
14121 return value;
14122 }
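/* A stand-alone sketch (hypothetical helper, not used by GCC) of the
   two constant-value strategies above: bit manipulation for a
   power-of-two divisor, division otherwise.  DIVISOR is assumed
   nonzero.  For example round_up_hwi (13, 8) == 16 and
   round_up_hwi (13, 5) == 15.  */

static unsigned HOST_WIDE_INT
round_up_hwi (unsigned HOST_WIDE_INT value, unsigned int divisor)
{
  if ((divisor & (divisor - 1)) == 0)
    /* (value + divisor - 1) with the low bits masked off.  */
    return (value + divisor - 1) & -(unsigned HOST_WIDE_INT) divisor;
  return (value + divisor - 1) / divisor * divisor;
}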
14123
14124 /* Likewise, but round down. */
14125
14126 tree
14127 round_down_loc (location_t loc, tree value, int divisor)
14128 {
14129 tree div = NULL_TREE;
14130
14131 gcc_assert (divisor > 0);
14132 if (divisor == 1)
14133 return value;
14134
14135 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14136 have to do anything. Only do this check when VALUE is not a
14137 constant, because for a constant the check is more expensive than
14138 simply doing the rounding. */
14139 if (TREE_CODE (value) != INTEGER_CST)
14140 {
14141 div = build_int_cst (TREE_TYPE (value), divisor);
14142
14143 if (multiple_of_p (TREE_TYPE (value), value, div))
14144 return value;
14145 }
14146
14147 /* If divisor is a power of two, simplify this to bit manipulation. */
14148 if (pow2_or_zerop (divisor))
14149 {
14150 tree t;
14151
14152 t = build_int_cst (TREE_TYPE (value), -divisor);
14153 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14154 }
14155 else
14156 {
14157 if (!div)
14158 div = build_int_cst (TREE_TYPE (value), divisor);
14159 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14160 value = size_binop_loc (loc, MULT_EXPR, value, div);
14161 }
14162
14163 return value;
14164 }
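/* Likewise for rounding down (hypothetical helper, DIVISOR nonzero):
   round_down_hwi (13, 8) == 8 and round_down_hwi (13, 5) == 10.  */

static unsigned HOST_WIDE_INT
round_down_hwi (unsigned HOST_WIDE_INT value, unsigned int divisor)
{
  if ((divisor & (divisor - 1)) == 0)
    return value & -(unsigned HOST_WIDE_INT) divisor;
  return value / divisor * divisor;
}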
14165
14166 /* Returns a pointer to the base of the object addressed by EXP and
14167 extracts information about the offset of the access, storing it
14168 to *PBITPOS and *POFFSET. */
14169
14170 static tree
14171 split_address_to_core_and_offset (tree exp,
14172 HOST_WIDE_INT *pbitpos, tree *poffset)
14173 {
14174 tree core;
14175 machine_mode mode;
14176 int unsignedp, reversep, volatilep;
14177 HOST_WIDE_INT bitsize;
14178 location_t loc = EXPR_LOCATION (exp);
14179
14180 if (TREE_CODE (exp) == ADDR_EXPR)
14181 {
14182 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14183 poffset, &mode, &unsignedp, &reversep,
14184 &volatilep);
14185 core = build_fold_addr_expr_loc (loc, core);
14186 }
14187 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14188 {
14189 core = TREE_OPERAND (exp, 0);
14190 STRIP_NOPS (core);
14191 *pbitpos = 0;
14192 *poffset = TREE_OPERAND (exp, 1);
14193 if (TREE_CODE (*poffset) == INTEGER_CST)
14194 {
14195 offset_int tem = wi::sext (wi::to_offset (*poffset),
14196 TYPE_PRECISION (TREE_TYPE (*poffset)));
14197 tem <<= LOG2_BITS_PER_UNIT;
14198 if (wi::fits_shwi_p (tem))
14199 {
14200 *pbitpos = tem.to_shwi ();
14201 *poffset = NULL_TREE;
14202 }
14203 }
14204 }
14205 else
14206 {
14207 core = exp;
14208 *pbitpos = 0;
14209 *poffset = NULL_TREE;
14210 }
14211
14212 return core;
14213 }
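/* For example (illustrative): for "&s.f" the core is "&s" and *PBITPOS
   the constant bit offset of field F, with any variable part of the
   address left in *POFFSET; for "p + 4" the core is "p" with
   *PBITPOS == 4 * BITS_PER_UNIT and *POFFSET == NULL_TREE.  */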
14214
14215 /* Returns true if the addresses E1 and E2 differ by a constant,
14216 false otherwise. If they do, E1 - E2 (in bytes) is stored in *DIFF. */
14217
14218 bool
14219 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14220 {
14221 tree core1, core2;
14222 HOST_WIDE_INT bitpos1, bitpos2;
14223 tree toffset1, toffset2, tdiff, type;
14224
14225 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14226 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14227
14228 if (bitpos1 % BITS_PER_UNIT != 0
14229 || bitpos2 % BITS_PER_UNIT != 0
14230 || !operand_equal_p (core1, core2, 0))
14231 return false;
14232
14233 if (toffset1 && toffset2)
14234 {
14235 type = TREE_TYPE (toffset1);
14236 if (type != TREE_TYPE (toffset2))
14237 toffset2 = fold_convert (type, toffset2);
14238
14239 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14240 if (!cst_and_fits_in_hwi (tdiff))
14241 return false;
14242
14243 *diff = int_cst_value (tdiff);
14244 }
14245 else if (toffset1 || toffset2)
14246 {
14247 /* If only one of the offsets is non-constant, the difference cannot
14248 be a constant. */
14249 return false;
14250 }
14251 else
14252 *diff = 0;
14253
14254 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14255 return true;
14256 }
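/* For example (illustrative): for E1 == &a[5] and E2 == &a[2] with
   4-byte elements the cores compare equal and *DIFF is set to 12;
   for addresses with different bases the function returns false.  */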
14257
14258 /* Return OFF converted to a pointer offset type suitable as offset for
14259 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14260 tree
14261 convert_to_ptrofftype_loc (location_t loc, tree off)
14262 {
14263 return fold_convert_loc (loc, sizetype, off);
14264 }
14265
14266 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14267 tree
14268 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14269 {
14270 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14271 ptr, convert_to_ptrofftype_loc (loc, off));
14272 }
14273
14274 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14275 tree
14276 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14277 {
14278 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14279 ptr, size_int (off));
14280 }
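/* A minimal usage sketch (hypothetical helper, for illustration): the
   offset passed to the helpers above is a byte offset and is converted
   to sizetype internally.  */

static tree
bump_pointer_by_4 (location_t loc, tree ptr)
{
  /* Builds the tree for PTR p+ 4, folding where possible.  */
  return fold_build_pointer_plus_hwi_loc (loc, ptr, 4);
}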
14281
14282 /* Return a char pointer for a C string if it is a string constant
14283 or the sum of a string constant and an integer constant. Only
14284 string constants properly terminated with a '\0' character are
14285 supported. If STRLEN is a valid pointer, the length (including the
14286 terminating character) of the returned string is stored to *STRLEN. */
14287
14288 const char *
14289 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14290 {
14291 tree offset_node;
14292
14293 if (strlen)
14294 *strlen = 0;
14295
14296 src = string_constant (src, &offset_node);
14297 if (src == 0)
14298 return NULL;
14299
14300 unsigned HOST_WIDE_INT offset = 0;
14301 if (offset_node != NULL_TREE)
14302 {
14303 if (!tree_fits_uhwi_p (offset_node))
14304 return NULL;
14305 else
14306 offset = tree_to_uhwi (offset_node);
14307 }
14308
14309 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14310 const char *string = TREE_STRING_POINTER (src);
14311
14312 /* Support only properly null-terminated strings. */
14313 if (string_length == 0
14314 || string[string_length - 1] != '\0'
14315 || offset >= string_length)
14316 return NULL;
14317
14318 if (strlen)
14319 *strlen = string_length - offset;
14320 return string + offset;
14321 }
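/* For example (illustrative): for SRC representing "hello" + 2 this
   returns a pointer to "llo" and stores 4 (three characters plus the
   terminating '\0') to *STRLEN; it returns NULL for string constants
   that are not '\0'-terminated or when the offset is past the end.  */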
14322
14323 #if CHECKING_P
14324
14325 namespace selftest {
14326
14327 /* Helper functions for writing tests of folding trees. */
14328
14329 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14330
14331 static void
14332 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14333 tree constant)
14334 {
14335 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14336 }
14337
14338 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14339 wrapping WRAPPED_EXPR. */
14340
14341 static void
14342 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14343 tree wrapped_expr)
14344 {
14345 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14346 ASSERT_NE (wrapped_expr, result);
14347 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14348 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14349 }
14350
14351 /* Verify that various arithmetic binary operations are folded
14352 correctly. */
14353
14354 static void
14355 test_arithmetic_folding ()
14356 {
14357 tree type = integer_type_node;
14358 tree x = create_tmp_var_raw (type, "x");
14359 tree zero = build_zero_cst (type);
14360 tree one = build_int_cst (type, 1);
14361
14362 /* Addition. */
14363 /* 1 <-- (0 + 1) */
14364 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14365 one);
14366 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14367 one);
14368
14369 /* (nonlvalue)x <-- (x + 0) */
14370 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14371 x);
14372
14373 /* Subtraction. */
14374 /* 0 <-- (x - x) */
14375 assert_binop_folds_to_const (x, MINUS_EXPR, x,
14376 zero);
14377 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14378 x);
14379
14380 /* Multiplication. */
14381 /* 0 <-- (x * 0) */
14382 assert_binop_folds_to_const (x, MULT_EXPR, zero,
14383 zero);
14384
14385 /* (nonlvalue)x <-- (x * 1) */
14386 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14387 x);
14388 }
14389
14390 /* Verify that various binary operations on vectors are folded
14391 correctly. */
14392
14393 static void
14394 test_vector_folding ()
14395 {
14396 tree inner_type = integer_type_node;
14397 tree type = build_vector_type (inner_type, 4);
14398 tree zero = build_zero_cst (type);
14399 tree one = build_one_cst (type);
14400
14401 /* Verify equality tests that return a scalar boolean result. */
14402 tree res_type = boolean_type_node;
14403 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14404 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14405 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14406 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14407 }
14408
14409 /* Run all of the selftests within this file. */
14410
14411 void
14412 fold_const_c_tests ()
14413 {
14414 test_arithmetic_folding ();
14415 test_vector_folding ();
14416 }
14417
14418 } // namespace selftest
14419
14420 #endif /* CHECKING_P */